# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

from datetime import datetime, timedelta
from io import BytesIO
from itertools import chain
import time
from unittest import skipIf

from django.db import connection, connections
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import (HttpRequest, HttpResponse, parse_cookie,
    build_request_repr, UnreadablePostError, RawPostDataException)
from django.test import SimpleTestCase, TransactionTestCase
from django.test.client import FakePayload
from django.test.utils import override_settings, str_prefix
from django.utils import six
from django.utils.http import cookie_date, urlencode
from django.utils.six.moves.urllib.parse import urlencode as original_urlencode
from django.utils.timezone import utc


class RequestsTests(SimpleTestCase):
    def test_httprequest(self):
        request = HttpRequest()
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(list(request.META.keys()), [])

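    # str_prefix() (django.test.utils) fills the %(_)s placeholders below with
    # 'u' on Python 2 and '' on Python 3, so one expected literal matches the
    # native string repr on both versions.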
    def test_httprequest_repr(self):
        request = HttpRequest()
        request.path = '/somepath/'
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

    def test_wsgirequest(self):
        request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
        self.assertEqual(request.META['PATH_INFO'], 'bogus')
        self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
        self.assertEqual(request.META['SCRIPT_NAME'], '')

    def test_wsgirequest_with_script_name(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the SCRIPT_NAME has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')
        # Without trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')

    def test_wsgirequest_with_force_script_name(self):
        """
        Ensure that the FORCE_SCRIPT_NAME setting takes precedence over the
        request's SCRIPT_NAME environment parameter.
        Refs #20169.
        """
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')

    def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the FORCE_SCRIPT_NAME setting has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
        # Without trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')

    def test_wsgirequest_repr(self):
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

    def test_wsgirequest_path_info(self):
        def wsgi_str(path_info):
            path_info = path_info.encode('utf-8')  # Actual URL sent by the browser (bytestring)
            if six.PY3:
                path_info = path_info.decode('iso-8859-1')  # Value in the WSGI environ dict (native string)
            return path_info
        # Regression for #19468
        request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, "/سلام/")

    def test_parse_cookie(self):
        self.assertEqual(parse_cookie('invalid@key=true'), {})

    def test_httprequest_location(self):
        request = HttpRequest()
        self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
            'https://www.example.com/asdf')

        request.get_host = lambda: 'www.example.com'
        request.path = ''
        self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
            'http://www.example.com/path/with:colons')

    def test_near_expiration(self):
        "Cookie will expire when a near expiration time is provided"
        response = HttpResponse()
        # There is a timing weakness in this test; the
        # expected result for max-age requires that there be
        # a very slight difference between the evaluated expiration
        # time, and the time evaluated in set_cookie(). If this
        # difference doesn't exist, the cookie time will be
        # 1 second larger. To avoid the problem, put in a quick sleep,
        # which guarantees that there will be a time difference.
        expires = datetime.utcnow() + timedelta(seconds=10)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_aware_expiration(self):
        "Cookie accepts an aware datetime as expiration time"
        response = HttpResponse()
        expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_far_expiration(self):
        "Cookie will expire when a distant expiration time is provided"
        response = HttpResponse()
        response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')

    def test_max_age_expiration(self):
        "Cookie will expire if max_age is provided"
        response = HttpResponse()
        response.set_cookie('max_age', max_age=10)
        max_age_cookie = response.cookies['max_age']
        self.assertEqual(max_age_cookie['max-age'], 10)
        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time() + 10))

    def test_httponly_cookie(self):
        response = HttpResponse()
        response.set_cookie('example', httponly=True)
        example_cookie = response.cookies['example']
        # A compat cookie may be in use -- check that it has worked
        # both as an output string, and using the cookie attributes
        self.assertTrue('; httponly' in str(example_cookie))
        self.assertTrue(example_cookie['httponly'])

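    # LimitedStream wraps a stream (normally wsgi.input) and stops returning
    # data once the declared limit is reached, so a handler cannot consume more
    # bytes than the request's Content-Length allows.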
    def test_limited_stream(self):
        # Read all of a limited stream
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.read(), b'')

        # Read a number of characters greater than the stream has to offer
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(5), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read sequentially from a stream
        stream = LimitedStream(BytesIO(b'12345678'), 8)
        self.assertEqual(stream.read(5), b'12345')
        self.assertEqual(stream.read(5), b'678')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read lines from a stream
        stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
        # Read a full line, unconditionally
        self.assertEqual(stream.readline(), b'1234\n')
        # Read a number of characters less than a line
        self.assertEqual(stream.readline(2), b'56')
        # Read the rest of the partial line
        self.assertEqual(stream.readline(), b'78\n')
        # Read a full line, with a character limit greater than the line length
        self.assertEqual(stream.readline(6), b'abcd\n')
        # Read the next line, deliberately terminated at the line end
        self.assertEqual(stream.readline(4), b'efgh')
        # Read the next line... just the line end
        self.assertEqual(stream.readline(), b'\n')
        # Read everything else.
        self.assertEqual(stream.readline(), b'ijkl')

        # Regression for #15018
        # If a stream contains a newline, but the provided length
        # is less than the number of provided characters, the newline
        # doesn't reset the available character count
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.readline(10), b'1234\n')
        self.assertEqual(stream.readline(3), b'abc')
        # Now expire the available characters
        self.assertEqual(stream.readline(3), b'd')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(2), b'')

        # Same test, but with read, not readline.
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.read(6), b'1234\na')
        self.assertEqual(stream.read(2), b'bc')
        self.assertEqual(stream.read(2), b'd')
        self.assertEqual(stream.read(2), b'')
        self.assertEqual(stream.read(), b'')

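    # FakePayload (django.test.client) imitates wsgi.input: it serves exactly
    # the bytes it was given and complains if a test reads past them, and
    # len(payload) gives the byte length passed as CONTENT_LENGTH below.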
    def test_stream(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(), b'name=value')

    def test_read_after_value(self):
        """
        Reading from request is allowed after accessing request contents as
        POST or body.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertEqual(request.body, b'name=value')
        self.assertEqual(request.read(), b'name=value')

    def test_value_after_read(self):
        """
        Construction of POST or body is not allowed after reading
        from request.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(2), b'na')
        self.assertRaises(RawPostDataException, lambda: request.body)
        self.assertEqual(request.POST, {})

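    # Two urlencode helpers appear below: django.utils.http.urlencode, which
    # works on text and produces UTF-8 percent-encoding, and the standard
    # library's urlencode (imported as original_urlencode), used here to build
    # form data from latin-1 encoded bytes.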
    def test_non_ascii_POST(self):
        payload = FakePayload(urlencode({'key': 'España'}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_alternate_charset_POST(self):
        """
        Test a POST with non-utf-8 payload encoding.
        """
        payload = FakePayload(original_urlencode({'key': 'España'.encode('latin-1')}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_body_after_POST_multipart_form_data(self):
        """
        Reading body after parsing multipart/form-data is not allowed
        """
        # Because multipart is used for large amounts of data, i.e. file uploads,
        # we don't want the data held in memory twice, and we don't want to
        # silence the error by setting body = '' either.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertRaises(RawPostDataException, lambda: request.body)

    def test_body_after_POST_multipart_related(self):
        """
        Reading body after parsing multipart that isn't form-data is allowed
        """
        # Ticket #9054
        # There are cases in which the multipart data is related instead of
        # being a binary upload, in which case it should still be accessible
        # via body.
        payload_data = b"\r\n".join([
            b'--boundary',
            b'Content-ID: id; name="name"',
            b'',
            b'value',
            b'--boundary--'
            b''])
        payload = FakePayload(payload_data)
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/related; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {})
        self.assertEqual(request.body, payload_data)

    def test_POST_multipart_with_content_length_zero(self):
        """
        Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
        """
        # According to:
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
        # every request with Content-Length >= 0 is a valid request;
        # this test ensures that we handle Content-Length == 0.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': 0,
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {})

    def test_POST_binary_only(self):
        payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
        environ = {'REQUEST_METHOD': 'POST',
                   'CONTENT_TYPE': 'application/octet-stream',
                   'CONTENT_LENGTH': len(payload),
                   'wsgi.input': BytesIO(payload)}
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

        # Same test without specifying content-type
        environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

    def test_read_by_lines(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(list(request), [b'name=value'])

    def test_POST_after_body_read(self):
        """
        POST should be populated even if body is read first
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        request.body  # evaluate
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        request.body  # evaluate
        self.assertEqual(request.read(1), b'n')
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read_multipart(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second. Using multipart/form-data instead of urlencoded.
        """
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        request.body  # evaluate
        # Consume enough data to mess up the parsing:
        self.assertEqual(request.read(13), b'--boundary\r\nC')
        self.assertEqual(request.POST, {'name': ['value']})

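    # UnreadablePostError (an IOError subclass) is what Django raises when
    # wsgi.input itself fails mid-read, so the failure is identifiable rather
    # than surfacing as a generic IOError.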
    def test_POST_connection_error(self):
        """
        If wsgi.input.read() raises an exception while trying to read() the
        POST, the exception should be identifiable (not a generic IOError).
        """
        class ExplodingBytesIO(BytesIO):
            def read(self, len=0):
                raise IOError("kaboom!")

        payload = b'name=value'
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': ExplodingBytesIO(payload)})

        with self.assertRaises(UnreadablePostError):
            request.body

    def test_FILES_connection_error(self):
        """
        If wsgi.input.read() raises an exception while trying to read() the
        FILES, the exception should be identifiable (not a generic IOError).
        """
        class ExplodingBytesIO(BytesIO):
            def read(self, len=0):
                raise IOError("kaboom!")

        payload = b'x'
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': ExplodingBytesIO(payload)})

        with self.assertRaises(UnreadablePostError):
            request.FILES


class HostValidationTests(SimpleTestCase):
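    # Host header values crafted to exploit loose host parsing (user@host
    # tricks, embedded paths, space-separated domains); get_host() must reject
    # every one of them as suspicious.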
    poisoned_hosts = [
        'example.com@evil.tld',
        'example.com:dr.frankenstein@evil.tld',
        'example.com:dr.frankenstein@evil.tld:80',
        'example.com:80/badpath',
        'example.com: recovermypassword.com',
    ]

    @override_settings(
        USE_X_FORWARDED_HOST=False,
        ALLOWED_HOSTS=[
            'forward.com', 'example.com', 'internal.com', '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
            '.multitenant.com', 'INSENSITIVE.com',
        ])
    def test_http_get_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is ignored.
        self.assertEqual(request.get_host(), 'example.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
            'anything.multitenant.com',
            'multitenant.com',
            'insensitive.com',
            'example.com.',
            'example.com.:80',
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        # Poisoned host headers are rejected as suspicious
        for host in chain(self.poisoned_hosts, ['other.com', 'example.com..']):
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

    @override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
    def test_http_get_host_with_x_forwarded_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is obeyed.
        self.assertEqual(request.get_host(), 'forward.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        # Legitimate host headers are accepted...
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        # ... and poisoned host headers are rejected as suspicious.
        for host in self.poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

    @override_settings(DEBUG=True, ALLOWED_HOSTS=[])
    def test_host_validation_disabled_in_debug_mode(self):
        """If ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass."""
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Invalid hostnames would normally raise a SuspiciousOperation,
        # but we have DEBUG=True, so this check is disabled.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': "invalid_hostname.com",
        }
        self.assertEqual(request.get_host(), "invalid_hostname.com")

    @override_settings(ALLOWED_HOSTS=[])
    def test_get_host_suggestion_of_allowed_host(self):
        """get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
        msg_invalid_host = "Invalid HTTP_HOST header: %r."
        msg_suggestion = msg_invalid_host + "You may need to add %r to ALLOWED_HOSTS."
        msg_suggestion2 = msg_invalid_host + "The domain name provided is not valid according to RFC 1034/1035"

        for host in [  # Valid-looking hosts
            'example.com',
            '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, host),
                request.get_host
            )

        for domain, port in [  # Valid-looking hosts with a port number
            ('example.com', 80),
            ('12.34.56.78', 443),
            ('[2001:19f0:feee::dead:beef:cafe]', 8080),
        ]:
            host = '%s:%s' % (domain, port)
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, domain),
                request.get_host
            )

        for host in self.poisoned_hosts:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_invalid_host % host,
                request.get_host
            )

        request = HttpRequest()
        request.META = {'HTTP_HOST': "invalid_hostname.com"}
        self.assertRaisesMessage(
            SuspiciousOperation,
            msg_suggestion2 % "invalid_hostname.com",
            request.get_host
        )


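# These tests end up using a second database connection alongside the main
# one, which an in-memory SQLite database cannot provide; hence the skipIf
# guard below.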
@skipIf(connection.vendor == 'sqlite'
        and connection.settings_dict['TEST_NAME'] in (None, '', ':memory:'),
        "Cannot establish two connections to an in-memory SQLite database.")
class DatabaseConnectionHandlingTests(TransactionTestCase):

    available_apps = []

    def setUp(self):
        # Use a temporary connection to avoid messing with the main one.
        self._old_default_connection = connections['default']
        del connections['default']

    def tearDown(self):
        try:
            connections['default'].close()
        finally:
            connections['default'] = self._old_default_connection

    def test_request_finished_db_state(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0

        # The GET below will not succeed, but it will give a response with
        # defined ._handler_class. That is needed for sending the
        # request_finished signal.
        response = self.client.get('/')
        # Make sure there is an open connection
        connection.cursor()
        connection.enter_transaction_management()
        signals.request_finished.send(sender=response._handler_class)
        self.assertEqual(len(connection.transaction_state), 0)

    def test_request_finished_failed_connection(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0

        connection.enter_transaction_management()
        connection.set_dirty()

        # Test the case where the rollback doesn't succeed (a network failure,
        # for example, could cause this).
        def fail_horribly():
            raise Exception("Horrible failure!")
        connection._rollback = fail_horribly
        try:
            with self.assertRaises(Exception):
                signals.request_finished.send(sender=self.__class__)
            # The connection's state wasn't cleaned up
            self.assertEqual(len(connection.transaction_state), 1)
        finally:
            del connection._rollback
        # The connection will be cleaned up on the next request, once the
        # connection works again.
        signals.request_finished.send(sender=self.__class__)
        self.assertEqual(len(connection.transaction_state), 0)