# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import base64
import errno
import hashlib
import json
import os
import shutil
import tempfile as sys_tempfile
import unittest
from io import BytesIO

from django.core.files import temp as tempfile
from django.core.files.uploadedfile import SimpleUploadedFile
from django.http.multipartparser import MultiPartParser, parse_header
from django.test import SimpleTestCase, TestCase, client, override_settings
from django.utils.encoding import force_bytes
from django.utils.http import urlquote
from django.utils.six import PY2, StringIO

from . import uploadhandler
from .models import FileModel

UNICODE_FILENAME = 'test-0123456789_中文_Orléans.jpg'
MEDIA_ROOT = sys_tempfile.mkdtemp()
UPLOAD_TO = os.path.join(MEDIA_ROOT, 'test_upload')


@override_settings(MEDIA_ROOT=MEDIA_ROOT, ROOT_URLCONF='file_uploads.urls', MIDDLEWARE_CLASSES=[])
class FileUploadTests(TestCase):

    @classmethod
    def setUpClass(cls):
        super(FileUploadTests, cls).setUpClass()
        if not os.path.isdir(MEDIA_ROOT):
            os.makedirs(MEDIA_ROOT)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(MEDIA_ROOT)
        super(FileUploadTests, cls).tearDownClass()

    def test_simple_upload(self):
        with open(__file__, 'rb') as fp:
            post_data = {
                'name': 'Ringo',
                'file_field': fp,
            }
            response = self.client.post('/upload/', post_data)
        self.assertEqual(response.status_code, 200)

    def test_large_upload(self):
        file = tempfile.NamedTemporaryFile
        with file(suffix=".file1") as file1, file(suffix=".file2") as file2:
            file1.write(b'a' * (2 ** 21))
            file1.seek(0)

            file2.write(b'a' * (10 * 2 ** 20))
            file2.seek(0)

            post_data = {
                'name': 'Ringo',
                'file_field1': file1,
                'file_field2': file2,
            }

            for key in list(post_data):
                try:
                    post_data[key + '_hash'] = hashlib.sha1(post_data[key].read()).hexdigest()
                    post_data[key].seek(0)
                except AttributeError:
                    post_data[key + '_hash'] = hashlib.sha1(force_bytes(post_data[key])).hexdigest()

            response = self.client.post('/verify/', post_data)

            self.assertEqual(response.status_code, 200)

    def _test_base64_upload(self, content, encode=base64.b64encode):
        payload = client.FakePayload("\r\n".join([
            '--' + client.BOUNDARY,
            'Content-Disposition: form-data; name="file"; filename="test.txt"',
            'Content-Type: application/octet-stream',
            'Content-Transfer-Encoding: base64',
            '']))
        payload.write(b"\r\n" + encode(force_bytes(content)) + b"\r\n")
        payload.write('--' + client.BOUNDARY + '--\r\n')
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/echo_content/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        received = json.loads(response.content.decode('utf-8'))

        self.assertEqual(received['file'], content)

    def test_base64_upload(self):
        self._test_base64_upload("This data will be transmitted base64-encoded.")

    def test_big_base64_upload(self):
        self._test_base64_upload("Big data" * 68000)  # > 512Kb

    def test_big_base64_newlines_upload(self):
        self._test_base64_upload(
            # encodestring is a deprecated alias on Python 3
            "Big data" * 68000, encode=base64.encodestring if PY2 else base64.encodebytes)

    def test_unicode_file_name(self):
        tdir = sys_tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tdir, True)

        # This file contains Chinese symbols and an accented char in the name.
        with open(os.path.join(tdir, UNICODE_FILENAME), 'w+b') as file1:
            file1.write(b'b' * (2 ** 10))
            file1.seek(0)

            post_data = {
                'file_unicode': file1,
            }

            response = self.client.post('/unicode_name/', post_data)

        self.assertEqual(response.status_code, 200)

    def test_unicode_file_name_rfc2231(self):
        """
        Test receiving file upload when filename is encoded with RFC2231
        (#22971).
        """
        payload = client.FakePayload()
        payload.write('\r\n'.join([
            '--' + client.BOUNDARY,
            'Content-Disposition: form-data; name="file_unicode"; filename*=UTF-8\'\'%s' % urlquote(UNICODE_FILENAME),
            'Content-Type: application/octet-stream',
            '',
            'You got pwnd.\r\n',
            '\r\n--' + client.BOUNDARY + '--\r\n'
        ]))

        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/unicode_name/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        self.assertEqual(response.status_code, 200)

    def test_unicode_name_rfc2231(self):
        """
        Test receiving file upload when filename is encoded with RFC2231
        (#22971).
        """
        payload = client.FakePayload()
        payload.write(
            '\r\n'.join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name*=UTF-8\'\'file_unicode; filename*=UTF-8\'\'%s' % urlquote(
                    UNICODE_FILENAME
                ),
                'Content-Type: application/octet-stream',
                '',
                'You got pwnd.\r\n',
                '\r\n--' + client.BOUNDARY + '--\r\n'
            ])
        )

        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/unicode_name/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        self.assertEqual(response.status_code, 200)

    def test_blank_filenames(self):
        """
        Receiving file upload when filename is blank (before and after
        sanitization) should be okay.
        """
        # The second value is normalized to an empty name by
        # MultiPartParser.IE_sanitize()
        filenames = ['', 'C:\\Windows\\']

        payload = client.FakePayload()
        for i, name in enumerate(filenames):
            payload.write('\r\n'.join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
                'Content-Type: application/octet-stream',
                '',
                'You got pwnd.\r\n'
            ]))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')

        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': '/echo/',
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)
        self.assertEqual(response.status_code, 200)

        # Empty filenames should be ignored
        received = json.loads(response.content.decode('utf-8'))
        for i, name in enumerate(filenames):
            self.assertIsNone(received.get('file%s' % i))

    def test_dangerous_file_names(self):
        """Uploaded file names should be sanitized before ever reaching the view."""
        # This test simulates possible directory traversal attacks by a
        # malicious uploader. We have to do some monkeybusiness here to construct
        # a malicious payload with an invalid file name (containing os.sep or
        # os.pardir). This is similar to what an attacker would need to do when
        # trying such an attack.
        scary_file_names = [
            "/tmp/hax0rd.txt",  # Absolute path, *nix-style.
            "C:\\Windows\\hax0rd.txt",  # Absolute path, win-style.
            "C:/Windows/hax0rd.txt",  # Absolute path, broken-style.
            "\\tmp\\hax0rd.txt",  # Absolute path, broken in a different way.
            "/tmp\\hax0rd.txt",  # Absolute path, broken by mixing.
            "subdir/hax0rd.txt",  # Descendant path, *nix-style.
            "subdir\\hax0rd.txt",  # Descendant path, win-style.
            "sub/dir\\hax0rd.txt",  # Descendant path, mixed.
            "../../hax0rd.txt",  # Relative path, *nix-style.
            "..\\..\\hax0rd.txt",  # Relative path, win-style.
            "../..\\hax0rd.txt"  # Relative path, mixed.
        ]

        payload = client.FakePayload()
        for i, name in enumerate(scary_file_names):
            payload.write('\r\n'.join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name="file%s"; filename="%s"' % (i, name),
                'Content-Type: application/octet-stream',
                '',
                'You got pwnd.\r\n'
            ]))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')

        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/echo/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)

        # The filenames should have been sanitized by the time it got to the view.
        received = json.loads(response.content.decode('utf-8'))
        for i, name in enumerate(scary_file_names):
            got = received["file%s" % i]
            self.assertEqual(got, "hax0rd.txt")

    def test_filename_overflow(self):
        """File names over 256 characters (dangerous on some platforms) get fixed up."""
        long_str = 'f' * 300
        cases = [
            # field name, filename, expected
            ('long_filename', '%s.txt' % long_str, '%s.txt' % long_str[:251]),
            ('long_extension', 'foo.%s' % long_str, '.%s' % long_str[:254]),
            ('no_extension', long_str, long_str[:255]),
            ('no_filename', '.%s' % long_str, '.%s' % long_str[:254]),
            ('long_everything', '%s.%s' % (long_str, long_str), '.%s' % long_str[:254]),
        ]
        payload = client.FakePayload()
        for name, filename, _ in cases:
            payload.write("\r\n".join([
                '--' + client.BOUNDARY,
                'Content-Disposition: form-data; name="{}"; filename="{}"',
                'Content-Type: application/octet-stream',
                '',
                'Oops.',
                ''
            ]).format(name, filename))
        payload.write('\r\n--' + client.BOUNDARY + '--\r\n')
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': "/echo/",
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        response = self.client.request(**r)

        result = json.loads(response.content.decode('utf-8'))
        for name, _, expected in cases:
            got = result[name]
            self.assertEqual(expected, got, 'Mismatch for {}'.format(name))
            self.assertLess(len(got), 256,
                            "Got a long file name (%s characters)." % len(got))

    def test_file_content(self):
        file = tempfile.NamedTemporaryFile
        with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
            no_content_type.write(b'no content')
            no_content_type.seek(0)

            simple_file.write(b'text content')
            simple_file.seek(0)
            simple_file.content_type = 'text/plain'

            string_io = StringIO('string content')
            bytes_io = BytesIO(b'binary content')

            response = self.client.post('/echo_content/', {
                'no_content_type': no_content_type,
                'simple_file': simple_file,
                'string': string_io,
                'binary': bytes_io,
            })
            received = json.loads(response.content.decode('utf-8'))
            self.assertEqual(received['no_content_type'], 'no content')
            self.assertEqual(received['simple_file'], 'text content')
            self.assertEqual(received['string'], 'string content')
            self.assertEqual(received['binary'], 'binary content')

    def test_content_type_extra(self):
        """Uploaded files may have content type parameters available."""
        file = tempfile.NamedTemporaryFile
        with file(suffix=".ctype_extra") as no_content_type, file(suffix=".ctype_extra") as simple_file:
            no_content_type.write(b'something')
            no_content_type.seek(0)

            simple_file.write(b'something')
            simple_file.seek(0)
            simple_file.content_type = 'text/plain; test-key=test_value'

            response = self.client.post('/echo_content_type_extra/', {
                'no_content_type': no_content_type,
                'simple_file': simple_file,
            })
            received = json.loads(response.content.decode('utf-8'))
            self.assertEqual(received['no_content_type'], {})
            self.assertEqual(received['simple_file'], {'test-key': 'test_value'})

    def test_truncated_multipart_handled_gracefully(self):
        """
        If passed an incomplete multipart message, MultiPartParser does not
        attempt to read beyond the end of the stream, and simply will handle
        the part that can be parsed gracefully.
        """
        payload_str = "\r\n".join([
            '--' + client.BOUNDARY,
            'Content-Disposition: form-data; name="file"; filename="foo.txt"',
            'Content-Type: application/octet-stream',
            '',
            'file contents'
            '--' + client.BOUNDARY + '--',
            '',
        ])
        payload = client.FakePayload(payload_str[:-10])
        r = {
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': '/echo/',
            'REQUEST_METHOD': 'POST',
            'wsgi.input': payload,
        }
        got = json.loads(self.client.request(**r).content.decode('utf-8'))
        self.assertEqual(got, {})

    def test_empty_multipart_handled_gracefully(self):
        """
        If passed an empty multipart message, MultiPartParser will return
        an empty QueryDict.
        """
        r = {
            'CONTENT_LENGTH': 0,
            'CONTENT_TYPE': client.MULTIPART_CONTENT,
            'PATH_INFO': '/echo/',
            'REQUEST_METHOD': 'POST',
            'wsgi.input': client.FakePayload(b''),
        }
        got = json.loads(self.client.request(**r).content.decode('utf-8'))
        self.assertEqual(got, {})

    def test_custom_upload_handler(self):
        file = tempfile.NamedTemporaryFile
        with file() as smallfile, file() as bigfile:
            # A small file (under the 5M quota)
            smallfile.write(b'a' * (2 ** 21))
            smallfile.seek(0)

            # A big file (over the quota)
            bigfile.write(b'a' * (10 * 2 ** 20))
            bigfile.seek(0)

            # Small file posting should work.
            response = self.client.post('/quota/', {'f': smallfile})
            got = json.loads(response.content.decode('utf-8'))
            self.assertIn('f', got)

            # Large files don't go through.
            response = self.client.post("/quota/", {'f': bigfile})
            got = json.loads(response.content.decode('utf-8'))
            self.assertNotIn('f', got)

    def test_broken_custom_upload_handler(self):
        with tempfile.NamedTemporaryFile() as file:
            file.write(b'a' * (2 ** 21))
            file.seek(0)

            # AttributeError: You cannot alter upload handlers after the upload has been processed.
            with self.assertRaises(AttributeError):
                self.client.post('/quota/broken/', {'f': file})

    def test_fileupload_getlist(self):
        file = tempfile.NamedTemporaryFile
        with file() as file1, file() as file2, file() as file2a:
            file1.write(b'a' * (2 ** 23))
            file1.seek(0)

            file2.write(b'a' * (2 * 2 ** 18))
            file2.seek(0)

            file2a.write(b'a' * (5 * 2 ** 20))
            file2a.seek(0)

            response = self.client.post('/getlist_count/', {
                'file1': file1,
                'field1': 'test',
                'field2': 'test3',
                'field3': 'test5',
                'field4': 'test6',
                'field5': 'test7',
                'file2': (file2, file2a)
            })
            got = json.loads(response.content.decode('utf-8'))

            self.assertEqual(got.get('file1'), 1)
            self.assertEqual(got.get('file2'), 2)

    def test_fileuploads_closed_at_request_end(self):
        file = tempfile.NamedTemporaryFile
        with file() as f1, file() as f2a, file() as f2b:
            response = self.client.post('/fd_closing/t/', {
                'file': f1,
                'file2': (f2a, f2b),
            })

        request = response.wsgi_request
        # Check that the files got actually parsed.
        self.assertTrue(hasattr(request, '_files'))

        file = request._files['file']
        self.assertTrue(file.closed)

        files = request._files.getlist('file2')
        self.assertTrue(files[0].closed)
        self.assertTrue(files[1].closed)

    def test_no_parsing_triggered_by_fd_closing(self):
        file = tempfile.NamedTemporaryFile
        with file() as f1, file() as f2a, file() as f2b:
            response = self.client.post('/fd_closing/f/', {
                'file': f1,
                'file2': (f2a, f2b),
            })

        request = response.wsgi_request
        # Check that the fd closing logic doesn't trigger parsing of the stream
        self.assertFalse(hasattr(request, '_files'))

    def test_file_error_blocking(self):
        """
        The server should not block when there are upload errors (bug #8622).
        This can happen if something -- i.e. an exception handler -- tries to
        access POST while handling an error in parsing POST. This shouldn't
        cause an infinite loop!
        """
        class POSTAccessingHandler(client.ClientHandler):
            """A handler that'll access POST during an exception."""
            def handle_uncaught_exception(self, request, resolver, exc_info):
                ret = super(POSTAccessingHandler, self).handle_uncaught_exception(request, resolver, exc_info)
                request.POST  # evaluate
                return ret

        # Maybe this is a little more complicated than it needs to be; but if
        # the django.test.client.FakePayload.read() implementation changes then
        # this test would fail. So we need to know exactly what kind of error
        # it raises when there is an attempt to read more than the available bytes:
        try:
            client.FakePayload(b'a').read(2)
        except Exception as err:
            reference_error = err

        # install the custom handler that tries to access request.POST
        self.client.handler = POSTAccessingHandler()

        with open(__file__, 'rb') as fp:
            post_data = {
                'name': 'Ringo',
                'file_field': fp,
            }
            try:
                self.client.post('/upload_errors/', post_data)
            except reference_error.__class__ as err:
                self.assertFalse(
                    str(err) == str(reference_error),
                    "Caught a repeated exception that'll cause an infinite loop in file uploads."
                )
            except Exception as err:
                # CustomUploadError is the error that should have been raised
                self.assertEqual(err.__class__, uploadhandler.CustomUploadError)

    def test_filename_case_preservation(self):
        """
        The storage backend shouldn't mess with the case of the filenames
        uploaded.
        """
        # Synthesize the contents of a file upload with a mixed case filename
        # so we don't have to carry such a file in the Django tests source code
        # tree.
        vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
        post_data = [
            '--%(boundary)s',
            'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
            'Content-Type: application/octet-stream',
            '',
            'file contents\n'
            '',
            '--%(boundary)s--\r\n',
        ]
        response = self.client.post(
            '/filename_case/',
            '\r\n'.join(post_data) % vars,
            'multipart/form-data; boundary=%(boundary)s' % vars
        )
        self.assertEqual(response.status_code, 200)
        id = int(response.content)
        obj = FileModel.objects.get(pk=id)
        # The name of the file uploaded and the name of the file stored on the
        # server side shouldn't differ.
        self.assertEqual(os.path.basename(obj.testfile.path), 'MiXeD_cAsE.txt')


@override_settings(MEDIA_ROOT=MEDIA_ROOT)
class DirectoryCreationTests(SimpleTestCase):
    """
    Tests for error handling during directory creation
    via _save_FIELD_file (ticket #6450)
    """
    @classmethod
    def setUpClass(cls):
        super(DirectoryCreationTests, cls).setUpClass()
        if not os.path.isdir(MEDIA_ROOT):
            os.makedirs(MEDIA_ROOT)

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(MEDIA_ROOT)
        super(DirectoryCreationTests, cls).tearDownClass()

    def setUp(self):
        self.obj = FileModel()

    def test_readonly_root(self):
        """Permission errors are not swallowed"""
        os.chmod(MEDIA_ROOT, 0o500)
        self.addCleanup(os.chmod, MEDIA_ROOT, 0o700)
        try:
            self.obj.testfile.save('foo.txt', SimpleUploadedFile('foo.txt', b'x'), save=False)
        except OSError as err:
            self.assertEqual(err.errno, errno.EACCES)
        except Exception:
            self.fail("OSError [Errno %s] not raised." % errno.EACCES)

    def test_not_a_directory(self):
        """The correct IOError is raised when the upload directory name exists but isn't a directory"""
        # Create a file with the upload directory name
        open(UPLOAD_TO, 'wb').close()
        self.addCleanup(os.remove, UPLOAD_TO)
        with self.assertRaises(IOError) as exc_info:
            with SimpleUploadedFile('foo.txt', b'x') as file:
                self.obj.testfile.save('foo.txt', file, save=False)
        # The test needs to be done on a specific string as IOError
        # is raised even without the patch (just not early enough)
        self.assertEqual(exc_info.exception.args[0], "%s exists and is not a directory." % UPLOAD_TO)


class MultiParserTests(unittest.TestCase):

    def test_empty_upload_handlers(self):
        # We're not actually parsing here; just checking if the parser properly
        # instantiates with empty upload handlers.
        MultiPartParser({
            'CONTENT_TYPE': 'multipart/form-data; boundary=_foo',
            'CONTENT_LENGTH': '1'
        }, StringIO('x'), [], 'utf-8')

    def test_rfc2231_parsing(self):
        test_data = (
            (b"Content-Type: application/x-stuff; title*=us-ascii'en-us'This%20is%20%2A%2A%2Afun%2A%2A%2A",
             "This is ***fun***"),
            (b"Content-Type: application/x-stuff; title*=UTF-8''foo-%c3%a4.html",
             "foo-ä.html"),
            (b"Content-Type: application/x-stuff; title*=iso-8859-1''foo-%E4.html",
             "foo-ä.html"),
        )
        for raw_line, expected_title in test_data:
            parsed = parse_header(raw_line)
            self.assertEqual(parsed[1]['title'], expected_title)

    def test_rfc2231_wrong_title(self):
        """
        Test wrongly formatted RFC 2231 headers (missing double single quotes).
        Parsing should not crash (#24209).
        """
        test_data = (
            (b"Content-Type: application/x-stuff; title*='This%20is%20%2A%2A%2Afun%2A%2A%2A",
             b"'This%20is%20%2A%2A%2Afun%2A%2A%2A"),
            (b"Content-Type: application/x-stuff; title*='foo.html",
             b"'foo.html"),
            (b"Content-Type: application/x-stuff; title*=bar.html",
             b"bar.html"),
        )
        for raw_line, expected_title in test_data:
            parsed = parse_header(raw_line)
            self.assertEqual(parsed[1]['title'], expected_title)