# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

import time
import warnings
from datetime import datetime, timedelta
from io import BytesIO

from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
from django.test import TransactionTestCase
from django.test.client import FakePayload
from django.test.utils import override_settings, str_prefix
from django.utils import six
from django.utils import unittest
from django.utils.http import cookie_date, urlencode
from django.utils.timezone import utc


class RequestsTests(unittest.TestCase):
    def test_httprequest(self):
        request = HttpRequest()
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(list(request.META.keys()), [])

    def test_httprequest_repr(self):
        request = HttpRequest()
        request.path = '/somepath/'
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

    def test_wsgirequest(self):
        request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
        self.assertEqual(request.META['PATH_INFO'], 'bogus')
        self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
        self.assertEqual(request.META['SCRIPT_NAME'], '')

    def test_wsgirequest_repr(self):
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

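    # WSGI servers hand Django "native strings" (PEP 3333): on Python 3 the raw
    # request bytes are decoded as ISO-8859-1 before being placed in the environ.
    # wsgi_str() below simulates that round trip for a UTF-8 encoded URL.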
    def test_wsgirequest_path_info(self):
        def wsgi_str(path_info):
            path_info = path_info.encode('utf-8')           # Actual URL sent by the browser (bytestring)
            if six.PY3:
                path_info = path_info.decode('iso-8859-1')  # Value in the WSGI environ dict (native string)
            return path_info
        # Regression for #19468
        request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, "/سلام/")

    def test_parse_cookie(self):
        self.assertEqual(parse_cookie('invalid@key=true'), {})

    def test_httprequest_location(self):
        request = HttpRequest()
        self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
            'https://www.example.com/asdf')

        request.get_host = lambda: 'www.example.com'
        request.path = ''
        self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
            'http://www.example.com/path/with:colons')

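    # get_host() resolution, exercised by the two tests below: HTTP_HOST wins
    # when present, otherwise SERVER_NAME (plus SERVER_PORT when it isn't 80),
    # and the X-Forwarded-Host header is only consulted when the
    # USE_X_FORWARDED_HOST setting is enabled.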
    @override_settings(USE_X_FORWARDED_HOST=False)
    def test_http_get_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is ignored.
        self.assertEqual(request.get_host(), 'example.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        # Poisoned host headers are rejected as suspicious
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]

        poisoned_hosts = [
            'example.com@evil.tld',
            'example.com:dr.frankenstein@evil.tld',
            'example.com:dr.frankenstein@evil.tld:80',
            'example.com:80/badpath',
            'example.com: recovermypassword.com',
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        for host in poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

    @override_settings(USE_X_FORWARDED_HOST=True)
    def test_http_get_host_with_x_forwarded_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is obeyed.
        self.assertEqual(request.get_host(), 'forward.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        # Poisoned host headers are rejected as suspicious
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]

        poisoned_hosts = [
            'example.com@evil.tld',
            'example.com:dr.frankenstein@evil.tld',
            'example.com:dr.frankenstein@evil.tld:80',
            'example.com:80/badpath',
            'example.com: recovermypassword.com',
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        for host in poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

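    # The following tests cover HttpResponse.set_cookie() expiry handling:
    # max-age is derived from an "expires" datetime (naive or timezone-aware),
    # far-future dates are serialized as cookie date strings, an explicit
    # max_age also fills in "expires", and httponly shows up both as a cookie
    # attribute and in the serialized Set-Cookie output.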
    def test_near_expiration(self):
        "Cookie will expire when a near expiration time is provided"
        response = HttpResponse()
        # There is a timing weakness in this test; the
        # expected result for max-age requires that there be
        # a very slight difference between the evaluated expiration
        # time, and the time evaluated in set_cookie(). If this
        # difference doesn't exist, the cookie time will be
        # 1 second larger. To avoid the problem, put in a quick sleep,
        # which guarantees that there will be a time difference.
        expires = datetime.utcnow() + timedelta(seconds=10)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_aware_expiration(self):
        "Cookie accepts an aware datetime as expiration time"
        response = HttpResponse()
        expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_far_expiration(self):
        "Cookie will expire when a distant expiration time is provided"
        response = HttpResponse()
        response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')

    def test_max_age_expiration(self):
        "Cookie will expire if max_age is provided"
        response = HttpResponse()
        response.set_cookie('max_age', max_age=10)
        max_age_cookie = response.cookies['max_age']
        self.assertEqual(max_age_cookie['max-age'], 10)
        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))

    def test_httponly_cookie(self):
        response = HttpResponse()
        response.set_cookie('example', httponly=True)
        example_cookie = response.cookies['example']
        # A compat cookie may be in use -- check that it has worked
        # both as an output string, and using the cookie attributes
        self.assertTrue('; httponly' in str(example_cookie))
        self.assertTrue(example_cookie['httponly'])

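    # LimitedStream wraps a file-like object and caps how many bytes read()
    # and readline() may return in total; WSGIRequest uses it so that request
    # parsing never reads past CONTENT_LENGTH on the raw wsgi.input stream.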
    def test_limited_stream(self):
        # Read all of a limited stream
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.read(), b'')

        # Read a number of characters greater than the stream has to offer
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(5), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read sequentially from a stream
        stream = LimitedStream(BytesIO(b'12345678'), 8)
        self.assertEqual(stream.read(5), b'12345')
        self.assertEqual(stream.read(5), b'678')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read lines from a stream
        stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
        # Read a full line, unconditionally
        self.assertEqual(stream.readline(), b'1234\n')
        # Read a number of characters less than a line
        self.assertEqual(stream.readline(2), b'56')
        # Read the rest of the partial line
        self.assertEqual(stream.readline(), b'78\n')
        # Read a full line, with a character limit greater than the line length
        self.assertEqual(stream.readline(6), b'abcd\n')
        # Read the next line, deliberately terminated at the line end
        self.assertEqual(stream.readline(4), b'efgh')
        # Read the next line... just the line end
        self.assertEqual(stream.readline(), b'\n')
        # Read everything else.
        self.assertEqual(stream.readline(), b'ijkl')

        # Regression for #15018
        # If a stream contains a newline, but the provided length
        # is less than the number of provided characters, the newline
        # doesn't reset the available character count
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.readline(10), b'1234\n')
        self.assertEqual(stream.readline(3), b'abc')
        # Now expire the available characters
        self.assertEqual(stream.readline(3), b'd')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(2), b'')

        # Same test, but with read, not readline.
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.read(6), b'1234\na')
        self.assertEqual(stream.read(2), b'bc')
        self.assertEqual(stream.read(2), b'd')
        self.assertEqual(stream.read(2), b'')
        self.assertEqual(stream.read(), b'')

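    # The remaining tests exercise the interaction between the three ways of
    # getting at a request's payload -- the file-like read()/iteration API,
    # request.body, and the parsed request.POST -- since form parsing consumes
    # the underlying wsgi.input stream.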
    def test_stream(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(), b'name=value')

    def test_read_after_value(self):
        """
        Reading from request is allowed after accessing request contents as
        POST or body.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertEqual(request.body, b'name=value')
        self.assertEqual(request.read(), b'name=value')

    def test_value_after_read(self):
        """
        Construction of POST or body is not allowed after reading
        from request.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(2), b'na')
        self.assertRaises(Exception, lambda: request.body)
        self.assertEqual(request.POST, {})

    def test_non_ascii_POST(self):
        payload = FakePayload(urlencode({'key': 'España'}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_alternate_charset_POST(self):
        """
        Test a POST with non-utf-8 payload encoding.
        """
        from django.utils.http import urllib_parse
        payload = FakePayload(urllib_parse.urlencode({'key': 'España'.encode('latin-1')}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_body_after_POST_multipart(self):
        """
        Reading body after parsing multipart is not allowed
        """
        # Because multipart is used for large amounts of data, i.e. file uploads,
        # we don't want the data held in memory twice, and we don't want to
        # silence the error by setting body = '' either.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertRaises(Exception, lambda: request.body)

    def test_POST_multipart_with_content_length_zero(self):
        """
        Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
        """
        # According to:
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
        # every POST request with Content-Length >= 0 is a valid request;
        # this test ensures that we handle Content-Length == 0.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': 0,
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {})

    def test_POST_binary_only(self):
        payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
        environ = {'REQUEST_METHOD': 'POST',
                   'CONTENT_TYPE': 'application/octet-stream',
                   'CONTENT_LENGTH': len(payload),
                   'wsgi.input': BytesIO(payload)}
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

        # Same test without specifying content-type
        environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

    def test_read_by_lines(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(list(request), [b'name=value'])

    def test_POST_after_body_read(self):
        """
        POST should be populated even if body is read first
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        self.assertEqual(request.read(1), b'n')
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read_multipart(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second. Using multipart/form-data instead of urlencoded.
        """
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        # Consume enough data to mess up the parsing:
        self.assertEqual(request.read(13), b'--boundary\r\nC')
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_connection_error(self):
        """
        If wsgi.input.read() raises an exception while trying to read() the
        POST, the exception should be identifiable (not a generic IOError).
        """
        class ExplodingBytesIO(BytesIO):
            def read(self, len=0):
                raise IOError("kaboom!")

        payload = b'name=value'
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': ExplodingBytesIO(payload)})

        with self.assertRaises(UnreadablePostError):
            request.body


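# These tests verify that the request_finished signal leaves the database
# layer in a clean state: transaction bookkeeping is reset and, where the
# backend allows it, the connection is closed or abandoned.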
class TransactionRequestTests(TransactionTestCase):
    def test_request_finished_db_state(self):
        # The GET below will not succeed, but it will give a response with
        # defined ._handler_class. That is needed for sending the
        # request_finished signal.
        response = self.client.get('/')
        # Make sure there is an open connection
        connection.cursor()
        connection.enter_transaction_management()
        connection.managed(True)
        signals.request_finished.send(sender=response._handler_class)
        # In-memory sqlite doesn't actually close connections.
        if connection.vendor != 'sqlite':
            self.assertIs(connection.connection, None)
        self.assertEqual(len(connection.transaction_state), 0)

    @unittest.skipIf(connection.vendor == 'sqlite',
                     'This test will close the connection, in-memory '
                     'sqlite connections must not be closed.')
    def test_request_finished_failed_connection(self):
        # See comments in test_request_finished_db_state() for the self.client
        # usage.
        response = self.client.get('/')
        conn = connections[DEFAULT_DB_ALIAS]
        conn.enter_transaction_management()
        conn.managed(True)
        conn.set_dirty()
        # Test that the rollback doesn't succeed (for example, a network
        # failure could cause this).
        def fail_horribly():
            raise Exception("Horrible failure!")
        conn._rollback = fail_horribly
        signals.request_finished.send(sender=response._handler_class)
        # As even rollback wasn't possible, the connection wrapper itself was
        # abandoned. Accessing connections[alias] will create a new connection
        # wrapper, which must be different from the original one.
        self.assertIsNot(conn, connections[DEFAULT_DB_ALIAS])
        self.assertEqual(len(connection.transaction_state), 0)