author     Aldo Cortesi <aldo@nullcube.com>    2012-06-23 13:56:17 +1200
committer  Aldo Cortesi <aldo@nullcube.com>    2012-06-23 13:56:17 +1200
commit     5cf6aeb926e0b3a1cad23a0b169b8dfa8536a22f (patch)
tree       74ae0724c4e7647fc5d0b330691cd64bb6ce37c6 /test/test_http.py
parent     227e72abf4124cbf55328cd15be917b4af99367f (diff)
download   mitmproxy-5cf6aeb926e0b3a1cad23a0b169b8dfa8536a22f.tar.gz
           mitmproxy-5cf6aeb926e0b3a1cad23a0b169b8dfa8536a22f.tar.bz2
           mitmproxy-5cf6aeb926e0b3a1cad23a0b169b8dfa8536a22f.zip
protocol.py -> http.py
Diffstat (limited to 'test/test_http.py')
-rw-r--r--    test/test_http.py    163
1 file changed, 163 insertions(+), 0 deletions(-)
diff --git a/test/test_http.py b/test/test_http.py
new file mode 100644
index 00000000..d272f343
--- /dev/null
+++ b/test/test_http.py
@@ -0,0 +1,163 @@
+import cStringIO, textwrap
+from netlib import http, odict
+import tutils
+
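+# Note: has_chunked_encoding() looks at the Transfer-Encoding header; since the
+# headers live in an ODictCaseless, the lowercase key used below should match
+# regardless of case.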
+def test_has_chunked_encoding():
+    h = odict.ODictCaseless()
+    assert not http.has_chunked_encoding(h)
+    h["transfer-encoding"] = ["chunked"]
+    assert http.has_chunked_encoding(h)
+
+
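+# Chunked transfer coding on the wire: each chunk is "<size in hex>\r\n<data>\r\n",
+# and the body is terminated by a zero-sized chunk followed by a final CRLF
+# ("0\r\n\r\n"). The first case below omits that final CRLF, so the read should fail.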
+def test_read_chunked():
+    s = cStringIO.StringIO("1\r\na\r\n0\r\n")
+    tutils.raises(IOError, http.read_chunked, s, None)
+
+    s = cStringIO.StringIO("1\r\na\r\n0\r\n\r\n")
+    assert http.read_chunked(s, None) == "a"
+
+    s = cStringIO.StringIO("\r\n")
+    tutils.raises(IOError, http.read_chunked, s, None)
+
+    s = cStringIO.StringIO("1\r\nfoo")
+    tutils.raises(IOError, http.read_chunked, s, None)
+
+    s = cStringIO.StringIO("foo\r\nfoo")
+    tutils.raises(http.HttpError, http.read_chunked, s, None)
+
+
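+# HTTP/1.0 connections close after each request by default, while HTTP/1.1
+# defaults to keep-alive unless a "Connection: close" header says otherwise.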
+def test_request_connection_close():
+    h = odict.ODictCaseless()
+    assert http.request_connection_close((1, 0), h)
+    assert not http.request_connection_close((1, 1), h)
+
+    h["connection"] = ["keep-alive"]
+    assert not http.request_connection_close((1, 1), h)
+
+
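+# Judging by the calls below, read_http_body() takes (stream, headers,
+# read-to-EOF flag, size limit): with no Content-Length and the flag unset
+# nothing is read, a bogus Content-Length raises HttpError, and the limit
+# either caps a read-to-EOF body or rejects one that exceeds it.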
+def test_read_http_body():
+    h = odict.ODict()
+    s = cStringIO.StringIO("testing")
+    assert http.read_http_body(s, h, False, None) == ""
+
+    h["content-length"] = ["foo"]
+    s = cStringIO.StringIO("testing")
+    tutils.raises(http.HttpError, http.read_http_body, s, h, False, None)
+
+    h["content-length"] = [5]
+    s = cStringIO.StringIO("testing")
+    assert len(http.read_http_body(s, h, False, None)) == 5
+    s = cStringIO.StringIO("testing")
+    tutils.raises(http.HttpError, http.read_http_body, s, h, False, 4)
+
+    h = odict.ODict()
+    s = cStringIO.StringIO("testing")
+    assert len(http.read_http_body(s, h, True, 4)) == 4
+    s = cStringIO.StringIO("testing")
+    assert len(http.read_http_body(s, h, True, 100)) == 7
+
+def test_parse_http_protocol():
+    assert http.parse_http_protocol("HTTP/1.1") == (1, 1)
+    assert http.parse_http_protocol("HTTP/0.0") == (0, 0)
+    assert not http.parse_http_protocol("foo/0.0")
+
+
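+# A CONNECT request line has the form "CONNECT host:port HTTP/x.y"; anything
+# else should be rejected.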
+def test_parse_init_connect():
+    assert http.parse_init_connect("CONNECT host.com:443 HTTP/1.0")
+    assert not http.parse_init_connect("bogus")
+    assert not http.parse_init_connect("GET host.com:443 HTTP/1.0")
+    assert not http.parse_init_connect("CONNECT host.com443 HTTP/1.0")
+    assert not http.parse_init_connect("CONNECT host.com:443 foo/1.0")
+
+
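+# Proxy requests carry an absolute URI in the request line, so the parser
+# returns method, scheme, host, port, path and the HTTP version.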
+def test_parse_init_proxy():
+    u = "GET http://foo.com:8888/test HTTP/1.1"
+    m, s, h, po, pa, httpversion = http.parse_init_proxy(u)
+    assert m == "GET"
+    assert s == "http"
+    assert h == "foo.com"
+    assert po == 8888
+    assert pa == "/test"
+    assert httpversion == (1, 1)
+
+    assert not http.parse_init_proxy("invalid")
+    assert not http.parse_init_proxy("GET invalid HTTP/1.1")
+    assert not http.parse_init_proxy("GET http://foo.com:8888/test foo/1.1")
+
+
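+# Plain (non-proxy) requests use an origin-form request line such as
+# "GET /test HTTP/1.1", parsed into method, path and HTTP version.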
+def test_parse_init_http():
+ u = "GET /test HTTP/1.1"
+ m, u, httpversion= http.parse_init_http(u)
+ assert m == "GET"
+ assert u == "/test"
+ assert httpversion == (1, 1)
+
+ assert not http.parse_init_http("invalid")
+ assert not http.parse_init_http("GET invalid HTTP/1.1")
+ assert not http.parse_init_http("GET /test foo/1.1")
+
+
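+# read_headers() returns headers as a list of [name, value] pairs. The test
+# data is written indented and run through textwrap.dedent, and a line that
+# starts with whitespace is treated as a folded continuation of the previous
+# header value.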
+class TestReadHeaders:
+    def test_read_simple(self):
+        data = """
+            Header: one
+            Header2: two
+            \r\n
+        """
+        data = textwrap.dedent(data)
+        data = data.strip()
+        s = cStringIO.StringIO(data)
+        h = http.read_headers(s)
+        assert h == [["Header", "one"], ["Header2", "two"]]
+
+    def test_read_multi(self):
+        data = """
+            Header: one
+            Header: two
+            \r\n
+        """
+        data = textwrap.dedent(data)
+        data = data.strip()
+        s = cStringIO.StringIO(data)
+        h = http.read_headers(s)
+        assert h == [["Header", "one"], ["Header", "two"]]
+
+    def test_read_continued(self):
+        data = """
+            Header: one
+            \ttwo
+            Header2: three
+            \r\n
+        """
+        data = textwrap.dedent(data)
+        data = data.strip()
+        s = cStringIO.StringIO(data)
+        h = http.read_headers(s)
+        assert h == [["Header", "one\r\n two"], ["Header2", "three"]]
+
+
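+# parse_url() splits a URL into (scheme, host, port, path), filling in the
+# default port (80 for http, 443 for https) and "/" as the default path.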
+def test_parse_url():
+    assert not http.parse_url("")
+
+    u = "http://foo.com:8888/test"
+    s, h, po, pa = http.parse_url(u)
+    assert s == "http"
+    assert h == "foo.com"
+    assert po == 8888
+    assert pa == "/test"
+
+    s, h, po, pa = http.parse_url("http://foo/bar")
+    assert s == "http"
+    assert h == "foo"
+    assert po == 80
+    assert pa == "/bar"
+
+    s, h, po, pa = http.parse_url("http://foo")
+    assert pa == "/"
+
+    s, h, po, pa = http.parse_url("https://foo")
+    assert po == 443
+
+    assert not http.parse_url("https://foo:bar")
+    assert not http.parse_url("https://foo:")
+