aboutsummaryrefslogtreecommitdiffstats
path: root/netlib/http
diff options
context:
space:
mode:
authorMaximilian Hils <git@maximilianhils.com>2015-09-15 19:12:15 +0200
committerMaximilian Hils <git@maximilianhils.com>2015-09-15 19:12:15 +0200
commit11e7f476bd4bbcd6d072fa3659f628ae3a19705d (patch)
tree7fe3f67bcf41af6c573e312ef4e6adfa18f9f870 /netlib/http
parent2f9c566e480c377566a0ae044d698a75b45cd54c (diff)
downloadmitmproxy-11e7f476bd4bbcd6d072fa3659f628ae3a19705d.tar.gz
mitmproxy-11e7f476bd4bbcd6d072fa3659f628ae3a19705d.tar.bz2
mitmproxy-11e7f476bd4bbcd6d072fa3659f628ae3a19705d.zip
wip
Diffstat (limited to 'netlib/http')
-rw-r--r--netlib/http/__init__.py9
-rw-r--r--netlib/http/authentication.py4
-rw-r--r--netlib/http/exceptions.py9
-rw-r--r--netlib/http/http1/__init__.py23
-rw-r--r--netlib/http/http1/assemble.py105
-rw-r--r--netlib/http/http1/protocol.py586
-rw-r--r--netlib/http/http1/read.py346
-rw-r--r--netlib/http/http2/__init__.py2
-rw-r--r--netlib/http/http2/connections.py (renamed from netlib/http/http2/protocol.py)0
-rw-r--r--netlib/http/http2/frames.py (renamed from netlib/http/http2/frame.py)0
-rw-r--r--netlib/http/models.py (renamed from netlib/http/semantics.py)221
11 files changed, 562 insertions, 743 deletions
diff --git a/netlib/http/__init__.py b/netlib/http/__init__.py
index 9b4b0e6b..0b1a0bc5 100644
--- a/netlib/http/__init__.py
+++ b/netlib/http/__init__.py
@@ -1,2 +1,7 @@
-from exceptions import *
-from semantics import *
+from .models import Request, Response, Headers, CONTENT_MISSING
+from . import http1, http2
+
+__all__ = [
+    "Request", "Response", "Headers", "CONTENT_MISSING",
+    "http1", "http2"
+]
diff --git a/netlib/http/authentication.py b/netlib/http/authentication.py
index fe1f0d14..2055f843 100644
--- a/netlib/http/authentication.py
+++ b/netlib/http/authentication.py
@@ -19,8 +19,8 @@ def parse_http_basic_auth(s):
def assemble_http_basic_auth(scheme, username, password):
- v = binascii.b2a_base64(username + ":" + password)
- return scheme + " " + v
+ v = binascii.b2a_base64(username + b":" + password)
+ return scheme + b" " + v
class NullProxyAuth(object):
diff --git a/netlib/http/exceptions.py b/netlib/http/exceptions.py
deleted file mode 100644
index 8a2bbebc..00000000
--- a/netlib/http/exceptions.py
+++ /dev/null
@@ -1,9 +0,0 @@
-class HttpError(Exception):
-
- def __init__(self, code, message):
- super(HttpError, self).__init__(message)
- self.code = code
-
-
-class HttpErrorConnClosed(HttpError):
- pass
diff --git a/netlib/http/http1/__init__.py b/netlib/http/http1/__init__.py
index 6b5043af..4d223f97 100644
--- a/netlib/http/http1/__init__.py
+++ b/netlib/http/http1/__init__.py
@@ -1 +1,22 @@
-from protocol import *
+from .read import (
+ read_request, read_request_head,
+ read_response, read_response_head,
+ read_message_body, read_message_body_chunked,
+ connection_close,
+ expected_http_body_size,
+)
+from .assemble import (
+ assemble_request, assemble_request_head,
+ assemble_response, assemble_response_head,
+)
+
+
+__all__ = [
+ "read_request", "read_request_head",
+ "read_response", "read_response_head",
+ "read_message_body", "read_message_body_chunked",
+ "connection_close",
+ "expected_http_body_size",
+ "assemble_request", "assemble_request_head",
+ "assemble_response", "assemble_response_head",
+]
diff --git a/netlib/http/http1/assemble.py b/netlib/http/http1/assemble.py
new file mode 100644
index 00000000..a3269eed
--- /dev/null
+++ b/netlib/http/http1/assemble.py
@@ -0,0 +1,105 @@
+from __future__ import absolute_import, print_function, division
+
+from ... import utils
+from ...exceptions import HttpException
+from .. import CONTENT_MISSING
+
+
+def assemble_request(request):
+ if request.body == CONTENT_MISSING:
+ raise HttpException("Cannot assemble flow with CONTENT_MISSING")
+ head = assemble_request_head(request)
+ return head + request.body
+
+
+def assemble_request_head(request):
+ first_line = _assemble_request_line(request)
+ headers = _assemble_request_headers(request)
+ return b"%s\r\n%s\r\n" % (first_line, headers)
+
+
+def assemble_response(response):
+ if response.body == CONTENT_MISSING:
+ raise HttpException("Cannot assemble flow with CONTENT_MISSING")
+ head = assemble_response_head(response)
+ return head + response.body
+
+
+def assemble_response_head(response):
+ first_line = _assemble_response_line(response)
+ headers = _assemble_response_headers(response)
+ return b"%s\r\n%s\r\n" % (first_line, headers)
+
+
+
+
+def _assemble_request_line(request, form=None):
+ if form is None:
+ form = request.form_out
+ if form == "relative":
+ return b"%s %s %s" % (
+ request.method,
+ request.path,
+ request.httpversion
+ )
+ elif form == "authority":
+ return b"%s %s:%d %s" % (
+ request.method,
+ request.host,
+ request.port,
+ request.httpversion
+ )
+ elif form == "absolute":
+ return b"%s %s://%s:%s%s %s" % (
+ request.method,
+ request.scheme,
+ request.host,
+ request.port,
+ request.path,
+ request.httpversion
+ )
+ else: # pragma: nocover
+ raise RuntimeError("Invalid request form")
+
+
+def _assemble_request_headers(request):
+ headers = request.headers.copy()
+ for k in request._headers_to_strip_off:
+ headers.pop(k, None)
+ if b"host" not in headers and request.scheme and request.host and request.port:
+ headers[b"Host"] = utils.hostport(
+ request.scheme,
+ request.host,
+ request.port
+ )
+
+ # If content is defined (i.e. not None or CONTENT_MISSING), we always
+ # add a content-length header.
+ if request.body or request.body == b"":
+ headers[b"Content-Length"] = str(len(request.body)).encode("ascii")
+
+    return bytes(headers)
+
+
+def _assemble_response_line(response):
+ return b"%s %s %s" % (
+ response.httpversion,
+ response.status_code,
+ response.msg,
+ )
+
+
+def _assemble_response_headers(response, preserve_transfer_encoding=False):
+ # TODO: Remove preserve_transfer_encoding
+ headers = response.headers.copy()
+ for k in response._headers_to_strip_off:
+ headers.pop(k, None)
+ if not preserve_transfer_encoding:
+ headers.pop(b"Transfer-Encoding", None)
+
+ # If body is defined (i.e. not None or CONTENT_MISSING), we always
+ # add a content-length header.
+ if response.body or response.body == b"":
+ headers[b"Content-Length"] = str(len(response.body)).encode("ascii")
+
+ return bytes(headers)
diff --git a/netlib/http/http1/protocol.py b/netlib/http/http1/protocol.py
deleted file mode 100644
index cf1dffa3..00000000
--- a/netlib/http/http1/protocol.py
+++ /dev/null
@@ -1,586 +0,0 @@
-from __future__ import (absolute_import, print_function, division)
-import string
-import sys
-import time
-
-from ... import utils, tcp, http
-from .. import semantics, Headers
-from ..exceptions import *
-
-
-class TCPHandler(object):
-
- def __init__(self, rfile, wfile=None):
- self.rfile = rfile
- self.wfile = wfile
-
-
-class HTTP1Protocol(semantics.ProtocolMixin):
-
- ALPN_PROTO_HTTP1 = 'http/1.1'
-
- def __init__(self, tcp_handler=None, rfile=None, wfile=None):
- self.tcp_handler = tcp_handler or TCPHandler(rfile, wfile)
-
- def read_request(
- self,
- include_body=True,
- body_size_limit=None,
- allow_empty=False,
- ):
- """
- Parse an HTTP request from a file stream
-
- Args:
- include_body (bool): Read response body as well
- body_size_limit (bool): Maximum body size
- wfile (file): If specified, HTTP Expect headers are handled
- automatically, by writing a HTTP 100 CONTINUE response to the stream.
-
- Returns:
- Request: The HTTP request
-
- Raises:
- HttpError: If the input is invalid.
- """
- timestamp_start = time.time()
- if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
- self.tcp_handler.rfile.reset_timestamps()
-
- httpversion, host, port, scheme, method, path, headers, body = (
- None, None, None, None, None, None, None, None)
-
- request_line = self._get_request_line()
- if not request_line:
- if allow_empty:
- return http.EmptyRequest()
- else:
- raise tcp.NetLibDisconnect()
-
- request_line_parts = self._parse_init(request_line)
- if not request_line_parts:
- raise HttpError(
- 400,
- "Bad HTTP request line: %s" % repr(request_line)
- )
- method, path, httpversion = request_line_parts
-
- if path == '*' or path.startswith("/"):
- form_in = "relative"
- if not utils.isascii(path):
- raise HttpError(
- 400,
- "Bad HTTP request line: %s" % repr(request_line)
- )
- elif method == 'CONNECT':
- form_in = "authority"
- r = self._parse_init_connect(request_line)
- if not r:
- raise HttpError(
- 400,
- "Bad HTTP request line: %s" % repr(request_line)
- )
- host, port, httpversion = r
- path = None
- else:
- form_in = "absolute"
- r = self._parse_init_proxy(request_line)
- if not r:
- raise HttpError(
- 400,
- "Bad HTTP request line: %s" % repr(request_line)
- )
- _, scheme, host, port, path, _ = r
-
- headers = self.read_headers()
- if headers is None:
- raise HttpError(400, "Invalid headers")
-
- expect_header = headers.get("expect", "").lower()
- if expect_header == "100-continue" and httpversion == (1, 1):
- self.tcp_handler.wfile.write(
- 'HTTP/1.1 100 Continue\r\n'
- '\r\n'
- )
- self.tcp_handler.wfile.flush()
- del headers['expect']
-
- if include_body:
- body = self.read_http_body(
- headers,
- body_size_limit,
- method,
- None,
- True
- )
-
- if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
- # more accurate timestamp_start
- timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
-
- timestamp_end = time.time()
-
- return http.Request(
- form_in,
- method,
- scheme,
- host,
- port,
- path,
- httpversion,
- headers,
- body,
- timestamp_start,
- timestamp_end,
- )
-
- def read_response(
- self,
- request_method,
- body_size_limit=None,
- include_body=True,
- ):
- """
- Returns an http.Response
-
- By default, both response header and body are read.
- If include_body=False is specified, body may be one of the
- following:
- - None, if the response is technically allowed to have a response body
- - "", if the response must not have a response body (e.g. it's a
- response to a HEAD request)
- """
- timestamp_start = time.time()
- if hasattr(self.tcp_handler.rfile, "reset_timestamps"):
- self.tcp_handler.rfile.reset_timestamps()
-
- line = self.tcp_handler.rfile.readline()
- # Possible leftover from previous message
- if line == "\r\n" or line == "\n":
- line = self.tcp_handler.rfile.readline()
- if not line:
- raise HttpErrorConnClosed(502, "Server disconnect.")
- parts = self.parse_response_line(line)
- if not parts:
- raise HttpError(502, "Invalid server response: %s" % repr(line))
- proto, code, msg = parts
- httpversion = self._parse_http_protocol(proto)
- if httpversion is None:
- raise HttpError(502, "Invalid HTTP version in line: %s" % repr(proto))
- headers = self.read_headers()
- if headers is None:
- raise HttpError(502, "Invalid headers.")
-
- if include_body:
- body = self.read_http_body(
- headers,
- body_size_limit,
- request_method,
- code,
- False
- )
- else:
- # if include_body==False then a None body means the body should be
- # read separately
- body = None
-
- if hasattr(self.tcp_handler.rfile, "first_byte_timestamp"):
- # more accurate timestamp_start
- timestamp_start = self.tcp_handler.rfile.first_byte_timestamp
-
- if include_body:
- timestamp_end = time.time()
- else:
- timestamp_end = None
-
- return http.Response(
- httpversion,
- code,
- msg,
- headers,
- body,
- timestamp_start=timestamp_start,
- timestamp_end=timestamp_end,
- )
-
- def assemble_request(self, request):
- assert isinstance(request, semantics.Request)
-
- if request.body == semantics.CONTENT_MISSING:
- raise http.HttpError(
- 502,
- "Cannot assemble flow with CONTENT_MISSING"
- )
- first_line = self._assemble_request_first_line(request)
- headers = self._assemble_request_headers(request)
- return "%s\r\n%s\r\n%s" % (first_line, headers, request.body)
-
- def assemble_response(self, response):
- assert isinstance(response, semantics.Response)
-
- if response.body == semantics.CONTENT_MISSING:
- raise http.HttpError(
- 502,
- "Cannot assemble flow with CONTENT_MISSING"
- )
- first_line = self._assemble_response_first_line(response)
- headers = self._assemble_response_headers(response)
- return "%s\r\n%s\r\n%s" % (first_line, headers, response.body)
-
- def read_headers(self):
- """
- Read a set of headers.
- Stop once a blank line is reached.
-
- Return a Header object, or None if headers are invalid.
- """
- ret = []
- while True:
- line = self.tcp_handler.rfile.readline()
- if not line or line == '\r\n' or line == '\n':
- break
- if line[0] in ' \t':
- if not ret:
- return None
- # continued header
- ret[-1][1] = ret[-1][1] + '\r\n ' + line.strip()
- else:
- i = line.find(':')
- # We're being liberal in what we accept, here.
- if i > 0:
- name = line[:i]
- value = line[i + 1:].strip()
- ret.append([name, value])
- else:
- return None
- return Headers(ret)
-
-
- def read_http_body(self, *args, **kwargs):
- return "".join(self.read_http_body_chunked(*args, **kwargs))
-
-
- def read_http_body_chunked(
- self,
- headers,
- limit,
- request_method,
- response_code,
- is_request,
- max_chunk_size=None
- ):
- """
- Read an HTTP message body:
- headers: A Header object
- limit: Size limit.
- is_request: True if the body to read belongs to a request, False
- otherwise
- """
- if max_chunk_size is None:
- max_chunk_size = limit or sys.maxsize
-
- expected_size = self.expected_http_body_size(
- headers, is_request, request_method, response_code
- )
-
- if expected_size is None:
- if self.has_chunked_encoding(headers):
- # Python 3: yield from
- for x in self._read_chunked(limit, is_request):
- yield x
- else: # pragma: nocover
- raise HttpError(
- 400 if is_request else 502,
- "Content-Length unknown but no chunked encoding"
- )
- elif expected_size >= 0:
- if limit is not None and expected_size > limit:
- raise HttpError(
- 400 if is_request else 509,
- "HTTP Body too large. Limit is %s, content-length was %s" % (
- limit, expected_size
- )
- )
- bytes_left = expected_size
- while bytes_left:
- chunk_size = min(bytes_left, max_chunk_size)
- content = self.tcp_handler.rfile.read(chunk_size)
- yield content
- bytes_left -= chunk_size
- else:
- bytes_left = limit or -1
- while bytes_left:
- chunk_size = min(bytes_left, max_chunk_size)
- content = self.tcp_handler.rfile.read(chunk_size)
- if not content:
- return
- yield content
- bytes_left -= chunk_size
- not_done = self.tcp_handler.rfile.read(1)
- if not_done:
- raise HttpError(
- 400 if is_request else 509,
- "HTTP Body too large. Limit is %s," % limit
- )
-
- @classmethod
- def expected_http_body_size(
- self,
- headers,
- is_request,
- request_method,
- response_code,
- ):
- """
- Returns the expected body length:
- - a positive integer, if the size is known in advance
- - None, if the size in unknown in advance (chunked encoding or invalid
- data)
- - -1, if all data should be read until end of stream.
-
- May raise HttpError.
- """
- # Determine response size according to
- # http://tools.ietf.org/html/rfc7230#section-3.3
- if request_method:
- request_method = request_method.upper()
-
- if (not is_request and (
- request_method == "HEAD" or
- (request_method == "CONNECT" and response_code == 200) or
- response_code in [204, 304] or
- 100 <= response_code <= 199)):
- return 0
- if self.has_chunked_encoding(headers):
- return None
- if "content-length" in headers:
- try:
- size = int(headers["content-length"])
- if size < 0:
- raise ValueError()
- return size
- except ValueError:
- return None
- if is_request:
- return 0
- return -1
-
-
- @classmethod
- def has_chunked_encoding(self, headers):
- return "chunked" in headers.get("transfer-encoding", "").lower()
-
-
- def _get_request_line(self):
- """
- Get a line, possibly preceded by a blank.
- """
- line = self.tcp_handler.rfile.readline()
- if line == "\r\n" or line == "\n":
- # Possible leftover from previous message
- line = self.tcp_handler.rfile.readline()
- return line
-
- def _read_chunked(self, limit, is_request):
- """
- Read a chunked HTTP body.
-
- May raise HttpError.
- """
- # FIXME: Should check if chunked is the final encoding in the headers
- # http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-16#section-3.3
- # 3.3 2.
- total = 0
- code = 400 if is_request else 502
- while True:
- line = self.tcp_handler.rfile.readline(128)
- if line == "":
- raise HttpErrorConnClosed(code, "Connection closed prematurely")
- if line != '\r\n' and line != '\n':
- try:
- length = int(line, 16)
- except ValueError:
- raise HttpError(
- code,
- "Invalid chunked encoding length: %s" % line
- )
- total += length
- if limit is not None and total > limit:
- msg = "HTTP Body too large. Limit is %s," \
- " chunked content longer than %s" % (limit, total)
- raise HttpError(code, msg)
- chunk = self.tcp_handler.rfile.read(length)
- suffix = self.tcp_handler.rfile.readline(5)
- if suffix != '\r\n':
- raise HttpError(code, "Malformed chunked body")
- if length == 0:
- return
- yield chunk
-
- @classmethod
- def _parse_http_protocol(self, line):
- """
- Parse an HTTP protocol declaration.
- Returns a (major, minor) tuple, or None.
- """
- if not line.startswith("HTTP/"):
- return None
- _, version = line.split('/', 1)
- if "." not in version:
- return None
- major, minor = version.split('.', 1)
- try:
- major = int(major)
- minor = int(minor)
- except ValueError:
- return None
- return major, minor
-
- @classmethod
- def _parse_init(self, line):
- try:
- method, url, protocol = string.split(line)
- except ValueError:
- return None
- httpversion = self._parse_http_protocol(protocol)
- if not httpversion:
- return None
- if not utils.isascii(method):
- return None
- return method, url, httpversion
-
- @classmethod
- def _parse_init_connect(self, line):
- """
- Returns (host, port, httpversion) if line is a valid CONNECT line.
- http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1
- """
- v = self._parse_init(line)
- if not v:
- return None
- method, url, httpversion = v
-
- if method.upper() != 'CONNECT':
- return None
- try:
- host, port = url.split(":")
- except ValueError:
- return None
- try:
- port = int(port)
- except ValueError:
- return None
- if not utils.is_valid_port(port):
- return None
- if not utils.is_valid_host(host):
- return None
- return host, port, httpversion
-
- @classmethod
- def _parse_init_proxy(self, line):
- v = self._parse_init(line)
- if not v:
- return None
- method, url, httpversion = v
-
- parts = utils.parse_url(url)
- if not parts:
- return None
- scheme, host, port, path = parts
- return method, scheme, host, port, path, httpversion
-
- @classmethod
- def _parse_init_http(self, line):
- """
- Returns (method, url, httpversion)
- """
- v = self._parse_init(line)
- if not v:
- return None
- method, url, httpversion = v
- if not utils.isascii(url):
- return None
- if not (url.startswith("/") or url == "*"):
- return None
- return method, url, httpversion
-
- @classmethod
- def connection_close(self, httpversion, headers):
- """
- Checks the message to see if the client connection should be closed
- according to RFC 2616 Section 8.1 Note that a connection should be
- closed as well if the response has been read until end of the stream.
- """
- # At first, check if we have an explicit Connection header.
- if "connection" in headers:
- toks = utils.get_header_tokens(headers, "connection")
- if "close" in toks:
- return True
- elif "keep-alive" in toks:
- return False
-
- # If we don't have a Connection header, HTTP 1.1 connections are assumed to
- # be persistent
- return httpversion != (1, 1)
-
- @classmethod
- def parse_response_line(self, line):
- parts = line.strip().split(" ", 2)
- if len(parts) == 2: # handle missing message gracefully
- parts.append("")
- if len(parts) != 3:
- return None
- proto, code, msg = parts
- try:
- code = int(code)
- except ValueError:
- return None
- return (proto, code, msg)
-
- @classmethod
- def _assemble_request_first_line(self, request):
- return request.legacy_first_line()
-
- def _assemble_request_headers(self, request):
- headers = request.headers.copy()
- for k in request._headers_to_strip_off:
- headers.pop(k, None)
- if 'host' not in headers and request.scheme and request.host and request.port:
- headers["Host"] = utils.hostport(
- request.scheme,
- request.host,
- request.port
- )
-
- # If content is defined (i.e. not None or CONTENT_MISSING), we always
- # add a content-length header.
- if request.body or request.body == "":
- headers["Content-Length"] = str(len(request.body))
-
- return str(headers)
-
- def _assemble_response_first_line(self, response):
- return 'HTTP/%s.%s %s %s' % (
- response.httpversion[0],
- response.httpversion[1],
- response.status_code,
- response.msg,
- )
-
- def _assemble_response_headers(
- self,
- response,
- preserve_transfer_encoding=False,
- ):
- headers = response.headers.copy()
- for k in response._headers_to_strip_off:
- headers.pop(k, None)
- if not preserve_transfer_encoding:
- headers.pop('Transfer-Encoding', None)
-
- # If body is defined (i.e. not None or CONTENT_MISSING), we always
- # add a content-length header.
- if response.body or response.body == "":
- headers["Content-Length"] = str(len(response.body))
-
- return str(headers)
diff --git a/netlib/http/http1/read.py b/netlib/http/http1/read.py
new file mode 100644
index 00000000..573bc739
--- /dev/null
+++ b/netlib/http/http1/read.py
@@ -0,0 +1,346 @@
+from __future__ import absolute_import, print_function, division
+import time
+import sys
+import re
+
+from ... import utils
+from ...exceptions import HttpReadDisconnect, HttpSyntaxException, HttpException
+from .. import Request, Response, Headers
+
+ALPN_PROTO_HTTP1 = 'http/1.1'
+
+
+def read_request(rfile, body_size_limit=None):
+ request = read_request_head(rfile)
+ request.body = read_message_body(rfile, request, limit=body_size_limit)
+ request.timestamp_end = time.time()
+ return request
+
+
+def read_request_head(rfile):
+ """
+ Parse an HTTP request head (request line + headers) from an input stream
+
+ Args:
+ rfile: The input stream
+ body_size_limit (bool): Maximum body size
+
+ Returns:
+ The HTTP request object
+
+ Raises:
+ HttpReadDisconnect: If no bytes can be read from rfile.
+ HttpSyntaxException: If the input is invalid.
+ HttpException: A different error occured.
+ """
+ timestamp_start = time.time()
+ if hasattr(rfile, "reset_timestamps"):
+ rfile.reset_timestamps()
+
+ form, method, scheme, host, port, path, http_version = _read_request_line(rfile)
+ headers = _read_headers(rfile)
+
+ if hasattr(rfile, "first_byte_timestamp"):
+ # more accurate timestamp_start
+ timestamp_start = rfile.first_byte_timestamp
+
+ return Request(
+ form, method, scheme, host, port, path, http_version, headers, None, timestamp_start
+ )
+
+
+def read_response(rfile, request, body_size_limit=None):
+ response = read_response_head(rfile)
+ response.body = read_message_body(rfile, request, response, body_size_limit)
+ response.timestamp_end = time.time()
+ return response
+
+
+def read_response_head(rfile):
+ timestamp_start = time.time()
+ if hasattr(rfile, "reset_timestamps"):
+ rfile.reset_timestamps()
+
+ http_version, status_code, message = _read_response_line(rfile)
+ headers = _read_headers(rfile)
+
+ if hasattr(rfile, "first_byte_timestamp"):
+ # more accurate timestamp_start
+ timestamp_start = rfile.first_byte_timestamp
+
+ return Response(
+ http_version,
+ status_code,
+ message,
+ headers,
+ None,
+ timestamp_start
+ )
+
+
+def read_message_body(*args, **kwargs):
+ chunks = read_message_body_chunked(*args, **kwargs)
+ return b"".join(chunks)
+
+
+def read_message_body_chunked(rfile, request, response=None, limit=None, max_chunk_size=None):
+ """
+ Read an HTTP message body:
+
+ Args:
+ If a request body should be read, only request should be passed.
+ If a response body should be read, both request and response should be passed.
+
+ Raises:
+ HttpException
+ """
+ if not response:
+ headers = request.headers
+ response_code = None
+ is_request = True
+ else:
+ headers = response.headers
+ response_code = response.status_code
+ is_request = False
+
+ if not limit or limit < 0:
+ limit = sys.maxsize
+ if not max_chunk_size:
+ max_chunk_size = limit
+
+ expected_size = expected_http_body_size(
+ headers, is_request, request.method, response_code
+ )
+
+ if expected_size is None:
+ for x in _read_chunked(rfile, limit):
+ yield x
+ elif expected_size >= 0:
+ if limit is not None and expected_size > limit:
+ raise HttpException(
+ "HTTP Body too large. "
+ "Limit is {}, content length was advertised as {}".format(limit, expected_size)
+ )
+ bytes_left = expected_size
+ while bytes_left:
+ chunk_size = min(bytes_left, max_chunk_size)
+ content = rfile.read(chunk_size)
+ yield content
+ bytes_left -= chunk_size
+ else:
+ bytes_left = limit
+ while bytes_left:
+ chunk_size = min(bytes_left, max_chunk_size)
+ content = rfile.read(chunk_size)
+ if not content:
+ return
+ yield content
+ bytes_left -= chunk_size
+ not_done = rfile.read(1)
+ if not_done:
+ raise HttpException("HTTP body too large. Limit is {}.".format(limit))
+
+
+def connection_close(http_version, headers):
+ """
+ Checks the message to see if the client connection should be closed
+ according to RFC 2616 Section 8.1.
+ """
+ # At first, check if we have an explicit Connection header.
+ if b"connection" in headers:
+ toks = utils.get_header_tokens(headers, "connection")
+ if b"close" in toks:
+ return True
+ elif b"keep-alive" in toks:
+ return False
+
+ # If we don't have a Connection header, HTTP 1.1 connections are assumed to
+ # be persistent
+ return http_version != (1, 1)
+
+
+def expected_http_body_size(
+ headers,
+ is_request,
+ request_method,
+ response_code,
+):
+ """
+ Returns the expected body length:
+ - a positive integer, if the size is known in advance
+ - None, if the size in unknown in advance (chunked encoding)
+ - -1, if all data should be read until end of stream.
+
+ Raises:
+ HttpSyntaxException, if the content length header is invalid
+ """
+ # Determine response size according to
+ # http://tools.ietf.org/html/rfc7230#section-3.3
+ if request_method:
+ request_method = request_method.upper()
+
+ is_empty_response = (not is_request and (
+ request_method == b"HEAD" or
+ 100 <= response_code <= 199 or
+ (response_code == 200 and request_method == b"CONNECT") or
+ response_code in (204, 304)
+ ))
+
+ if is_empty_response:
+ return 0
+ if is_request and headers.get(b"expect", b"").lower() == b"100-continue":
+ return 0
+ if b"chunked" in headers.get(b"transfer-encoding", b"").lower():
+ return None
+ if b"content-length" in headers:
+ try:
+ size = int(headers[b"content-length"])
+ if size < 0:
+ raise ValueError()
+ return size
+ except ValueError:
+ raise HttpSyntaxException("Unparseable Content Length")
+ if is_request:
+ return 0
+ return -1
+
+
+def _get_first_line(rfile):
+ line = rfile.readline()
+ if line == b"\r\n" or line == b"\n":
+ # Possible leftover from previous message
+ line = rfile.readline()
+ if not line:
+ raise HttpReadDisconnect()
+ return line
+
+
+def _read_request_line(rfile):
+ line = _get_first_line(rfile)
+
+ try:
+ method, path, http_version = line.strip().split(b" ")
+
+ if path == b"*" or path.startswith(b"/"):
+ form = "relative"
+ path.decode("ascii") # should not raise a ValueError
+ scheme, host, port = None, None, None
+ elif method == b"CONNECT":
+ form = "authority"
+ host, port = _parse_authority_form(path)
+ scheme, path = None, None
+ else:
+ form = "absolute"
+ scheme, host, port, path = utils.parse_url(path)
+
+ except ValueError:
+ raise HttpSyntaxException("Bad HTTP request line: {}".format(line))
+
+ return form, method, scheme, host, port, path, http_version
+
+
+def _parse_authority_form(hostport):
+ """
+ Returns (host, port) if hostport is a valid authority-form host specification.
+ http://tools.ietf.org/html/draft-luotonen-web-proxy-tunneling-01 section 3.1
+
+ Raises:
+ ValueError, if the input is malformed
+ """
+ try:
+ host, port = hostport.split(b":")
+ port = int(port)
+ if not utils.is_valid_host(host) or not utils.is_valid_port(port):
+ raise ValueError()
+ except ValueError:
+ raise ValueError("Invalid host specification: {}".format(hostport))
+
+ return host, port
+
+
+def _read_response_line(rfile):
+ line = _get_first_line(rfile)
+
+ try:
+
+ parts = line.strip().split(b" ")
+ if len(parts) == 2: # handle missing message gracefully
+ parts.append(b"")
+
+ http_version, status_code, message = parts
+ status_code = int(status_code)
+ _check_http_version(http_version)
+
+ except ValueError:
+ raise HttpSyntaxException("Bad HTTP response line: {}".format(line))
+
+ return http_version, status_code, message
+
+
+def _check_http_version(http_version):
+ if not re.match(rb"^HTTP/\d\.\d$", http_version):
+ raise HttpSyntaxException("Unknown HTTP version: {}".format(http_version))
+
+
+def _read_headers(rfile):
+ """
+ Read a set of headers.
+ Stop once a blank line is reached.
+
+ Returns:
+ A headers object
+
+ Raises:
+ HttpSyntaxException
+ """
+ ret = []
+ while True:
+ line = rfile.readline()
+ if not line or line == b"\r\n" or line == b"\n":
+ break
+ if line[0] in b" \t":
+ if not ret:
+ raise HttpSyntaxException("Invalid headers")
+ # continued header
+ ret[-1][1] = ret[-1][1] + b'\r\n ' + line.strip()
+ else:
+ try:
+ name, value = line.split(b":", 1)
+ value = value.strip()
+ ret.append([name, value])
+ except ValueError:
+ raise HttpSyntaxException("Invalid headers")
+ return Headers(ret)
+
+
+def _read_chunked(rfile, limit):
+ """
+ Read a HTTP body with chunked transfer encoding.
+
+ Args:
+ rfile: the input file
+ limit: A positive integer
+ """
+ total = 0
+ while True:
+ line = rfile.readline(128)
+ if line == b"":
+ raise HttpException("Connection closed prematurely")
+ if line != b"\r\n" and line != b"\n":
+ try:
+ length = int(line, 16)
+ except ValueError:
+ raise HttpSyntaxException("Invalid chunked encoding length: {}".format(line))
+ total += length
+ if total > limit:
+ raise HttpException(
+ "HTTP Body too large. Limit is {}, "
+ "chunked content longer than {}".format(limit, total)
+ )
+ chunk = rfile.read(length)
+ suffix = rfile.readline(5)
+ if suffix != b"\r\n":
+ raise HttpSyntaxException("Malformed chunked body")
+ if length == 0:
+ return
+ yield chunk
diff --git a/netlib/http/http2/__init__.py b/netlib/http/http2/__init__.py
index 5acf7696..e69de29b 100644
--- a/netlib/http/http2/__init__.py
+++ b/netlib/http/http2/__init__.py
@@ -1,2 +0,0 @@
-from frame import *
-from protocol import *
diff --git a/netlib/http/http2/protocol.py b/netlib/http/http2/connections.py
index b6d376d3..b6d376d3 100644
--- a/netlib/http/http2/protocol.py
+++ b/netlib/http/http2/connections.py
diff --git a/netlib/http/http2/frame.py b/netlib/http/http2/frames.py
index b36b3adf..b36b3adf 100644
--- a/netlib/http/http2/frame.py
+++ b/netlib/http/http2/frames.py
diff --git a/netlib/http/semantics.py b/netlib/http/models.py
index 5bb098a7..bd5863b1 100644
--- a/netlib/http/semantics.py
+++ b/netlib/http/models.py
@@ -1,20 +1,25 @@
-from __future__ import (absolute_import, print_function, division)
-import UserDict
+from __future__ import absolute_import, print_function, division
import copy
-import urllib
-import urlparse
-from .. import odict
-from . import cookies, exceptions
-from netlib import utils, encoding
+from ..odict import ODict
+from .. import utils, encoding
+from ..utils import always_bytes, always_byte_args
+from . import cookies
-HDR_FORM_URLENCODED = "application/x-www-form-urlencoded"
-HDR_FORM_MULTIPART = "multipart/form-data"
+import six
+from six.moves import urllib
+try:
+ from collections import MutableMapping
+except ImportError:
+ from collections.abc import MutableMapping
+
+HDR_FORM_URLENCODED = b"application/x-www-form-urlencoded"
+HDR_FORM_MULTIPART = b"multipart/form-data"
CONTENT_MISSING = 0
-class Headers(object, UserDict.DictMixin):
+class Headers(MutableMapping, object):
"""
Header class which allows both convenient access to individual headers as well as
direct access to the underlying raw data. Provides a full dictionary interface.
@@ -62,10 +67,12 @@ class Headers(object, UserDict.DictMixin):
For use with the "Set-Cookie" header, see :py:meth:`get_all`.
"""
+ @always_byte_args("ascii")
def __init__(self, fields=None, **headers):
"""
Args:
- fields: (optional) list of ``(name, value)`` header tuples, e.g. ``[("Host","example.com")]``
+ fields: (optional) list of ``(name, value)`` header tuples,
+ e.g. ``[("Host","example.com")]``. All names and values must be bytes.
**headers: Additional headers to set. Will overwrite existing values from `fields`.
For convenience, underscores in header names will be transformed to dashes -
this behaviour does not extend to other methods.
@@ -76,21 +83,25 @@ class Headers(object, UserDict.DictMixin):
# content_type -> content-type
headers = {
- name.replace("_", "-"): value
- for name, value in headers.iteritems()
+ name.encode("ascii").replace(b"_", b"-"): value
+ for name, value in six.iteritems(headers)
}
self.update(headers)
- def __str__(self):
- return "\r\n".join(": ".join(field) for field in self.fields) + "\r\n"
+ def __bytes__(self):
+ return b"\r\n".join(b": ".join(field) for field in self.fields) + b"\r\n"
+
+ if six.PY2:
+ __str__ = __bytes__
+ @always_byte_args("ascii")
def __getitem__(self, name):
values = self.get_all(name)
if not values:
raise KeyError(name)
- else:
- return ", ".join(values)
+ return b", ".join(values)
+ @always_byte_args("ascii")
def __setitem__(self, name, value):
idx = self._index(name)
@@ -101,6 +112,7 @@ class Headers(object, UserDict.DictMixin):
else:
self.fields.append([name, value])
+ @always_byte_args("ascii")
def __delitem__(self, name):
if name not in self:
raise KeyError(name)
@@ -110,6 +122,19 @@ class Headers(object, UserDict.DictMixin):
if name != field[0].lower()
]
+ def __iter__(self):
+ seen = set()
+ for name, _ in self.fields:
+ name_lower = name.lower()
+ if name_lower not in seen:
+ seen.add(name_lower)
+ yield name
+
+ def __len__(self):
+ return len(set(name.lower() for name, _ in self.fields))
+
+    # NOTE(review): MutableMapping sets __hash__ to None (unhashable);
+
def _index(self, name):
name = name.lower()
for i, field in enumerate(self.fields):
@@ -117,16 +142,6 @@ class Headers(object, UserDict.DictMixin):
return i
return None
- def keys(self):
- seen = set()
- names = []
- for name, _ in self.fields:
- name_lower = name.lower()
- if name_lower not in seen:
- seen.add(name_lower)
- names.append(name)
- return names
-
def __eq__(self, other):
if isinstance(other, Headers):
return self.fields == other.fields
@@ -135,6 +150,7 @@ class Headers(object, UserDict.DictMixin):
def __ne__(self, other):
return not self.__eq__(other)
+ @always_byte_args("ascii")
def get_all(self, name):
"""
Like :py:meth:`get`, but does not fold multiple headers into a single one.
@@ -142,8 +158,8 @@ class Headers(object, UserDict.DictMixin):
See also: https://tools.ietf.org/html/rfc7230#section-3.2.2
"""
- name = name.lower()
- values = [value for n, value in self.fields if n.lower() == name]
+ name_lower = name.lower()
+ values = [value for n, value in self.fields if n.lower() == name_lower]
return values
def set_all(self, name, values):
@@ -151,6 +167,8 @@ class Headers(object, UserDict.DictMixin):
Explicitly set multiple headers for the given key.
See: :py:meth:`get_all`
"""
+ name = always_bytes(name, "ascii")
+ values = (always_bytes(value, "ascii") for value in values)
if name in self:
del self[name]
self.fields.extend(
@@ -172,28 +190,6 @@ class Headers(object, UserDict.DictMixin):
return cls([list(field) for field in state])
-class ProtocolMixin(object):
- def read_request(self, *args, **kwargs): # pragma: no cover
- raise NotImplementedError
-
- def read_response(self, *args, **kwargs): # pragma: no cover
- raise NotImplementedError
-
- def assemble(self, message):
- if isinstance(message, Request):
- return self.assemble_request(message)
- elif isinstance(message, Response):
- return self.assemble_response(message)
- else:
- raise ValueError("HTTP message not supported.")
-
- def assemble_request(self, *args, **kwargs): # pragma: no cover
- raise NotImplementedError
-
- def assemble_response(self, *args, **kwargs): # pragma: no cover
- raise NotImplementedError
-
-
class Request(object):
# This list is adopted legacy code.
# We probably don't need to strip off keep-alive.
@@ -248,42 +244,14 @@ class Request(object):
return False
def __repr__(self):
- # return "Request(%s - %s, %s)" % (self.method, self.host, self.path)
-
- return "<HTTPRequest: {0}>".format(
- self.legacy_first_line()[:-9]
- )
-
- def legacy_first_line(self, form=None):
- if form is None:
- form = self.form_out
- if form == "relative":
- return '%s %s HTTP/%s.%s' % (
- self.method,
- self.path,
- self.httpversion[0],
- self.httpversion[1],
- )
- elif form == "authority":
- return '%s %s:%s HTTP/%s.%s' % (
- self.method,
- self.host,
- self.port,
- self.httpversion[0],
- self.httpversion[1],
- )
- elif form == "absolute":
- return '%s %s://%s:%s%s HTTP/%s.%s' % (
- self.method,
- self.scheme,
- self.host,
- self.port,
- self.path,
- self.httpversion[0],
- self.httpversion[1],
- )
+ if self.host and self.port:
+ hostport = "{}:{}".format(self.host, self.port)
else:
- raise exceptions.HttpError(400, "Invalid request form")
+ hostport = ""
+ path = self.path or ""
+ return "HTTPRequest({} {}{})".format(
+ self.method, hostport, path
+ )
def anticache(self):
"""
@@ -336,7 +304,7 @@ class Request(object):
return self.get_form_urlencoded()
elif HDR_FORM_MULTIPART in self.headers.get("content-type","").lower():
return self.get_form_multipart()
- return odict.ODict([])
+ return ODict([])
def get_form_urlencoded(self):
"""
@@ -345,16 +313,16 @@ class Request(object):
indicates non-form data.
"""
if self.body and HDR_FORM_URLENCODED in self.headers.get("content-type","").lower():
- return odict.ODict(utils.urldecode(self.body))
- return odict.ODict([])
+ return ODict(utils.urldecode(self.body))
+ return ODict([])
def get_form_multipart(self):
if self.body and HDR_FORM_MULTIPART in self.headers.get("content-type","").lower():
- return odict.ODict(
+ return ODict(
utils.multipartdecode(
self.headers,
self.body))
- return odict.ODict([])
+ return ODict([])
def set_form_urlencoded(self, odict):
"""
@@ -373,8 +341,8 @@ class Request(object):
Components are unquoted.
"""
- _, _, path, _, _, _ = urlparse.urlparse(self.url)
- return [urllib.unquote(i) for i in path.split("/") if i]
+        _, _, path, _, _, _ = urllib.parse.urlparse(self.url)
+        return [urllib.parse.unquote(i) for i in path.split(b"/") if i]  # TODO(review): py3 < 3.9 unquote() rejects bytes — use unquote_to_bytes() or decode first
def set_path_components(self, lst):
"""
@@ -382,10 +350,10 @@ class Request(object):
Components are quoted.
"""
- lst = [urllib.quote(i, safe="") for i in lst]
- path = "/" + "/".join(lst)
- scheme, netloc, _, params, query, fragment = urlparse.urlparse(self.url)
- self.url = urlparse.urlunparse(
+        lst = [urllib.parse.quote(i, safe="") for i in lst]  # TODO(review): py3 quote() returns str, so the bytes join below raises TypeError — encode each component
+        path = b"/" + b"/".join(lst)
+ scheme, netloc, _, params, query, fragment = urllib.parse.urlparse(self.url)
+ self.url = urllib.parse.urlunparse(
[scheme, netloc, path, params, query, fragment]
)
@@ -393,18 +361,18 @@ class Request(object):
"""
Gets the request query string. Returns an ODict object.
"""
- _, _, _, _, query, _ = urlparse.urlparse(self.url)
+ _, _, _, _, query, _ = urllib.parse.urlparse(self.url)
if query:
- return odict.ODict(utils.urldecode(query))
- return odict.ODict([])
+ return ODict(utils.urldecode(query))
+ return ODict([])
def set_query(self, odict):
"""
Takes an ODict object, and sets the request query string.
"""
- scheme, netloc, path, params, _, fragment = urlparse.urlparse(self.url)
+ scheme, netloc, path, params, _, fragment = urllib.parse.urlparse(self.url)
query = utils.urlencode(odict.lst)
- self.url = urlparse.urlunparse(
+ self.url = urllib.parse.urlunparse(
[scheme, netloc, path, params, query, fragment]
)
@@ -421,18 +389,13 @@ class Request(object):
but not the resolved name. This is disabled by default, as an
attacker may spoof the host header to confuse an analyst.
"""
- host = None
- if hostheader:
- host = self.headers.get("Host")
- if not host:
- host = self.host
- if host:
+ if hostheader and b"Host" in self.headers:
try:
- return host.encode("idna")
+ return self.headers[b"Host"].decode("idna")
except ValueError:
- return host
- else:
- return None
+ pass
+ if self.host:
+ return self.host.decode("idna")
def pretty_url(self, hostheader):
if self.form_out == "authority": # upstream proxy mode
@@ -446,7 +409,7 @@ class Request(object):
"""
Returns a possibly empty netlib.odict.ODict object.
"""
- ret = odict.ODict()
+ ret = ODict()
for i in self.headers.get_all("cookie"):
ret.extend(cookies.parse_cookie_header(i))
return ret
@@ -477,8 +440,10 @@ class Request(object):
Parses a URL specification, and updates the Request's information
accordingly.
- Returns False if the URL was invalid, True if the request succeeded.
+ Raises:
+ ValueError if the URL was invalid
"""
+ # TODO: Should handle incoming unicode here.
parts = utils.parse_url(url)
if not parts:
raise ValueError("Invalid URL: %s" % url)
@@ -495,32 +460,6 @@ class Request(object):
self.body = content
-class EmptyRequest(Request):
- def __init__(
- self,
- form_in="",
- method="",
- scheme="",
- host="",
- port="",
- path="",
- httpversion=(0, 0),
- headers=None,
- body=""
- ):
- super(EmptyRequest, self).__init__(
- form_in=form_in,
- method=method,
- scheme=scheme,
- host=host,
- port=port,
- path=path,
- httpversion=httpversion,
- headers=headers,
- body=body,
- )
-
-
class Response(object):
_headers_to_strip_off = [
'Proxy-Connection',
@@ -591,7 +530,7 @@ class Response(object):
if v:
name, value, attrs = v
ret.append([name, [value, attrs]])
- return odict.ODict(ret)
+ return ODict(ret)
def set_cookies(self, odict):
"""