about summary refs log tree commit diff stats
path: root/libmproxy/protocol/http.py
diff options
context:
space:
mode:
Diffstat (limited to 'libmproxy/protocol/http.py')
-rw-r--r--  libmproxy/protocol/http.py  234
1 file changed, 33 insertions, 201 deletions
diff --git a/libmproxy/protocol/http.py b/libmproxy/protocol/http.py
index 12d09e71..f3240b85 100644
--- a/libmproxy/protocol/http.py
+++ b/libmproxy/protocol/http.py
@@ -1,26 +1,30 @@
from __future__ import (absolute_import, print_function, division)
+
import sys
import traceback
-
import six
from netlib import tcp
from netlib.exceptions import HttpException, HttpReadDisconnect, NetlibException
-from netlib.http import http1, Headers
-from netlib.http import CONTENT_MISSING
-from netlib.tcp import Address
-from netlib.http.http2.connections import HTTP2Protocol
-from netlib.http.http2.frame import GoAwayFrame, PriorityFrame, WindowUpdateFrame
+from netlib.http import Headers, CONTENT_MISSING
+
+from h2.exceptions import H2Error
+
from .. import utils
from ..exceptions import HttpProtocolException, ProtocolException
from ..models import (
- HTTPFlow, HTTPRequest, HTTPResponse, make_error_response, make_connect_response, Error, expect_continue_response
+ HTTPFlow,
+ HTTPResponse,
+ make_error_response,
+ make_connect_response,
+ Error,
+ expect_continue_response
)
+
from .base import Layer, Kill
-class _HttpLayer(Layer):
- supports_streaming = False
+class _HttpTransmissionLayer(Layer):
def read_request(self):
raise NotImplementedError()
@@ -32,37 +36,18 @@ class _HttpLayer(Layer):
raise NotImplementedError()
def read_response(self, request):
- raise NotImplementedError()
-
- def send_response(self, response):
- raise NotImplementedError()
-
- def check_close_connection(self, flow):
- raise NotImplementedError()
-
-
-class _StreamingHttpLayer(_HttpLayer):
- supports_streaming = True
-
- def read_response_headers(self):
- raise NotImplementedError
-
- def read_response_body(self, request, response):
- raise NotImplementedError()
- yield "this is a generator" # pragma: no cover
-
- def read_response(self, request):
response = self.read_response_headers()
response.data.content = b"".join(
self.read_response_body(request, response)
)
return response
- def send_response_headers(self, response):
- raise NotImplementedError
+ def read_response_headers(self):
+ raise NotImplementedError()
- def send_response_body(self, response, chunks):
+ def read_response_body(self, request, response):
raise NotImplementedError()
+ yield "this is a generator" # pragma: no cover
def send_response(self, response):
if response.content == CONTENT_MISSING:
@@ -70,164 +55,14 @@ class _StreamingHttpLayer(_HttpLayer):
self.send_response_headers(response)
self.send_response_body(response, [response.content])
-
-class Http1Layer(_StreamingHttpLayer):
-
- def __init__(self, ctx, mode):
- super(Http1Layer, self).__init__(ctx)
- self.mode = mode
-
- def read_request(self):
- req = http1.read_request(self.client_conn.rfile, body_size_limit=self.config.body_size_limit)
- return HTTPRequest.wrap(req)
-
- def read_request_body(self, request):
- expected_size = http1.expected_http_body_size(request)
- return http1.read_body(self.client_conn.rfile, expected_size, self.config.body_size_limit)
-
- def send_request(self, request):
- self.server_conn.wfile.write(http1.assemble_request(request))
- self.server_conn.wfile.flush()
-
- def read_response_headers(self):
- resp = http1.read_response_head(self.server_conn.rfile)
- return HTTPResponse.wrap(resp)
-
- def read_response_body(self, request, response):
- expected_size = http1.expected_http_body_size(request, response)
- return http1.read_body(self.server_conn.rfile, expected_size, self.config.body_size_limit)
-
def send_response_headers(self, response):
- raw = http1.assemble_response_head(response)
- self.client_conn.wfile.write(raw)
- self.client_conn.wfile.flush()
+ raise NotImplementedError()
def send_response_body(self, response, chunks):
- for chunk in http1.assemble_body(response.headers, chunks):
- self.client_conn.wfile.write(chunk)
- self.client_conn.wfile.flush()
-
- def check_close_connection(self, flow):
- request_close = http1.connection_close(
- flow.request.http_version,
- flow.request.headers
- )
- response_close = http1.connection_close(
- flow.response.http_version,
- flow.response.headers
- )
- read_until_eof = http1.expected_http_body_size(flow.request, flow.response) == -1
- close_connection = request_close or response_close or read_until_eof
- if flow.request.form_in == "authority" and flow.response.status_code == 200:
- # Workaround for https://github.com/mitmproxy/mitmproxy/issues/313:
- # Charles Proxy sends a CONNECT response with HTTP/1.0
- # and no Content-Length header
-
- return False
- return close_connection
-
- def __call__(self):
- layer = HttpLayer(self, self.mode)
- layer()
-
-
-# TODO: The HTTP2 layer is missing multiplexing, which requires a major rewrite.
-class Http2Layer(_HttpLayer):
-
- def __init__(self, ctx, mode):
- super(Http2Layer, self).__init__(ctx)
- self.mode = mode
- self.client_protocol = HTTP2Protocol(self.client_conn, is_server=True,
- unhandled_frame_cb=self.handle_unexpected_frame_from_client)
- self.server_protocol = HTTP2Protocol(self.server_conn, is_server=False,
- unhandled_frame_cb=self.handle_unexpected_frame_from_server)
-
- def read_request(self):
- request = HTTPRequest.from_protocol(
- self.client_protocol,
- body_size_limit=self.config.body_size_limit
- )
- self._stream_id = request.stream_id
- return request
-
- def send_request(self, message):
- # TODO: implement flow control and WINDOW_UPDATE frames
- self.server_conn.send(self.server_protocol.assemble(message))
-
- def read_response(self, request):
- return HTTPResponse.from_protocol(
- self.server_protocol,
- request_method=request.method,
- body_size_limit=self.config.body_size_limit,
- include_body=True,
- stream_id=self._stream_id
- )
-
- def send_response(self, message):
- # TODO: implement flow control to prevent client buffer filling up
- # maintain a send buffer size, and read WindowUpdateFrames from client to increase the send buffer
- self.client_conn.send(self.client_protocol.assemble(message))
+ raise NotImplementedError()
def check_close_connection(self, flow):
- # TODO: add a timer to disconnect after a 10 second timeout
- return False
-
- def connect(self):
- self.ctx.connect()
- self.server_protocol = HTTP2Protocol(self.server_conn, is_server=False,
- unhandled_frame_cb=self.handle_unexpected_frame_from_server)
- self.server_protocol.perform_connection_preface()
-
- def set_server(self, *args, **kwargs):
- self.ctx.set_server(*args, **kwargs)
- self.server_protocol = HTTP2Protocol(self.server_conn, is_server=False,
- unhandled_frame_cb=self.handle_unexpected_frame_from_server)
- self.server_protocol.perform_connection_preface()
-
- def __call__(self):
- self.server_protocol.perform_connection_preface()
- layer = HttpLayer(self, self.mode)
- layer()
-
- # terminate the connection
- self.client_conn.send(GoAwayFrame().to_bytes())
-
- def handle_unexpected_frame_from_client(self, frame):
- if isinstance(frame, WindowUpdateFrame):
- # Clients are sending WindowUpdate frames depending on their flow control algorithm.
- # Since we cannot predict these frames, and we do not need to respond to them,
- # simply accept them, and hide them from the log.
- # Ideally we should keep track of our own flow control window and
- # stall transmission if the outgoing flow control buffer is full.
- return
- if isinstance(frame, PriorityFrame):
- # Clients are sending Priority frames depending on their implementation.
- # The RFC does not clearly state when or which priority preferences should be set.
- # Since we cannot predict these frames, and we do not need to respond to them,
- # simply accept them, and hide them from the log.
- # Ideally we should forward them to the server.
- return
- if isinstance(frame, GoAwayFrame):
- # Client wants to terminate the connection,
- # relay it to the server.
- self.server_conn.send(frame.to_bytes())
- return
- self.log("Unexpected HTTP2 frame from client: %s" % frame.human_readable(), "info")
-
- def handle_unexpected_frame_from_server(self, frame):
- if isinstance(frame, WindowUpdateFrame):
- # Servers are sending WindowUpdate frames depending on their flow control algorithm.
- # Since we cannot predict these frames, and we do not need to respond to them,
- # simply accept them, and hide them from the log.
- # Ideally we should keep track of our own flow control window and
- # stall transmission if the outgoing flow control buffer is full.
- return
- if isinstance(frame, GoAwayFrame):
- # Server wants to terminate the connection,
- # relay it to the client.
- self.client_conn.send(frame.to_bytes())
- return
- self.log("Unexpected HTTP2 frame from server: %s" % frame.human_readable(), "info")
+ raise NotImplementedError()
class ConnectServerConnection(object):
@@ -285,7 +120,7 @@ class UpstreamConnectLayer(Layer):
def set_server(self, address, server_tls=None, sni=None):
if self.ctx.server_conn:
self.ctx.disconnect()
- address = Address.wrap(address)
+ address = tcp.Address.wrap(address)
self.connect_request.host = address.host
self.connect_request.port = address.port
self.server_conn.address = address
@@ -400,7 +235,8 @@ class HttpLayer(Layer):
try:
response = make_error_response(code, message)
self.send_response(response)
- except NetlibException:
+ except (NetlibException, H2Error):
+ self.log(traceback.format_exc(), "debug")
pass
def change_upstream_proxy_server(self, address):
@@ -420,7 +256,7 @@ class HttpLayer(Layer):
layer()
def send_response_to_client(self, flow):
- if not (self.supports_streaming and flow.response.stream):
+ if not flow.response.stream:
# no streaming:
# we already received the full response from the server and can
# send it to the client straight away.
@@ -441,10 +277,7 @@ class HttpLayer(Layer):
def get_response_from_server(self, flow):
def get_response():
self.send_request(flow.request)
- if self.supports_streaming:
- flow.response = self.read_response_headers()
- else:
- flow.response = self.read_response(flow.request)
+ flow.response = self.read_response_headers()
try:
get_response()
@@ -474,15 +307,14 @@ class HttpLayer(Layer):
if flow == Kill:
raise Kill()
- if self.supports_streaming:
- if flow.response.stream:
- flow.response.data.content = CONTENT_MISSING
- else:
- flow.response.data.content = b"".join(self.read_response_body(
- flow.request,
- flow.response
- ))
- flow.response.timestamp_end = utils.timestamp()
+ if flow.response.stream:
+ flow.response.data.content = CONTENT_MISSING
+ else:
+ flow.response.data.content = b"".join(self.read_response_body(
+ flow.request,
+ flow.response
+ ))
+ flow.response.timestamp_end = utils.timestamp()
# no further manipulation of self.server_conn beyond this point
# we can safely set it as the final attribute value here.