| author | Aldo Cortesi <aldo@corte.si> | 2015-08-01 11:38:33 +1200 |
|---|---|---|
| committer | Aldo Cortesi <aldo@corte.si> | 2015-08-01 11:38:33 +1200 |
| commit | c31b6c3c36f681d1dbc3ce11922741b7e1e41837 (patch) | |
| tree | f63a0f56534d16437aaa5464cf585cac82f90985 /libmproxy/protocol/http.py | |
| parent | cdc84f52d213cb2b2b2a06a17378ebe757908865 (diff) | |
| parent | 4f38c6b90e239d192863dee271e267b498c72206 (diff) | |
| download | mitmproxy-c31b6c3c36f681d1dbc3ce11922741b7e1e41837.tar.gz mitmproxy-c31b6c3c36f681d1dbc3ce11922741b7e1e41837.tar.bz2 mitmproxy-c31b6c3c36f681d1dbc3ce11922741b7e1e41837.zip | |
Merge pull request #698 from Kriechi/http2-wip
[WIP] Protocol Refactoring for HTTP/2
Diffstat (limited to 'libmproxy/protocol/http.py')
-rw-r--r-- | libmproxy/protocol/http.py | 1052
1 file changed, 100 insertions, 952 deletions
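The heart of the refactoring is visible in the first hunk of send_connect_request() below: requests and responses no longer serialize and parse themselves (assemble() / HTTPResponse.from_stream() on a raw rfile); instead a protocol object from netlib (http1.HTTP1Protocol, and now http2.HTTP2Protocol) is bound to the connection and owns the wire format, and resp.code becomes resp.status_code. A condensed before/after sketch of that hunk — conn and upstream_request stand for the established upstream connection and the prepared CONNECT request, and the absolute import paths are inferred from the relative imports in the diff:

```python
from netlib.http import http1
from libmproxy import proxy
from libmproxy.protocol.http_wrappers import HTTPResponse  # new home of the wrapper classes

# Old style (removed below): messages assembled/parsed themselves on a raw file object.
#   conn.send(upstream_request.assemble())
#   resp = HTTPResponse.from_stream(conn.rfile, upstream_request.method)

# New style (added below): a protocol object bound to the connection does the wire-level work,
# so HTTP/2 can later slot in behind the same interface.
protocol = http1.HTTP1Protocol(conn)
conn.send(protocol.assemble(upstream_request))
resp = HTTPResponse.from_protocol(protocol, upstream_request.method)

if resp.status_code != 200:  # renamed from resp.code in this PR
    raise proxy.ProxyError(
        resp.status_code,
        "Cannot establish SSL connection with upstream proxy: \r\n" + repr(resp)
    )
```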
diff --git a/libmproxy/protocol/http.py b/libmproxy/protocol/http.py index f2ac5acc..35fd7d28 100644 --- a/libmproxy/protocol/http.py +++ b/libmproxy/protocol/http.py @@ -9,16 +9,18 @@ from email.utils import parsedate_tz, formatdate, mktime_tz import netlib from netlib import http, tcp, odict, utils -from netlib.http import cookies, http1 +from netlib.http import cookies, http1, http2 +from netlib.http.semantics import CONTENT_MISSING from .tcp import TCPHandler from .primitives import KILL, ProtocolHandler, Flow, Error from ..proxy.connection import ServerConnection from .. import encoding, utils, controller, stateobject, proxy +from .http_wrappers import decoded, HTTPRequest, HTTPResponse + HDR_FORM_URLENCODED = "application/x-www-form-urlencoded" HDR_FORM_MULTIPART = "multipart/form-data" -CONTENT_MISSING = 0 class KillSignal(Exception): @@ -37,13 +39,14 @@ def send_connect_request(conn, host, port, update_state=True): odict.ODictCaseless(), "" ) - conn.send(upstream_request.assemble()) - resp = HTTPResponse.from_stream(conn.rfile, upstream_request.method) - if resp.code != 200: - raise proxy.ProxyError(resp.code, + protocol = http1.HTTP1Protocol(conn) + conn.send(protocol.assemble(upstream_request)) + resp = HTTPResponse.from_protocol(protocol, upstream_request.method) + if resp.status_code != 200: + raise proxy.ProxyError(resp.status_code, "Cannot establish SSL " + "connection with upstream proxy: \r\n" + - str(resp.assemble())) + repr(resp)) if update_state: conn.state.append(("http", { "state": "connect", @@ -53,884 +56,6 @@ def send_connect_request(conn, host, port, update_state=True): return resp -class decoded(object): - """ - A context manager that decodes a request or response, and then - re-encodes it with the same encoding after execution of the block. - - Example: - with decoded(request): - request.content = request.content.replace("foo", "bar") - """ - - def __init__(self, o): - self.o = o - ce = o.headers.get_first("content-encoding") - if ce in encoding.ENCODINGS: - self.ce = ce - else: - self.ce = None - - def __enter__(self): - if self.ce: - self.o.decode() - - def __exit__(self, type, value, tb): - if self.ce: - self.o.encode(self.ce) - - -class HTTPMessage(stateobject.StateObject): - """ - Base class for HTTPRequest and HTTPResponse - """ - - def __init__(self, httpversion, headers, content, timestamp_start=None, - timestamp_end=None): - self.httpversion = httpversion - self.headers = headers - """@type: odict.ODictCaseless""" - self.content = content - - self.timestamp_start = timestamp_start - self.timestamp_end = timestamp_end - - _stateobject_attributes = dict( - httpversion=tuple, - headers=odict.ODictCaseless, - content=str, - timestamp_start=float, - timestamp_end=float - ) - _stateobject_long_attributes = {"content"} - - def get_state(self, short=False): - ret = super(HTTPMessage, self).get_state(short) - if short: - if self.content: - ret["contentLength"] = len(self.content) - elif self.content == CONTENT_MISSING: - ret["contentLength"] = None - else: - ret["contentLength"] = 0 - return ret - - def get_decoded_content(self): - """ - Returns the decoded content based on the current Content-Encoding - header. - Doesn't change the message iteself or its headers. - """ - ce = self.headers.get_first("content-encoding") - if not self.content or ce not in encoding.ENCODINGS: - return self.content - return encoding.decode(ce, self.content) - - def decode(self): - """ - Decodes content based on the current Content-Encoding header, then - removes the header. 
If there is no Content-Encoding header, no - action is taken. - - Returns True if decoding succeeded, False otherwise. - """ - ce = self.headers.get_first("content-encoding") - if not self.content or ce not in encoding.ENCODINGS: - return False - data = encoding.decode(ce, self.content) - if data is None: - return False - self.content = data - del self.headers["content-encoding"] - return True - - def encode(self, e): - """ - Encodes content with the encoding e, where e is "gzip", "deflate" - or "identity". - """ - # FIXME: Error if there's an existing encoding header? - self.content = encoding.encode(e, self.content) - self.headers["content-encoding"] = [e] - - def size(self, **kwargs): - """ - Size in bytes of a fully rendered message, including headers and - HTTP lead-in. - """ - hl = len(self._assemble_head(**kwargs)) - if self.content: - return hl + len(self.content) - else: - return hl - - def copy(self): - c = copy.copy(self) - c.headers = self.headers.copy() - return c - - def replace(self, pattern, repl, *args, **kwargs): - """ - Replaces a regular expression pattern with repl in both the headers - and the body of the message. Encoded content will be decoded - before replacement, and re-encoded afterwards. - - Returns the number of replacements made. - """ - with decoded(self): - self.content, c = utils.safe_subn( - pattern, repl, self.content, *args, **kwargs - ) - c += self.headers.replace(pattern, repl, *args, **kwargs) - return c - - def _assemble_first_line(self): - """ - Returns the assembled request/response line - """ - raise NotImplementedError() # pragma: nocover - - def _assemble_headers(self): - """ - Returns the assembled headers - """ - raise NotImplementedError() # pragma: nocover - - def _assemble_head(self): - """ - Returns the assembled request/response line plus headers - """ - raise NotImplementedError() # pragma: nocover - - def assemble(self): - """ - Returns the assembled request/response - """ - raise NotImplementedError() # pragma: nocover - - -class HTTPRequest(HTTPMessage): - """ - An HTTP request. - - Exposes the following attributes: - - method: HTTP method - - scheme: URL scheme (http/https) - - host: Target hostname of the request. This is not neccessarily the - directy upstream server (which could be another proxy), but it's always - the target server we want to reach at the end. This attribute is either - inferred from the request itself (absolute-form, authority-form) or from - the connection metadata (e.g. the host in reverse proxy mode). - - port: Destination port - - path: Path portion of the URL (not present in authority-form) - - httpversion: HTTP version tuple, e.g. (1,1) - - headers: odict.ODictCaseless object - - content: Content of the request, None, or CONTENT_MISSING if there - is content associated, but not present. CONTENT_MISSING evaluates - to False to make checking for the presence of content natural. - - form_in: The request form which mitmproxy has received. 
The following - values are possible: - - - relative (GET /index.html, OPTIONS *) (covers origin form and - asterisk form) - - absolute (GET http://example.com:80/index.html) - - authority-form (CONNECT example.com:443) - Details: http://tools.ietf.org/html/draft-ietf-httpbis-p1-messaging-25#section-5.3 - - form_out: The request form which mitmproxy will send out to the - destination - - timestamp_start: Timestamp indicating when request transmission started - - timestamp_end: Timestamp indicating when request transmission ended - """ - - def __init__( - self, - form_in, - method, - scheme, - host, - port, - path, - httpversion, - headers, - content, - timestamp_start=None, - timestamp_end=None, - form_out=None - ): - assert isinstance(headers, odict.ODictCaseless) or not headers - HTTPMessage.__init__( - self, - httpversion, - headers, - content, - timestamp_start, - timestamp_end - ) - self.form_in = form_in - self.method = method - self.scheme = scheme - self.host = host - self.port = port - self.path = path - self.httpversion = httpversion - self.form_out = form_out or form_in - - # Have this request's cookies been modified by sticky cookies or auth? - self.stickycookie = False - self.stickyauth = False - # Is this request replayed? - self.is_replay = False - - _stateobject_attributes = HTTPMessage._stateobject_attributes.copy() - _stateobject_attributes.update( - form_in=str, - method=str, - scheme=str, - host=str, - port=int, - path=str, - form_out=str, - is_replay=bool - ) - - @property - def body(self): - return self.content - - @classmethod - def from_state(cls, state): - f = cls( - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None) - f.load_state(state) - return f - - def __repr__(self): - return "<HTTPRequest: {0}>".format( - self._assemble_first_line(self.form_in)[:-9] - ) - - @classmethod - def from_stream( - cls, - rfile, - include_body=True, - body_size_limit=None, - wfile=None): - """ - Parse an HTTP request from a file stream - - Args: - rfile (file): Input file to read from - include_body (bool): Read response body as well - body_size_limit (bool): Maximum body size - wfile (file): If specified, HTTP Expect headers are handled automatically. - by writing a HTTP 100 CONTINUE response to the stream. - - Returns: - HTTPRequest: The HTTP request - - Raises: - HttpError: If the input is invalid. 
- """ - timestamp_start, timestamp_end = None, None - - timestamp_start = utils.timestamp() - if hasattr(rfile, "reset_timestamps"): - rfile.reset_timestamps() - - protocol = http1.HTTP1Protocol(rfile=rfile, wfile=wfile) - req = protocol.read_request( - include_body = include_body, - body_size_limit = body_size_limit, - ) - - if hasattr(rfile, "first_byte_timestamp"): - # more accurate timestamp_start - timestamp_start = rfile.first_byte_timestamp - - timestamp_end = utils.timestamp() - return HTTPRequest( - req.form_in, - req.method, - req.scheme, - req.host, - req.port, - req.path, - req.httpversion, - req.headers, - req.body, - timestamp_start, - timestamp_end - ) - - def _assemble_first_line(self, form=None): - form = form or self.form_out - - if form == "relative": - request_line = '%s %s HTTP/%s.%s' % ( - self.method, self.path, self.httpversion[0], self.httpversion[1] - ) - elif form == "authority": - request_line = '%s %s:%s HTTP/%s.%s' % ( - self.method, self.host, self.port, self.httpversion[0], - self.httpversion[1] - ) - elif form == "absolute": - request_line = '%s %s://%s:%s%s HTTP/%s.%s' % ( - self.method, self.scheme, self.host, - self.port, self.path, self.httpversion[0], - self.httpversion[1] - ) - else: - raise http.HttpError(400, "Invalid request form") - return request_line - - # This list is adopted legacy code. - # We probably don't need to strip off keep-alive. - _headers_to_strip_off = ['Proxy-Connection', - 'Keep-Alive', - 'Connection', - 'Transfer-Encoding', - 'Upgrade'] - - def _assemble_headers(self): - headers = self.headers.copy() - for k in self._headers_to_strip_off: - del headers[k] - if 'host' not in headers and self.scheme and self.host and self.port: - headers["Host"] = [utils.hostport(self.scheme, - self.host, - self.port)] - - # If content is defined (i.e. not None or CONTENT_MISSING), we always - # add a content-length header. - if self.content or self.content == "": - headers["Content-Length"] = [str(len(self.content))] - - return headers.format() - - def _assemble_head(self, form=None): - return "%s\r\n%s\r\n" % ( - self._assemble_first_line(form), self._assemble_headers() - ) - - def assemble(self, form=None): - """ - Assembles the request for transmission to the server. We make some - modifications to make sure interception works properly. - - Raises an Exception if the request cannot be assembled. - """ - if self.content == CONTENT_MISSING: - raise proxy.ProxyError( - 502, - "Cannot assemble flow with CONTENT_MISSING" - ) - head = self._assemble_head(form) - if self.content: - return head + self.content - else: - return head - - def __hash__(self): - return id(self) - - def anticache(self): - """ - Modifies this request to remove headers that might produce a cached - response. That is, we remove ETags and If-Modified-Since headers. - """ - delheaders = [ - "if-modified-since", - "if-none-match", - ] - for i in delheaders: - del self.headers[i] - - def anticomp(self): - """ - Modifies this request to remove headers that will compress the - resource's data. - """ - self.headers["accept-encoding"] = ["identity"] - - def constrain_encoding(self): - """ - Limits the permissible Accept-Encoding values, based on what we can - decode appropriately. - """ - if self.headers["accept-encoding"]: - self.headers["accept-encoding"] = [ - ', '.join( - e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0])] - - def update_host_header(self): - """ - Update the host header to reflect the current target. 
- """ - self.headers["Host"] = [self.host] - - def get_form(self): - """ - Retrieves the URL-encoded or multipart form data, returning an ODict object. - Returns an empty ODict if there is no data or the content-type - indicates non-form data. - """ - if self.content: - if self.headers.in_any("content-type", HDR_FORM_URLENCODED, True): - return self.get_form_urlencoded() - elif self.headers.in_any("content-type", HDR_FORM_MULTIPART, True): - return self.get_form_multipart() - return odict.ODict([]) - - def get_form_urlencoded(self): - """ - Retrieves the URL-encoded form data, returning an ODict object. - Returns an empty ODict if there is no data or the content-type - indicates non-form data. - """ - if self.content and self.headers.in_any( - "content-type", - HDR_FORM_URLENCODED, - True): - return odict.ODict(utils.urldecode(self.content)) - return odict.ODict([]) - - def get_form_multipart(self): - if self.content and self.headers.in_any( - "content-type", - HDR_FORM_MULTIPART, - True): - return odict.ODict( - utils.multipartdecode( - self.headers, - self.content)) - return odict.ODict([]) - - def set_form_urlencoded(self, odict): - """ - Sets the body to the URL-encoded form data, and adds the - appropriate content-type header. Note that this will destory the - existing body if there is one. - """ - # FIXME: If there's an existing content-type header indicating a - # url-encoded form, leave it alone. - self.headers["Content-Type"] = [HDR_FORM_URLENCODED] - self.content = utils.urlencode(odict.lst) - - def get_path_components(self): - """ - Returns the path components of the URL as a list of strings. - - Components are unquoted. - """ - _, _, path, _, _, _ = urlparse.urlparse(self.url) - return [urllib.unquote(i) for i in path.split("/") if i] - - def set_path_components(self, lst): - """ - Takes a list of strings, and sets the path component of the URL. - - Components are quoted. - """ - lst = [urllib.quote(i, safe="") for i in lst] - path = "/" + "/".join(lst) - scheme, netloc, _, params, query, fragment = urlparse.urlparse(self.url) - self.url = urlparse.urlunparse( - [scheme, netloc, path, params, query, fragment] - ) - - def get_query(self): - """ - Gets the request query string. Returns an ODict object. - """ - _, _, _, _, query, _ = urlparse.urlparse(self.url) - if query: - return odict.ODict(utils.urldecode(query)) - return odict.ODict([]) - - def set_query(self, odict): - """ - Takes an ODict object, and sets the request query string. - """ - scheme, netloc, path, params, _, fragment = urlparse.urlparse(self.url) - query = utils.urlencode(odict.lst) - self.url = urlparse.urlunparse( - [scheme, netloc, path, params, query, fragment] - ) - - def pretty_host(self, hostheader): - """ - Heuristic to get the host of the request. - - Note that pretty_host() does not always return the TCP destination - of the request, e.g. if an upstream proxy is in place - - If hostheader is set to True, the Host: header will be used as - additional (and preferred) data source. This is handy in - transparent mode, where only the IO of the destination is known, - but not the resolved name. This is disabled by default, as an - attacker may spoof the host header to confuse an analyst. 
- """ - host = None - if hostheader: - host = self.headers.get_first("host") - if not host: - host = self.host - if host: - try: - return host.encode("idna") - except ValueError: - return host - else: - return None - - def pretty_url(self, hostheader): - if self.form_out == "authority": # upstream proxy mode - return "%s:%s" % (self.pretty_host(hostheader), self.port) - return utils.unparse_url(self.scheme, - self.pretty_host(hostheader), - self.port, - self.path).encode('ascii') - - @property - def url(self): - """ - Returns a URL string, constructed from the Request's URL components. - """ - return utils.unparse_url( - self.scheme, - self.host, - self.port, - self.path - ).encode('ascii') - - @url.setter - def url(self, url): - """ - Parses a URL specification, and updates the Request's information - accordingly. - - Returns False if the URL was invalid, True if the request succeeded. - """ - parts = http.parse_url(url) - if not parts: - raise ValueError("Invalid URL: %s" % url) - self.scheme, self.host, self.port, self.path = parts - - def get_cookies(self): - """ - - Returns a possibly empty netlib.odict.ODict object. - """ - ret = odict.ODict() - for i in self.headers["cookie"]: - ret.extend(cookies.parse_cookie_header(i)) - return ret - - def set_cookies(self, odict): - """ - Takes an netlib.odict.ODict object. Over-writes any existing Cookie - headers. - """ - v = cookies.format_cookie_header(odict) - self.headers["Cookie"] = [v] - - def replace(self, pattern, repl, *args, **kwargs): - """ - Replaces a regular expression pattern with repl in the headers, the - request path and the body of the request. Encoded content will be - decoded before replacement, and re-encoded afterwards. - - Returns the number of replacements made. - """ - c = HTTPMessage.replace(self, pattern, repl, *args, **kwargs) - self.path, pc = utils.safe_subn( - pattern, repl, self.path, *args, **kwargs - ) - c += pc - return c - - -class HTTPResponse(HTTPMessage): - """ - An HTTP response. - - Exposes the following attributes: - - httpversion: HTTP version tuple, e.g. (1,1) - - code: HTTP response code - - msg: HTTP response message - - headers: ODict object - - content: Content of the request, None, or CONTENT_MISSING if there - is content associated, but not present. CONTENT_MISSING evaluates - to False to make checking for the presence of content natural. - - timestamp_start: Timestamp indicating when request transmission started - - timestamp_end: Timestamp indicating when request transmission ended - """ - - def __init__( - self, - httpversion, - code, - msg, - headers, - content, - timestamp_start=None, - timestamp_end=None): - assert isinstance(headers, odict.ODictCaseless) or headers is None - HTTPMessage.__init__( - self, - httpversion, - headers, - content, - timestamp_start, - timestamp_end - ) - - self.code = code - self.msg = msg - - # Is this request replayed? 
- self.is_replay = False - self.stream = False - - _stateobject_attributes = HTTPMessage._stateobject_attributes.copy() - _stateobject_attributes.update( - code=int, - msg=str - ) - - - @property - def body(self): - return self.content - - - @classmethod - def from_state(cls, state): - f = cls(None, None, None, None, None) - f.load_state(state) - return f - - def __repr__(self): - if self.content: - size = netlib.utils.pretty_size(len(self.content)) - else: - size = "content missing" - return "<HTTPResponse: {code} {msg} ({contenttype}, {size})>".format( - code=self.code, - msg=self.msg, - contenttype=self.headers.get_first( - "content-type", "unknown content type" - ), - size=size - ) - - @classmethod - def from_stream( - cls, - rfile, - request_method, - include_body=True, - body_size_limit=None): - """ - Parse an HTTP response from a file stream - """ - - timestamp_start = utils.timestamp() - - if hasattr(rfile, "reset_timestamps"): - rfile.reset_timestamps() - - protocol = http1.HTTP1Protocol(rfile=rfile) - resp = protocol.read_response( - request_method, - body_size_limit, - include_body=include_body - ) - - if hasattr(rfile, "first_byte_timestamp"): - # more accurate timestamp_start - timestamp_start = rfile.first_byte_timestamp - - if include_body: - timestamp_end = utils.timestamp() - else: - timestamp_end = None - - return HTTPResponse( - resp.httpversion, - resp.status_code, - resp.msg, - resp.headers, - resp.body, - timestamp_start, - timestamp_end - ) - - def _assemble_first_line(self): - return 'HTTP/%s.%s %s %s' % \ - (self.httpversion[0], self.httpversion[1], self.code, self.msg) - - _headers_to_strip_off = ['Proxy-Connection', - 'Alternate-Protocol', - 'Alt-Svc'] - - def _assemble_headers(self, preserve_transfer_encoding=False): - headers = self.headers.copy() - for k in self._headers_to_strip_off: - del headers[k] - if not preserve_transfer_encoding: - del headers['Transfer-Encoding'] - - # If content is defined (i.e. not None or CONTENT_MISSING), we always - # add a content-length header. - if self.content or self.content == "": - headers["Content-Length"] = [str(len(self.content))] - - return headers.format() - - def _assemble_head(self, preserve_transfer_encoding=False): - return '%s\r\n%s\r\n' % ( - self._assemble_first_line(), - self._assemble_headers( - preserve_transfer_encoding=preserve_transfer_encoding - ) - ) - - def assemble(self): - """ - Assembles the response for transmission to the client. We make some - modifications to make sure interception works properly. - - Raises an Exception if the request cannot be assembled. - """ - if self.content == CONTENT_MISSING: - raise proxy.ProxyError( - 502, - "Cannot assemble flow with CONTENT_MISSING" - ) - head = self._assemble_head() - if self.content: - return head + self.content - else: - return head - - def _refresh_cookie(self, c, delta): - """ - Takes a cookie string c and a time delta in seconds, and returns - a refreshed cookie string. - """ - c = Cookie.SimpleCookie(str(c)) - for i in c.values(): - if "expires" in i: - d = parsedate_tz(i["expires"]) - if d: - d = mktime_tz(d) + delta - i["expires"] = formatdate(d) - else: - # This can happen when the expires tag is invalid. - # reddit.com sends a an expires tag like this: "Thu, 31 Dec - # 2037 23:59:59 GMT", which is valid RFC 1123, but not - # strictly correct according to the cookie spec. Browsers - # appear to parse this tolerantly - maybe we should too. - # For now, we just ignore this. 
- del i["expires"] - return c.output(header="").strip() - - def refresh(self, now=None): - """ - This fairly complex and heuristic function refreshes a server - response for replay. - - - It adjusts date, expires and last-modified headers. - - It adjusts cookie expiration. - """ - if not now: - now = time.time() - delta = now - self.timestamp_start - refresh_headers = [ - "date", - "expires", - "last-modified", - ] - for i in refresh_headers: - if i in self.headers: - d = parsedate_tz(self.headers[i][0]) - if d: - new = mktime_tz(d) + delta - self.headers[i] = [formatdate(new)] - c = [] - for i in self.headers["set-cookie"]: - c.append(self._refresh_cookie(i, delta)) - if c: - self.headers["set-cookie"] = c - - def get_cookies(self): - """ - Get the contents of all Set-Cookie headers. - - Returns a possibly empty ODict, where keys are cookie name strings, - and values are [value, attr] lists. Value is a string, and attr is - an ODictCaseless containing cookie attributes. Within attrs, unary - attributes (e.g. HTTPOnly) are indicated by a Null value. - """ - ret = [] - for header in self.headers["set-cookie"]: - v = http.cookies.parse_set_cookie_header(header) - if v: - name, value, attrs = v - ret.append([name, [value, attrs]]) - return odict.ODict(ret) - - def set_cookies(self, odict): - """ - Set the Set-Cookie headers on this response, over-writing existing - headers. - - Accepts an ODict of the same format as that returned by get_cookies. - """ - values = [] - for i in odict.lst: - values.append( - http.cookies.format_set_cookie_header( - i[0], - i[1][0], - i[1][1] - ) - ) - self.headers["Set-Cookie"] = values - - class HTTPFlow(Flow): """ A HTTPFlow is a collection of objects representing a single HTTP @@ -1049,14 +174,19 @@ class HTTPHandler(ProtocolHandler): def get_response_from_server(self, flow): self.c.establish_server_connection() - request_raw = flow.request.assemble() for attempt in (0, 1): try: - self.c.server_conn.send(request_raw) + if not self.c.server_conn.protocol: + # instantiate new protocol if connection does not have one yet + self.c.server_conn.protocol = http2.HTTP2Protocol(self.c.server_conn) + self.c.server_conn.protocol.perform_connection_preface() + + self.c.server_conn.send(self.c.server_conn.protocol.assemble(flow.request)) + # Only get the headers at first... 
- flow.response = HTTPResponse.from_stream( - self.c.server_conn.rfile, + flow.response = HTTPResponse.from_protocol( + flow.server_conn.protocol, flow.request.method, body_size_limit=self.c.config.body_size_limit, include_body=False @@ -1094,24 +224,28 @@ class HTTPHandler(ProtocolHandler): if flow.response.stream: flow.response.content = CONTENT_MISSING else: - protocol = http1.HTTP1Protocol(rfile=self.c.server_conn.rfile) - flow.response.content = protocol.read_http_body( - flow.response.headers, - self.c.config.body_size_limit, - flow.request.method, - flow.response.code, - False - ) + if isinstance(flow.server_conn.protocol, http1.HTTP1Protocol): + flow.response.content = flow.server_conn.protocol.read_http_body( + flow.response.headers, + self.c.config.body_size_limit, + flow.request.method, + flow.response.code, + False + ) flow.response.timestamp_end = utils.timestamp() def handle_flow(self): flow = HTTPFlow(self.c.client_conn, self.c.server_conn, self.live) + try: try: - req = HTTPRequest.from_stream( - self.c.client_conn.rfile, - body_size_limit=self.c.config.body_size_limit, - wfile=self.c.client_conn.wfile + if not flow.client_conn.protocol: + # instantiate new protocol if connection does not have one yet + flow.client_conn.protocol = http1.HTTP1Protocol(self.c.client_conn) + + req = HTTPRequest.from_protocol( + flow.client_conn.protocol, + body_size_limit=self.c.config.body_size_limit ) except tcp.NetLibError: # don't throw an error for disconnects that happen @@ -1120,12 +254,18 @@ class HTTPHandler(ProtocolHandler): self.c.log( "request", "debug", - [req._assemble_first_line(req.form_in)] + [repr(req)] ) ret = self.process_request(flow, req) + if ret: + # CONNECT successful - upgrade to HTTP/2 + # instantiate new protocol if connection does not have one yet + flow.client_conn.protocol = http2.HTTP2Protocol(self.c.client_conn, is_server=True) if ret is not None: return ret + print("still here: %s" % flow.client_conn.protocol.__class__) + # Be careful NOT to assign the request to the flow before # process_request completes. This is because the call can raise an # exception. 
If the request object is already attached, this results @@ -1149,8 +289,10 @@ class HTTPHandler(ProtocolHandler): flow.server_conn = self.c.server_conn self.c.log( - "response", "debug", [ - flow.response._assemble_first_line()]) + "response", + "debug", + [repr(flow.response)] + ) response_reply = self.c.channel.ask("response", flow) if response_reply is None or response_reply == KILL: raise KillSignal() @@ -1247,30 +389,31 @@ class HTTPHandler(ProtocolHandler): pass def send_error(self, code, message, headers): - response = http.status_codes.RESPONSES.get(code, "Unknown") - html_content = """ - <html> - <head> - <title>%d %s</title> - </head> - <body>%s</body> - </html> - """ % (code, response, message) - self.c.client_conn.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response)) - self.c.client_conn.wfile.write( - "Server: %s\r\n" % self.c.config.server_version - ) - self.c.client_conn.wfile.write("Content-type: text/html\r\n") - self.c.client_conn.wfile.write( - "Content-Length: %d\r\n" % len(html_content) - ) - if headers: - for key, value in headers.items(): - self.c.client_conn.wfile.write("%s: %s\r\n" % (key, value)) - self.c.client_conn.wfile.write("Connection: close\r\n") - self.c.client_conn.wfile.write("\r\n") - self.c.client_conn.wfile.write(html_content) - self.c.client_conn.wfile.flush() + raise NotImplementedError("todo - adapt for HTTP/2 - make use of make_error_reponse from pathod") + # response = http.status_codes.RESPONSES.get(code, "Unknown") + # html_content = """ + # <html> + # <head> + # <title>%d %s</title> + # </head> + # <body>%s</body> + # </html> + # """ % (code, response, message) + # self.c.client_conn.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response)) + # self.c.client_conn.wfile.write( + # "Server: %s\r\n" % self.c.config.server_version + # ) + # self.c.client_conn.wfile.write("Content-type: text/html\r\n") + # self.c.client_conn.wfile.write( + # "Content-Length: %d\r\n" % len(html_content) + # ) + # if headers: + # for key, value in headers.items(): + # self.c.client_conn.wfile.write("%s: %s\r\n" % (key, value)) + # self.c.client_conn.wfile.write("Connection: close\r\n") + # self.c.client_conn.wfile.write("\r\n") + # self.c.client_conn.wfile.write(html_content) + # self.c.client_conn.wfile.flush() def process_request(self, flow, request): """ @@ -1426,30 +569,33 @@ class HTTPHandler(ProtocolHandler): # no streaming: # we already received the full response from the server and can # send it to the client straight away. 
- self.c.client_conn.send(flow.response.assemble()) + self.c.client_conn.send(self.c.client_conn.protocol.assemble(flow.response)) else: + raise NotImplementedError("HTTP streaming is currently not supported.") + # TODO: implement it according to new protocols and messages + # streaming: # First send the headers and then transfer the response # incrementally: - h = flow.response._assemble_head(preserve_transfer_encoding=True) - self.c.client_conn.send(h) - - protocol = http1.HTTP1Protocol(rfile=self.c.server_conn.rfile) - chunks = protocol.read_http_body_chunked( - flow.response.headers, - self.c.config.body_size_limit, - flow.request.method, - flow.response.code, - False, - 4096 - ) - if callable(flow.response.stream): - chunks = flow.response.stream(chunks) - for chunk in chunks: - for part in chunk: - self.c.client_conn.wfile.write(part) - self.c.client_conn.wfile.flush() - flow.response.timestamp_end = utils.timestamp() + # h = flow.response._assemble_head(preserve_transfer_encoding=True) + # self.c.client_conn.send(h) + # + # protocol = http1.HTTP1Protocol(rfile=self.c.server_conn.rfile) + # chunks = protocol.read_http_body_chunked( + # flow.response.headers, + # self.c.config.body_size_limit, + # flow.request.method, + # flow.response.code, + # False, + # 4096 + # ) + # if callable(flow.response.stream): + # chunks = flow.response.stream(chunks) + # for chunk in chunks: + # for part in chunk: + # self.c.client_conn.wfile.write(part) + # self.c.client_conn.wfile.flush() + # flow.response.timestamp_end = utils.timestamp() def check_close_connection(self, flow): """ @@ -1599,12 +745,14 @@ class RequestReplayThread(threading.Thread): sni=self.flow.server_conn.sni ) r.form_out = "relative" - server.send(r.assemble()) + + server.send(self.flow.server_conn.protocol.assemble(r)) self.flow.server_conn = server - self.flow.response = HTTPResponse.from_stream( - server.rfile, + + self.flow.response = HTTPResponse.from_protocol( + self.flow.server_conn.protocol, r.method, - body_size_limit=self.config.body_size_limit + body_size_limit=self.config.body_size_limit, ) if self.channel: response_reply = self.channel.ask("response", self.flow) |