Diffstat (limited to 'examples')
-rw-r--r--  examples/addons/commands-paths.py        6
-rw-r--r--  examples/complex/dns_spoofing.py          2
-rw-r--r--  examples/complex/har_dump.py              5
-rw-r--r--  examples/complex/sslstrip.py              4
-rwxr-xr-x  examples/complex/xss_scanner.py           2
-rw-r--r--  examples/simple/README.md                 1
-rw-r--r--  examples/simple/link_expander.py         28
-rw-r--r--  examples/simple/websocket_messages.py    13
8 files changed, 53 insertions(+), 8 deletions(-)
diff --git a/examples/addons/commands-paths.py b/examples/addons/commands-paths.py
index f37a0fbc..4d9535b9 100644
--- a/examples/addons/commands-paths.py
+++ b/examples/addons/commands-paths.py
@@ -20,9 +20,9 @@ class MyAddon:
         for f in flows:
             totals[f.request.host] = totals.setdefault(f.request.host, 0) + 1
-        fp = open(path, "w+")
-        for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
-            fp.write("%s: %s\n" % (cnt, dom))
+        with open(path, "w+") as fp:
+            for cnt, dom in sorted([(v, k) for (k, v) in totals.items()]):
+                fp.write("%s: %s\n" % (cnt, dom))
         ctx.log.alert("done")
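This change wraps the same file-writing loop in a context manager. As a minimal standalone sketch of the benefit (write_counts and its arguments are hypothetical, not part of commands-paths.py), the file is closed even if a write raises:

    def write_counts(path, totals):
        with open(path, "w+") as fp:
            for cnt, dom in sorted((v, k) for (k, v) in totals.items()):
                fp.write("%s: %s\n" % (cnt, dom))
        # fp is guaranteed to be closed here, even if fp.write() raised
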
diff --git a/examples/complex/dns_spoofing.py b/examples/complex/dns_spoofing.py
index a3c1a017..63222eae 100644
--- a/examples/complex/dns_spoofing.py
+++ b/examples/complex/dns_spoofing.py
@@ -15,7 +15,7 @@ Usage:
         # Used as the target location if neither SNI nor host header are present.
         --mode reverse:http://example.com/
         # To avoid auto rewriting of host header by the reverse proxy target.
-        --set keep-host-header
+        --set keep_host_header
     mitmdump
         -p 80
         --mode reverse:http://localhost:443/
diff --git a/examples/complex/har_dump.py b/examples/complex/har_dump.py
index 33a2f79f..e3cea9fd 100644
--- a/examples/complex/har_dump.py
+++ b/examples/complex/har_dump.py
@@ -87,7 +87,10 @@ def response(flow):
     }
     # HAR timings are integers in ms, so we re-encode the raw timings to that format.
-    timings = dict([(k, int(1000 * v)) for k, v in timings_raw.items()])
+    timings = {
+        k: int(1000 * v) if v != -1 else -1
+        for k, v in timings_raw.items()
+    }
     # full_time is the sum of all timings.
     # Timings set to -1 will be ignored as per spec.
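The new dict comprehension scales timings to integer milliseconds while leaving the HAR spec's -1 sentinel untouched. A standalone sketch with made-up values, including one way to compute the full_time sum the comments describe (ignoring -1 entries):

    timings_raw = {"send": 0.002, "wait": 0.130, "ssl": -1}       # hypothetical raw timings in seconds
    timings = {k: int(1000 * v) if v != -1 else -1
               for k, v in timings_raw.items()}                   # {"send": 2, "wait": 130, "ssl": -1}
    full_time = sum(v for v in timings.values() if v > -1)        # 132; -1 entries ignored per spec
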
diff --git a/examples/complex/sslstrip.py b/examples/complex/sslstrip.py
index c862536f..69b9ea9e 100644
--- a/examples/complex/sslstrip.py
+++ b/examples/complex/sslstrip.py
@@ -38,7 +38,7 @@ def response(flow: http.HTTPFlow) -> None:
     flow.response.content = flow.response.content.replace(b'https://', b'http://')
     # strip meta tag upgrade-insecure-requests in response body
-    csp_meta_tag_pattern = b'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
+    csp_meta_tag_pattern = br'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
     flow.response.content = re.sub(csp_meta_tag_pattern, b'', flow.response.content, flags=re.IGNORECASE)
     # strip links in 'Location' header
@@ -52,7 +52,7 @@ def response(flow: http.HTTPFlow) -> None:
     # strip upgrade-insecure-requests in Content-Security-Policy header
     if re.search('upgrade-insecure-requests', flow.response.headers.get('Content-Security-Policy', ''), flags=re.IGNORECASE):
         csp = flow.response.headers['Content-Security-Policy']
-        flow.response.headers['Content-Security-Policy'] = re.sub('upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
+        flow.response.headers['Content-Security-Policy'] = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
     # strip secure flag from 'Set-Cookie' headers
     cookies = flow.response.headers.get_all('Set-Cookie')
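Both sslstrip.py changes only add a raw-string prefix; the regular expressions themselves are unchanged. Sequences such as \s are not valid Python string escapes, so non-raw literals trigger a DeprecationWarning on Python 3.6+, while raw literals hand the backslashes to the regex engine untouched. A standalone illustration with a made-up header value:

    import re

    csp = "default-src https:; upgrade-insecure-requests; frame-ancestors 'none'"   # hypothetical header
    stripped = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
    # stripped == "default-src https:; frame-ancestors 'none'"
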
diff --git a/examples/complex/xss_scanner.py b/examples/complex/xss_scanner.py
index cdaaf478..97e94ed4 100755
--- a/examples/complex/xss_scanner.py
+++ b/examples/complex/xss_scanner.py
@@ -1,4 +1,4 @@
-"""
+r"""
__ __ _____ _____ _____
\ \ / // ____/ ____| / ____|
diff --git a/examples/simple/README.md b/examples/simple/README.md
index 2fafdd5a..66a05b30 100644
--- a/examples/simple/README.md
+++ b/examples/simple/README.md
@@ -8,6 +8,7 @@
| filter_flows.py | This script demonstrates how to use mitmproxy's filter pattern in scripts. |
| io_read_dumpfile.py | Read a dumpfile generated by mitmproxy. |
| io_write_dumpfile.py | Only write selected flows into a mitmproxy dumpfile. |
+| link_expander.py | Discover relative links in HTML traffic and replace them with absolute paths. |
| log_events.py | Use mitmproxy's logging API. |
| modify_body_inject_iframe.py | Inject configurable iframe into pages. |
| modify_form.py | Modify HTTP form submissions. |
diff --git a/examples/simple/link_expander.py b/examples/simple/link_expander.py
new file mode 100644
index 00000000..0edf7c98
--- /dev/null
+++ b/examples/simple/link_expander.py
@@ -0,0 +1,28 @@
+# This script determines if a response is an HTML webpage and, if so, seeks out
+# relative links (<a href="./about.html">) and expands them to absolute links.
+# In practice this can be used to front an indexing spider that may not have the capability to expand relative page links.
+# Usage: mitmdump -s link_expander.py or mitmproxy -s link_expander.py
+
+import re
+from urllib.parse import urljoin
+
+
+def response(flow):
+
+    if "Content-Type" in flow.response.headers and flow.response.headers["Content-Type"].find("text/html") != -1:
+        pageUrl = flow.request.url
+        pageText = flow.response.text
+        pattern = (r"<a\s+(?:[^>]*?\s+)?href=(?P<delimiter>[\"'])"
+                   r"(?P<link>(?!https?:\/\/|ftps?:\/\/|\/\/|#|javascript:|mailto:).*?)(?P=delimiter)")
+        rel_matcher = re.compile(pattern, flags=re.IGNORECASE)
+        rel_matches = rel_matcher.finditer(pageText)
+        map_dict = {}
+        for match_num, match in enumerate(rel_matches):
+            (delimiter, rel_link) = match.group("delimiter", "link")
+            abs_link = urljoin(pageUrl, rel_link)
+            map_dict["{0}{1}{0}".format(delimiter, rel_link)] = "{0}{1}{0}".format(delimiter, abs_link)
+        for map in map_dict.items():
+            pageText = pageText.replace(*map)
+            # Uncomment the following to print the expansion mapping
+            # print("{0} -> {1}".format(*map))
+        flow.response.text = pageText
\ No newline at end of file
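link_expander.py relies on urllib.parse.urljoin to resolve each relative href against the URL of the page it was found on. A standalone sketch of that resolution (example URLs are made up):

    from urllib.parse import urljoin

    print(urljoin("http://example.com/docs/index.html", "./about.html"))     # http://example.com/docs/about.html
    print(urljoin("http://example.com/docs/index.html", "../img/logo.png"))  # http://example.com/img/logo.png
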
diff --git a/examples/simple/websocket_messages.py b/examples/simple/websocket_messages.py
new file mode 100644
index 00000000..719e7b10
--- /dev/null
+++ b/examples/simple/websocket_messages.py
@@ -0,0 +1,13 @@
+import re
+from mitmproxy import ctx
+
+
+def websocket_message(flow):
+    # get the latest message
+    message = flow.messages[-1]
+
+    # simply print the content of the message
+    ctx.log.info(message.content)
+
+    # manipulate the message content
+    message.content = re.sub(r'^Hello', 'HAPPY', message.content)
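The substitution above is anchored with ^, so only messages whose content starts with 'Hello' are rewritten; everything else passes through unchanged. A standalone illustration with made-up message text:

    import re

    print(re.sub(r'^Hello', 'HAPPY', 'Hello world'))   # HAPPY world
    print(re.sub(r'^Hello', 'HAPPY', 'Say Hello'))     # Say Hello  (no match at the start, unchanged)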