diff --git a/check_coding_style.sh b/check_coding_style.sh
new file mode 100755
index 000000000..5b38e003e
--- /dev/null
+++ b/check_coding_style.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
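+# Reformat all Python code in place: -i in place, -r recursive, -a -a double-aggressive.
+# Any resulting change leaves the working tree dirty and fails the check below.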
+autopep8 -i -r -a -a .
+if [[ -n "$(git status -s)" ]]; then
+ echo "autopep8 yielded the following changes:"
+ git status -s
+ git --no-pager diff
+ exit 1
+fi
+
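+# Strip unused imports and unused variables in place with autoflake.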
+autoflake -i -r --remove-all-unused-imports --remove-unused-variables .
+if [[ -n "$(git status -s)" ]]; then
+ echo "autoflake yielded the following changes:"
+ git status -s
+ git --no-pager diff
+ exit 1
+fi
+
+echo "Coding style seems to be ok."
+exit 0
diff --git a/doc-src/dev/index.py b/doc-src/dev/index.py
index 0f2a64943..ddf100d0c 100644
--- a/doc-src/dev/index.py
+++ b/doc-src/dev/index.py
@@ -1,8 +1,8 @@
from countershape import Page
pages = [
- Page("testing.html", "Testing"),
- Page("architecture.html", "Architecture"),
- Page("sslkeylogfile.html", "TLS Master Secrets"),
-# Page("addingviews.html", "Writing Content Views"),
+ Page("testing.html", "Testing"),
+ Page("architecture.html", "Architecture"),
+ Page("sslkeylogfile.html", "TLS Master Secrets"),
+ # Page("addingviews.html", "Writing Content Views"),
]
diff --git a/doc-src/features/index.py b/doc-src/features/index.py
index 693b4439b..fdab1714c 100644
--- a/doc-src/features/index.py
+++ b/doc-src/features/index.py
@@ -16,4 +16,4 @@ pages = [
Page("tcpproxy.html", "TCP Proxy"),
Page("upstreamcerts.html", "Upstream Certs"),
Page("upstreamproxy.html", "Upstream proxy mode"),
-]
\ No newline at end of file
+]
diff --git a/doc-src/index.py b/doc-src/index.py
index 4fefc8660..2ae9fafef 100644
--- a/doc-src/index.py
+++ b/doc-src/index.py
@@ -37,7 +37,7 @@ def mpath(p):
def example(s):
d = file(mpath(s)).read().rstrip()
extemp = """
"""
- return extemp%(countershape.template.Syntax("py")(d), s)
+ return extemp % (countershape.template.Syntax("py")(d), s)
ns.example = example
@@ -52,7 +52,8 @@ def nav(page, current, state):
else:
pre = ""
p = state.application.getPage(page)
- return pre + '%s'%(countershape.widgets.UrlTo(page), p.title)
+ return pre + \
+ '%s' % (countershape.widgets.UrlTo(page), p.title)
ns.nav = nav
ns.navbar = countershape.template.File(None, "_nav.html")
diff --git a/doc-src/tutorials/index.py b/doc-src/tutorials/index.py
index 1cb04679a..3bfe7c124 100644
--- a/doc-src/tutorials/index.py
+++ b/doc-src/tutorials/index.py
@@ -4,4 +4,4 @@ pages = [
Page("30second.html", "Client playback: a 30 second example"),
Page("gamecenter.html", "Setting highscores on Apple's GameCenter"),
Page("transparent-dhcp.html", "Transparently proxify virtual machines")
-]
\ No newline at end of file
+]
diff --git a/examples/add_header.py b/examples/add_header.py
index 291741cb0..0c0593d1f 100644
--- a/examples/add_header.py
+++ b/examples/add_header.py
@@ -1,2 +1,2 @@
def response(context, flow):
- flow.response.headers["newheader"] = ["foo"]
\ No newline at end of file
+ flow.response.headers["newheader"] = ["foo"]
diff --git a/examples/change_upstream_proxy.py b/examples/change_upstream_proxy.py
index 74a43bd0d..7782dd848 100644
--- a/examples/change_upstream_proxy.py
+++ b/examples/change_upstream_proxy.py
@@ -1,10 +1,13 @@
# This scripts demonstrates how mitmproxy can switch to a second/different upstream proxy
# in upstream proxy mode.
#
-# Usage: mitmdump -U http://default-upstream-proxy.local:8080/ -s "change_upstream_proxy.py host"
+# Usage: mitmdump -U http://default-upstream-proxy.local:8080/ -s
+# "change_upstream_proxy.py host"
from libmproxy.protocol.http import send_connect_request
alternative_upstream_proxy = ("localhost", 8082)
+
+
def should_redirect(flow):
return flow.request.host == "example.com"
@@ -15,7 +18,12 @@ def request(context, flow):
# If you want to change the target server, you should modify flow.request.host and flow.request.port
# flow.live.change_server should only be used by inline scripts to change the upstream proxy,
# unless you are sure that you know what you are doing.
- server_changed = flow.live.change_server(alternative_upstream_proxy, persistent_change=True)
+ server_changed = flow.live.change_server(
+ alternative_upstream_proxy,
+ persistent_change=True)
if flow.request.scheme == "https" and server_changed:
- send_connect_request(flow.live.c.server_conn, flow.request.host, flow.request.port)
+ send_connect_request(
+ flow.live.c.server_conn,
+ flow.request.host,
+ flow.request.port)
flow.live.c.establish_ssl(server=True)
diff --git a/examples/dns_spoofing.py b/examples/dns_spoofing.py
index cfba7c54b..dddf172cc 100644
--- a/examples/dns_spoofing.py
+++ b/examples/dns_spoofing.py
@@ -25,11 +25,13 @@ mitmproxy -p 443 -R https2http://localhost:8000
def request(context, flow):
if flow.client_conn.ssl_established:
# TLS SNI or Host header
- flow.request.host = flow.client_conn.connection.get_servername() or flow.request.pretty_host(hostheader=True)
+ flow.request.host = flow.client_conn.connection.get_servername(
+ ) or flow.request.pretty_host(hostheader=True)
- # If you use a https2http location as default destination, these attributes need to be corrected as well:
+ # If you use a https2http location as default destination, these
+ # attributes need to be corrected as well:
flow.request.port = 443
flow.request.scheme = "https"
else:
# Host header
- flow.request.host = flow.request.pretty_host(hostheader=True)
\ No newline at end of file
+ flow.request.host = flow.request.pretty_host(hostheader=True)
diff --git a/examples/dup_and_replay.py b/examples/dup_and_replay.py
index 3d9279ccf..9ba91d3bf 100644
--- a/examples/dup_and_replay.py
+++ b/examples/dup_and_replay.py
@@ -1,4 +1,4 @@
def request(context, flow):
f = context.duplicate_flow(flow)
f.request.path = "/changed"
- context.replay_request(f)
\ No newline at end of file
+ context.replay_request(f)
diff --git a/examples/filt.py b/examples/filt.py
index cce2a48ad..d2daf9a28 100644
--- a/examples/filt.py
+++ b/examples/filt.py
@@ -3,12 +3,14 @@
from libmproxy import filt
+
def start(context, argv):
- if len(argv) != 2:
- raise ValueError("Usage: -s 'filt.py FILTER'")
- context.filter = filt.parse(argv[1])
+ if len(argv) != 2:
+ raise ValueError("Usage: -s 'filt.py FILTER'")
+ context.filter = filt.parse(argv[1])
+
def response(context, flow):
- if flow.match(context.filter):
- print("Flow matches filter:")
- print(flow)
+ if flow.match(context.filter):
+ print("Flow matches filter:")
+ print(flow)
diff --git a/examples/flowbasic b/examples/flowbasic
index c71debc93..083d7663b 100755
--- a/examples/flowbasic
+++ b/examples/flowbasic
@@ -36,7 +36,8 @@ class MyMaster(flow.FlowMaster):
config = proxy.ProxyConfig(
port=8080,
- cadir="~/.mitmproxy/" # use ~/.mitmproxy/mitmproxy-ca.pem as default CA file.
+ # use ~/.mitmproxy/mitmproxy-ca.pem as default CA file.
+ cadir="~/.mitmproxy/"
)
state = flow.State()
server = ProxyServer(config)
diff --git a/examples/flowwriter.py b/examples/flowwriter.py
index f411ec452..be2f285ed 100644
--- a/examples/flowwriter.py
+++ b/examples/flowwriter.py
@@ -17,4 +17,4 @@ def start(context, argv):
def response(context, flow):
if random.choice([True, False]):
- context.flow_writer.add(flow)
\ No newline at end of file
+ context.flow_writer.add(flow)
diff --git a/examples/har_extractor.py b/examples/har_extractor.py
index 5c228ece6..1a76fa1f9 100644
--- a/examples/har_extractor.py
+++ b/examples/har_extractor.py
@@ -83,7 +83,8 @@ def response(context, flow):
# Calculate the connect_time for this server_conn. Afterwards add it to
# seen list, in order to avoid the connect_time being present in entries
# that use an existing connection.
- connect_time = flow.server_conn.timestamp_tcp_setup - flow.server_conn.timestamp_start
+ connect_time = flow.server_conn.timestamp_tcp_setup - \
+ flow.server_conn.timestamp_start
context.seen_server.add(flow.server_conn)
if flow.server_conn.timestamp_ssl_setup is not None:
@@ -91,7 +92,8 @@ def response(context, flow):
# the start of the successful tcp setup and the successful ssl
# setup. If no ssl setup has been made it is left as -1 since it
# doesn't apply to this connection.
- ssl_time = flow.server_conn.timestamp_ssl_setup - flow.server_conn.timestamp_tcp_setup
+ ssl_time = flow.server_conn.timestamp_ssl_setup - \
+ flow.server_conn.timestamp_tcp_setup
# Calculate the raw timings from the different timestamps present in the
# request and response object. For lack of a way to measure it dns timings
@@ -110,7 +112,8 @@ def response(context, flow):
# HAR timings are integers in ms, so we have to re-encode the raw timings to
# that format.
- timings = dict([(key, int(1000 * value)) for key, value in timings_raw.iteritems()])
+ timings = dict([(key, int(1000 * value))
+ for key, value in timings_raw.iteritems()])
# The full_time is the sum of all timings. Timings set to -1 will be ignored
# as per spec.
@@ -119,20 +122,27 @@ def response(context, flow):
if item > -1:
full_time += item
- started_date_time = datetime.fromtimestamp(flow.request.timestamp_start, tz=utc).isoformat()
+ started_date_time = datetime.fromtimestamp(
+ flow.request.timestamp_start,
+ tz=utc).isoformat()
- request_query_string = [{"name": k, "value": v} for k, v in flow.request.get_query()]
+ request_query_string = [{"name": k, "value": v}
+ for k, v in flow.request.get_query()]
request_http_version = ".".join([str(v) for v in flow.request.httpversion])
# Cookies are shaped as tuples by MITMProxy.
- request_cookies = [{"name": k.strip(), "value": v[0]} for k, v in (flow.request.get_cookies() or {}).iteritems()]
+ request_cookies = [{"name": k.strip(), "value": v[0]}
+ for k, v in (flow.request.get_cookies() or {}).iteritems()]
request_headers = [{"name": k, "value": v} for k, v in flow.request.headers]
request_headers_size = len(str(flow.request.headers))
request_body_size = len(flow.request.content)
- response_http_version = ".".join([str(v) for v in flow.response.httpversion])
+ response_http_version = ".".join(
+ [str(v) for v in flow.response.httpversion])
# Cookies are shaped as tuples by MITMProxy.
- response_cookies = [{"name": k.strip(), "value": v[0]} for k, v in (flow.response.get_cookies() or {}).iteritems()]
- response_headers = [{"name": k, "value": v} for k, v in flow.response.headers]
+ response_cookies = [{"name": k.strip(), "value": v[0]}
+ for k, v in (flow.response.get_cookies() or {}).iteritems()]
+ response_headers = [{"name": k, "value": v}
+ for k, v in flow.response.headers]
response_headers_size = len(str(flow.response.headers))
response_body_size = len(flow.response.content)
response_body_decoded_size = len(flow.response.get_decoded_content())
@@ -140,33 +150,43 @@ def response(context, flow):
response_mime_type = flow.response.headers.get_first('Content-Type', '')
response_redirect_url = flow.response.headers.get_first('Location', '')
- entry = HAR.entries({"startedDateTime": started_date_time,
- "time": full_time,
- "request": {"method": flow.request.method,
- "url": flow.request.url,
- "httpVersion": request_http_version,
- "cookies": request_cookies,
- "headers": request_headers,
- "queryString": request_query_string,
- "headersSize": request_headers_size,
- "bodySize": request_body_size, },
- "response": {"status": flow.response.code,
- "statusText": flow.response.msg,
- "httpVersion": response_http_version,
- "cookies": response_cookies,
- "headers": response_headers,
- "content": {"size": response_body_size,
- "compression": response_body_compression,
- "mimeType": response_mime_type},
- "redirectURL": response_redirect_url,
- "headersSize": response_headers_size,
- "bodySize": response_body_size, },
- "cache": {},
- "timings": timings, })
+ entry = HAR.entries(
+ {
+ "startedDateTime": started_date_time,
+ "time": full_time,
+ "request": {
+ "method": flow.request.method,
+ "url": flow.request.url,
+ "httpVersion": request_http_version,
+ "cookies": request_cookies,
+ "headers": request_headers,
+ "queryString": request_query_string,
+ "headersSize": request_headers_size,
+ "bodySize": request_body_size,
+ },
+ "response": {
+ "status": flow.response.code,
+ "statusText": flow.response.msg,
+ "httpVersion": response_http_version,
+ "cookies": response_cookies,
+ "headers": response_headers,
+ "content": {
+ "size": response_body_size,
+ "compression": response_body_compression,
+ "mimeType": response_mime_type},
+ "redirectURL": response_redirect_url,
+ "headersSize": response_headers_size,
+ "bodySize": response_body_size,
+ },
+ "cache": {},
+ "timings": timings,
+ })
# If the current url is in the page list of context.HARLog or does not have
# a referrer we add it as a new pages object.
- if flow.request.url in context.HARLog.get_page_list() or flow.request.headers.get('Referer', None) is None:
+ if flow.request.url in context.HARLog.get_page_list() or flow.request.headers.get(
+ 'Referer',
+ None) is None:
page_id = context.HARLog.create_page_id()
context.HARLog.add(
HAR.pages({
@@ -231,4 +251,4 @@ def print_attributes(obj, filter_string=None, hide_privates=False):
if filter_string is not None and filter_string not in attr:
continue
value = getattr(obj, attr)
- print "%s.%s" % ('obj', attr), value, type(value)
\ No newline at end of file
+ print "%s.%s" % ('obj', attr), value, type(value)
diff --git a/examples/iframe_injector.py b/examples/iframe_injector.py
index 72563bed1..b2fa2d26f 100644
--- a/examples/iframe_injector.py
+++ b/examples/iframe_injector.py
@@ -16,7 +16,12 @@ def response(context, flow):
with decoded(flow.response): # Remove content encoding (gzip, ...)
html = BeautifulSoup(flow.response.content)
if html.body:
- iframe = html.new_tag("iframe", src=context.iframe_url, frameborder=0, height=0, width=0)
+ iframe = html.new_tag(
+ "iframe",
+ src=context.iframe_url,
+ frameborder=0,
+ height=0,
+ width=0)
html.body.insert(0, iframe)
flow.response.content = str(html)
- context.log("Iframe inserted.")
\ No newline at end of file
+ context.log("Iframe inserted.")
diff --git a/examples/ignore_websocket.py b/examples/ignore_websocket.py
index f7a94bdf3..b52f18f83 100644
--- a/examples/ignore_websocket.py
+++ b/examples/ignore_websocket.py
@@ -24,6 +24,7 @@ def done(context):
HTTPRequest._headers_to_strip_off.append("Connection")
HTTPRequest._headers_to_strip_off.append("Upgrade")
+
@concurrent
def response(context, flow):
value = flow.response.headers.get_first("Connection", None)
@@ -32,4 +33,4 @@ def response(context, flow):
flow.client_conn.send(flow.response.assemble())
# ...and then delegate to tcp passthrough.
TCPHandler(flow.live.c, log=False).handle_messages()
- flow.reply(KILL)
\ No newline at end of file
+ flow.reply(KILL)
diff --git a/examples/mitmproxywrapper.py b/examples/mitmproxywrapper.py
index 2f3750e9f..239642d7a 100755
--- a/examples/mitmproxywrapper.py
+++ b/examples/mitmproxywrapper.py
@@ -14,23 +14,29 @@ import contextlib
import os
import sys
+
class Wrapper(object):
-
+
def __init__(self, port, extra_arguments=None):
self.port = port
self.extra_arguments = extra_arguments
def run_networksetup_command(self, *arguments):
- return subprocess.check_output(['sudo', 'networksetup'] + list(arguments))
+ return subprocess.check_output(
+ ['sudo', 'networksetup'] + list(arguments))
def proxy_state_for_service(self, service):
- state = self.run_networksetup_command('-getwebproxy', service).splitlines()
+ state = self.run_networksetup_command(
+ '-getwebproxy',
+ service).splitlines()
return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state])
def enable_proxy_for_service(self, service):
print 'Enabling proxy on {}...'.format(service)
for subcommand in ['-setwebproxy', '-setsecurewebproxy']:
- self.run_networksetup_command(subcommand, service, '127.0.0.1', str(self.port))
+ self.run_networksetup_command(
+ subcommand, service, '127.0.0.1', str(
+ self.port))
def disable_proxy_for_service(self, service):
print 'Disabling proxy on {}...'.format(service)
@@ -39,14 +45,20 @@ class Wrapper(object):
def interface_name_to_service_name_map(self):
order = self.run_networksetup_command('-listnetworkserviceorder')
- mapping = re.findall(r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$', order, re.MULTILINE)
+ mapping = re.findall(
+ r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$',
+ order,
+ re.MULTILINE)
return dict([(b, a) for (a, b) in mapping])
def run_command_with_input(self, command, input):
- popen = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ popen = subprocess.Popen(
+ command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
(stdout, stderr) = popen.communicate(input)
return stdout
-
+
def primary_interace_name(self):
scutil_script = 'get State:/Network/Global/IPv4\nd.show\n'
stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
@@ -54,13 +66,15 @@ class Wrapper(object):
return interface
def primary_service_name(self):
- return self.interface_name_to_service_name_map()[self.primary_interace_name()]
+ return self.interface_name_to_service_name_map()[
+ self.primary_interace_name()]
def proxy_enabled_for_service(self, service):
return self.proxy_state_for_service(service)['Enabled'] == 'Yes'
def toggle_proxy(self):
- new_state = not self.proxy_enabled_for_service(self.primary_service_name())
+ new_state = not self.proxy_enabled_for_service(
+ self.primary_service_name())
for service_name in self.connected_service_names():
if self.proxy_enabled_for_service(service_name) and not new_state:
self.disable_proxy_for_service(service_name)
@@ -74,8 +88,11 @@ class Wrapper(object):
service_names = []
for service_id in service_ids:
- scutil_script = 'show Setup:/Network/Service/{}\n'.format(service_id)
- stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
+ scutil_script = 'show Setup:/Network/Service/{}\n'.format(
+ service_id)
+ stdout = self.run_command_with_input(
+ '/usr/sbin/scutil',
+ scutil_script)
service_name, = re.findall(r'UserDefinedName\s*:\s*(.+)', stdout)
service_names.append(service_name)
@@ -102,7 +119,7 @@ class Wrapper(object):
for service_name in connected_service_names:
if not self.proxy_enabled_for_service(service_name):
self.enable_proxy_for_service(service_name)
-
+
yield
for service_name in connected_service_names:
@@ -119,15 +136,23 @@ class Wrapper(object):
def main(cls):
parser = argparse.ArgumentParser(
description='Helper tool for OS X proxy configuration and mitmproxy.',
- epilog='Any additional arguments will be passed on unchanged to mitmproxy.'
- )
- parser.add_argument('-t', '--toggle', action='store_true', help='just toggle the proxy configuration')
+ epilog='Any additional arguments will be passed on unchanged to mitmproxy.')
+ parser.add_argument(
+ '-t',
+ '--toggle',
+ action='store_true',
+ help='just toggle the proxy configuration')
# parser.add_argument('--honeyproxy', action='store_true', help='run honeyproxy instead of mitmproxy')
- parser.add_argument('-p', '--port', type=int, help='override the default port of 8080', default=8080)
+ parser.add_argument(
+ '-p',
+ '--port',
+ type=int,
+ help='override the default port of 8080',
+ default=8080)
args, extra_arguments = parser.parse_known_args()
wrapper = cls(port=args.port, extra_arguments=extra_arguments)
-
+
if args.toggle:
wrapper.toggle_proxy()
# elif args.honeyproxy:
@@ -139,4 +164,3 @@ class Wrapper(object):
if __name__ == '__main__':
Wrapper.ensure_superuser()
Wrapper.main()
-
diff --git a/examples/modify_form.py b/examples/modify_form.py
index 3d93e392d..37ba2faca 100644
--- a/examples/modify_form.py
+++ b/examples/modify_form.py
@@ -1,6 +1,7 @@
def request(context, flow):
- if "application/x-www-form-urlencoded" in flow.request.headers["content-type"]:
+ if "application/x-www-form-urlencoded" in flow.request.headers[
+ "content-type"]:
form = flow.request.get_form_urlencoded()
form["mitmproxy"] = ["rocks"]
- flow.request.set_form_urlencoded(form)
\ No newline at end of file
+ flow.request.set_form_urlencoded(form)
diff --git a/examples/modify_querystring.py b/examples/modify_querystring.py
index 1dd4807ae..7f31a48fb 100644
--- a/examples/modify_querystring.py
+++ b/examples/modify_querystring.py
@@ -3,4 +3,4 @@ def request(context, flow):
q = flow.request.get_query()
if q:
q["mitmproxy"] = ["rocks"]
- flow.request.set_query(q)
\ No newline at end of file
+ flow.request.set_query(q)
diff --git a/examples/modify_response_body.py b/examples/modify_response_body.py
index 4afd04214..68d3d4abd 100644
--- a/examples/modify_response_body.py
+++ b/examples/modify_response_body.py
@@ -6,10 +6,13 @@ from libmproxy.protocol.http import decoded
def start(context, argv):
if len(argv) != 3:
raise ValueError('Usage: -s "modify-response-body.py old new"')
- # You may want to use Python's argparse for more sophisticated argument parsing.
+ # You may want to use Python's argparse for more sophisticated argument
+ # parsing.
context.old, context.new = argv[1], argv[2]
def response(context, flow):
with decoded(flow.response): # automatically decode gzipped responses.
- flow.response.content = flow.response.content.replace(context.old, context.new)
\ No newline at end of file
+ flow.response.content = flow.response.content.replace(
+ context.old,
+ context.new)
diff --git a/examples/nonblocking.py b/examples/nonblocking.py
index 481c0407f..f96b7f40a 100644
--- a/examples/nonblocking.py
+++ b/examples/nonblocking.py
@@ -6,4 +6,4 @@ from libmproxy.script import concurrent
def request(context, flow):
print "handle request: %s%s" % (flow.request.host, flow.request.path)
time.sleep(5)
- print "start request: %s%s" % (flow.request.host, flow.request.path)
\ No newline at end of file
+ print "start request: %s%s" % (flow.request.host, flow.request.path)
diff --git a/examples/proxapp.py b/examples/proxapp.py
index d777d5225..4d8e7b582 100644
--- a/examples/proxapp.py
+++ b/examples/proxapp.py
@@ -21,4 +21,4 @@ def start(context, argv):
# SSL works too, but the magic domain needs to be resolvable from the mitmproxy machine due to mitmproxy's design.
# mitmproxy will connect to said domain and use serve its certificate (unless --no-upstream-cert is set)
# but won't send any data.
- context.app_registry.add(app, "example.com", 443)
\ No newline at end of file
+ context.app_registry.add(app, "example.com", 443)
diff --git a/examples/read_dumpfile b/examples/read_dumpfile
index f58184835..9da604cc8 100644
--- a/examples/read_dumpfile
+++ b/examples/read_dumpfile
@@ -4,7 +4,8 @@
#
from libmproxy import flow
-import json, sys
+import json
+import sys
with open("logfile", "rb") as logfile:
freader = flow.FlowReader(logfile)
@@ -14,5 +15,5 @@ with open("logfile", "rb") as logfile:
print(f.request.host)
json.dump(f.get_state(), sys.stdout, indent=4)
print ""
- except flow.FlowReadError, v:
+ except flow.FlowReadError as v:
print "Flow file corrupted. Stopped loading."
diff --git a/examples/redirect_requests.py b/examples/redirect_requests.py
index d9a3bfc5b..48512f1bb 100644
--- a/examples/redirect_requests.py
+++ b/examples/redirect_requests.py
@@ -8,7 +8,8 @@ This example shows two ways to redirect flows to other destinations.
def request(context, flow):
# pretty_host(hostheader=True) takes the Host: header of the request into account,
- # which is useful in transparent mode where we usually only have the IP otherwise.
+ # which is useful in transparent mode where we usually only have the IP
+ # otherwise.
# Method 1: Answer with a locally generated response
if flow.request.pretty_host(hostheader=True).endswith("example.com"):
diff --git a/examples/stream.py b/examples/stream.py
index 7d5efc1e9..3adbe4372 100644
--- a/examples/stream.py
+++ b/examples/stream.py
@@ -2,4 +2,4 @@ def responseheaders(context, flow):
"""
Enables streaming for all responses.
"""
- flow.response.stream = True
\ No newline at end of file
+ flow.response.stream = True
diff --git a/examples/stream_modify.py b/examples/stream_modify.py
index 56d26e6d9..e3f1f3cf7 100644
--- a/examples/stream_modify.py
+++ b/examples/stream_modify.py
@@ -11,7 +11,7 @@ Be aware that content replacement isn't trivial:
def modify(chunks):
"""
chunks is a generator that can be used to iterate over all chunks.
- Each chunk is a (prefix, content, suffix) tuple.
+ Each chunk is a (prefix, content, suffix) tuple.
For example, in the case of chunked transfer encoding: ("3\r\n","foo","\r\n")
"""
for prefix, content, suffix in chunks:
@@ -19,4 +19,4 @@ def modify(chunks):
def responseheaders(context, flow):
- flow.response.stream = modify
\ No newline at end of file
+ flow.response.stream = modify
diff --git a/examples/stub.py b/examples/stub.py
index c5cdad9cc..d5502a47c 100644
--- a/examples/stub.py
+++ b/examples/stub.py
@@ -1,12 +1,15 @@
"""
This is a script stub, with definitions for all events.
"""
+
+
def start(context, argv):
"""
Called once on script startup, before any other events.
"""
context.log("start")
+
def clientconnect(context, conn_handler):
"""
Called when a client initiates a connection to the proxy. Note that a
@@ -14,6 +17,7 @@ def clientconnect(context, conn_handler):
"""
context.log("clientconnect")
+
def serverconnect(context, conn_handler):
"""
Called when the proxy initiates a connection to the target server. Note that a
@@ -21,6 +25,7 @@ def serverconnect(context, conn_handler):
"""
context.log("serverconnect")
+
def request(context, flow):
"""
Called when a client request has been received.
@@ -36,12 +41,14 @@ def responseheaders(context, flow):
"""
context.log("responseheaders")
+
def response(context, flow):
"""
Called when a server response has been received.
"""
context.log("response")
+
def error(context, flow):
"""
Called when a flow error has occured, e.g. invalid server responses, or
@@ -50,12 +57,14 @@ def error(context, flow):
"""
context.log("error")
+
def clientdisconnect(context, conn_handler):
"""
Called when a client disconnects from the proxy.
"""
context.log("clientdisconnect")
+
def done(context):
"""
Called once on script shutdown, after any other events.
diff --git a/examples/upsidedownternet.py b/examples/upsidedownternet.py
index 738eb11ff..a6de97e4c 100644
--- a/examples/upsidedownternet.py
+++ b/examples/upsidedownternet.py
@@ -2,6 +2,7 @@ import cStringIO
from PIL import Image
from libmproxy.protocol.http import decoded
+
def response(context, flow):
if flow.response.headers.get_first("content-type", "").startswith("image"):
with decoded(flow.response): # automatically decode gzipped responses.
@@ -13,4 +14,4 @@ def response(context, flow):
flow.response.content = s2.getvalue()
flow.response.headers["content-type"] = ["image/png"]
except: # Unknown image types etc.
- pass
\ No newline at end of file
+ pass
diff --git a/libmproxy/cmdline.py b/libmproxy/cmdline.py
index c68745a9c..eb24bed7a 100644
--- a/libmproxy/cmdline.py
+++ b/libmproxy/cmdline.py
@@ -65,7 +65,7 @@ def parse_replace_hook(s):
patt, regex, replacement = _parse_hook(s)
try:
re.compile(regex)
- except re.error, e:
+ except re.error as e:
raise ParseException("Malformed replacement regex: %s" % str(e.message))
return patt, regex, replacement
@@ -127,7 +127,6 @@ def parse_server_spec_special(url):
return ret
-
def get_common_options(options):
stickycookie, stickyauth = None, None
if options.stickycookie_filt:
@@ -142,17 +141,17 @@ def get_common_options(options):
for i in options.replace:
try:
p = parse_replace_hook(i)
- except ParseException, e:
+ except ParseException as e:
raise configargparse.ArgumentTypeError(e.message)
reps.append(p)
for i in options.replace_file:
try:
patt, rex, path = parse_replace_hook(i)
- except ParseException, e:
+ except ParseException as e:
raise configargparse.ArgumentTypeError(e.message)
try:
v = open(path, "rb").read()
- except IOError, e:
+ except IOError as e:
raise configargparse.ArgumentTypeError(
"Could not read replace file: %s" % path
)
@@ -162,7 +161,7 @@ def get_common_options(options):
for i in options.setheader:
try:
p = parse_setheader(i)
- except ParseException, e:
+ except ParseException as e:
raise configargparse.ArgumentTypeError(e.message)
setheaders.append(p)
@@ -221,7 +220,7 @@ def common_options(parser):
parser.add_argument(
"--cadir",
action="store", type=str, dest="cadir", default=config.CA_DIR,
- help="Location of the default mitmproxy CA files. (%s)"%config.CA_DIR
+ help="Location of the default mitmproxy CA files. (%s)" % config.CA_DIR
)
parser.add_argument(
"--host",
@@ -482,9 +481,10 @@ def common_options(parser):
)
group.add_argument(
"--replay-ignore-host",
- action="store_true", dest="replay_ignore_host", default=False,
- help="Ignore request's destination host while searching for a saved flow to replay"
- )
+ action="store_true",
+ dest="replay_ignore_host",
+ default=False,
+ help="Ignore request's destination host while searching for a saved flow to replay")
group = parser.add_argument_group(
"Replacements",
diff --git a/libmproxy/console/__init__.py b/libmproxy/console/__init__.py
index 527ed07d0..8f39e2835 100644
--- a/libmproxy/console/__init__.py
+++ b/libmproxy/console/__init__.py
@@ -85,10 +85,10 @@ class ConsoleState(flow.State):
return self.view[pos], pos
def get_next(self, pos):
- return self.get_from_pos(pos+1)
+ return self.get_from_pos(pos + 1)
def get_prev(self, pos):
- return self.get_from_pos(pos-1)
+ return self.get_from_pos(pos - 1)
def delete_flow(self, f):
if f in self.view and self.view.index(f) <= self.focus:
@@ -255,7 +255,7 @@ class ConsoleMaster(flow.FlowMaster):
try:
f = file(path, mode)
self.start_stream(f, None)
- except IOError, v:
+ except IOError as v:
return str(v)
self.stream_path = path
@@ -263,22 +263,24 @@ class ConsoleMaster(flow.FlowMaster):
status, val = s.run(method, f)
if val:
if status:
- self.add_event("Method %s return: %s"%(method, val), "debug")
+ self.add_event("Method %s return: %s" % (method, val), "debug")
else:
- self.add_event("Method %s error: %s"%(method, val[1]), "error")
+ self.add_event(
+ "Method %s error: %s" %
+ (method, val[1]), "error")
def run_script_once(self, command, f):
if not command:
return
- self.add_event("Running script on flow: %s"%command, "debug")
+ self.add_event("Running script on flow: %s" % command, "debug")
try:
s = script.Script(command, self)
- except script.ScriptError, v:
+ except script.ScriptError as v:
signals.status_message.send(
message = "Error loading script."
)
- self.add_event("Error loading script:\n%s"%v.args[0], "error")
+ self.add_event("Error loading script:\n%s" % v.args[0], "error")
return
if f.request:
@@ -562,7 +564,7 @@ class ConsoleMaster(flow.FlowMaster):
for i in flows:
fw.add(i)
f.close()
- except IOError, v:
+ except IOError as v:
signals.status_message.send(message=v.strerror)
def save_one_flow(self, path, flow):
@@ -575,13 +577,13 @@ class ConsoleMaster(flow.FlowMaster):
if not path:
return
ret = self.load_flows_path(path)
- return ret or "Flows loaded from %s"%path
+ return ret or "Flows loaded from %s" % path
def load_flows_path(self, path):
reterr = None
try:
flow.FlowMaster.load_flows_file(self, path)
- except flow.FlowReadError, v:
+ except flow.FlowReadError as v:
reterr = str(v)
signals.flowlist_change.send(self)
return reterr
@@ -652,7 +654,8 @@ class ConsoleMaster(flow.FlowMaster):
)
def process_flow(self, f):
- if self.state.intercept and f.match(self.state.intercept) and not f.request.is_replay:
+ if self.state.intercept and f.match(
+ self.state.intercept) and not f.request.is_replay:
f.intercept(self)
else:
f.reply()
@@ -674,7 +677,7 @@ class ConsoleMaster(flow.FlowMaster):
self.eventlist.append(e)
if len(self.eventlist) > EVENTLOG_SIZE:
self.eventlist.pop(0)
- self.eventlist.set_focus(len(self.eventlist)-1)
+ self.eventlist.set_focus(len(self.eventlist) - 1)
# Handlers
def handle_error(self, f):
diff --git a/libmproxy/console/common.py b/libmproxy/console/common.py
index b920a11fe..3180170d5 100644
--- a/libmproxy/console/common.py
+++ b/libmproxy/console/common.py
@@ -164,7 +164,7 @@ def raw_format_flow(f, focus, extended, padding):
4: "code_400",
5: "code_500",
}
- ccol = codes.get(f["resp_code"]/100, "code_other")
+ ccol = codes.get(f["resp_code"] / 100, "code_other")
resp.append(fcol(SYMBOL_RETURN, ccol))
if f["resp_is_replay"]:
resp.append(fcol(SYMBOL_REPLAY, "replay"))
@@ -200,7 +200,7 @@ def save_data(path, data, master, state):
try:
with file(path, "wb") as f:
f.write(data)
- except IOError, v:
+ except IOError as v:
signals.status_message.send(message=v.strerror)
@@ -214,7 +214,7 @@ def ask_save_overwite(path, data, master, state):
save_data(path, data, master, state)
signals.status_prompt_onekey.send(
- prompt = "'"+path+"' already exists. Overwite?",
+ prompt = "'" + path + "' already exists. Overwite?",
keys = (
("yes", "y"),
("no", "n"),
diff --git a/libmproxy/console/contentview.py b/libmproxy/console/contentview.py
index a121dfab3..2b3c6defe 100644
--- a/libmproxy/console/contentview.py
+++ b/libmproxy/console/contentview.py
@@ -21,12 +21,12 @@ from ..contrib.wbxml.ASCommandResponse import ASCommandResponse
try:
import pyamf
from pyamf import remoting, flex
-except ImportError: # pragma nocover
+except ImportError: # pragma nocover
pyamf = None
try:
import cssutils
-except ImportError: # pragma nocover
+except ImportError: # pragma nocover
cssutils = None
else:
cssutils.log.setLevel(logging.CRITICAL)
@@ -36,7 +36,7 @@ else:
cssutils.ser.prefs.indentClosingBrace = False
cssutils.ser.prefs.validOnly = False
-VIEW_CUTOFF = 1024*50
+VIEW_CUTOFF = 1024 * 50
def _view_text(content, total, limit):
@@ -59,7 +59,7 @@ def trailer(clen, txt, limit):
txt.append(
urwid.Text(
[
- ("highlight", "... %s of data not shown. Press "%netlib.utils.pretty_size(rem)),
+ ("highlight", "... %s of data not shown. Press " % netlib.utils.pretty_size(rem)),
("key", "f"),
("highlight", " to load all data.")
]
@@ -76,7 +76,7 @@ class ViewAuto:
ctype = hdrs.get_first("content-type")
if ctype:
ct = utils.parse_content_type(ctype) if ctype else None
- ct = "%s/%s"%(ct[0], ct[1])
+ ct = "%s/%s" % (ct[0], ct[1])
if ct in content_types_map:
return content_types_map[ct][0](hdrs, content, limit)
elif utils.isXML(content):
@@ -227,7 +227,7 @@ class ViewURLEncoded:
lines = utils.urldecode(content)
if lines:
body = common.format_keyvals(
- [(k+":", v) for (k, v) in lines],
+ [(k + ":", v) for (k, v) in lines],
key = "header",
val = "text"
)
@@ -304,7 +304,6 @@ if pyamf:
if not envelope:
return None
-
txt = []
for target, message in iter(envelope):
if isinstance(message, pyamf.remoting.Request):
@@ -315,13 +314,13 @@ if pyamf:
else:
txt.append(urwid.Text([
("header", "Response: "),
- ("text", "%s, code %s"%(target, message.status)),
+ ("text", "%s, code %s" % (target, message.status)),
]))
s = json.dumps(self.unpack(message), indent=4)
txt.extend(_view_text(s[:limit], len(s), limit))
- return "AMF v%s"%envelope.amfVersion, txt
+ return "AMF v%s" % envelope.amfVersion, txt
class ViewJavaScript:
@@ -375,7 +374,7 @@ class ViewImage:
return None
parts = [
("Format", str(img.format_description)),
- ("Size", "%s x %s px"%img.size),
+ ("Size", "%s x %s px" % img.size),
("Mode", str(img.mode)),
]
for i in sorted(img.info.keys()):
@@ -401,7 +400,7 @@ class ViewImage:
key = "header",
val = "text"
)
- return "%s image"%img.format, fmt
+ return "%s image" % img.format, fmt
class ViewProtobuf:
@@ -526,7 +525,7 @@ def get_content_view(viewmode, hdrItems, content, limit, logfunc, is_request):
decoded = encoding.decode(enc, content)
if decoded:
content = decoded
- msg.append("[decoded %s]"%enc)
+ msg.append("[decoded %s]" % enc)
try:
ret = viewmode(hdrs, content, limit)
# Third-party viewers can fail in unexpected ways...
diff --git a/libmproxy/console/flowdetailview.py b/libmproxy/console/flowdetailview.py
index 48845a62e..40769c953 100644
--- a/libmproxy/console/flowdetailview.py
+++ b/libmproxy/console/flowdetailview.py
@@ -34,7 +34,7 @@ def flowdetails(state, flow):
if c:
text.append(urwid.Text([("head", "Server Certificate:")]))
parts = [
- ["Type", "%s, %s bits"%c.keyinfo],
+ ["Type", "%s, %s bits" % c.keyinfo],
["SHA1 digest", c.digest("sha1")],
["Valid to", str(c.notafter)],
["Valid from", str(c.notbefore)],
diff --git a/libmproxy/console/flowlist.py b/libmproxy/console/flowlist.py
index 6ab45bad7..fd0715690 100644
--- a/libmproxy/console/flowlist.py
+++ b/libmproxy/console/flowlist.py
@@ -50,7 +50,7 @@ class EventListBox(urwid.ListBox):
elif key == "G":
self.set_focus(0)
elif key == "g":
- self.set_focus(len(self.master.eventlist)-1)
+ self.set_focus(len(self.master.eventlist) - 1)
return urwid.ListBox.keypress(self, size, key)
@@ -76,7 +76,8 @@ class BodyPile(urwid.Pile):
def keypress(self, size, key):
if key == "tab":
- self.focus_position = (self.focus_position + 1)%len(self.widget_list)
+ self.focus_position = (
+ self.focus_position + 1) % len(self.widget_list)
if self.focus_position == 1:
self.widget_list[1].header = self.active_header
else:
@@ -157,7 +158,8 @@ class ConnectionItem(urwid.WidgetWrap):
callback = self.master.server_playback_path
)
- def keypress(self, (maxcol,), key):
+ def keypress(self, xxx_todo_changeme, key):
+ (maxcol,) = xxx_todo_changeme
key = common.shortcuts(key)
if key == "a":
self.flow.accept_intercept(self.master)
diff --git a/libmproxy/console/flowview.py b/libmproxy/console/flowview.py
index 632b725ef..43a40d695 100644
--- a/libmproxy/console/flowview.py
+++ b/libmproxy/console/flowview.py
@@ -24,42 +24,42 @@ def _mkhelp():
("e", "edit request/response"),
("f", "load full body data"),
("m", "change body display mode for this entity"),
- (None,
- common.highlight_key("automatic", "a") +
- [("text", ": automatic detection")]
- ),
- (None,
- common.highlight_key("hex", "e") +
- [("text", ": Hex")]
- ),
- (None,
- common.highlight_key("html", "h") +
- [("text", ": HTML")]
- ),
- (None,
- common.highlight_key("image", "i") +
- [("text", ": Image")]
- ),
- (None,
- common.highlight_key("javascript", "j") +
- [("text", ": JavaScript")]
- ),
- (None,
- common.highlight_key("json", "s") +
- [("text", ": JSON")]
- ),
- (None,
- common.highlight_key("urlencoded", "u") +
- [("text", ": URL-encoded data")]
- ),
- (None,
- common.highlight_key("raw", "r") +
- [("text", ": raw data")]
- ),
- (None,
- common.highlight_key("xml", "x") +
- [("text", ": XML")]
- ),
+ (None,
+ common.highlight_key("automatic", "a") +
+ [("text", ": automatic detection")]
+ ),
+ (None,
+ common.highlight_key("hex", "e") +
+ [("text", ": Hex")]
+ ),
+ (None,
+ common.highlight_key("html", "h") +
+ [("text", ": HTML")]
+ ),
+ (None,
+ common.highlight_key("image", "i") +
+ [("text", ": Image")]
+ ),
+ (None,
+ common.highlight_key("javascript", "j") +
+ [("text", ": JavaScript")]
+ ),
+ (None,
+ common.highlight_key("json", "s") +
+ [("text", ": JSON")]
+ ),
+ (None,
+ common.highlight_key("urlencoded", "u") +
+ [("text", ": URL-encoded data")]
+ ),
+ (None,
+ common.highlight_key("raw", "r") +
+ [("text", ": raw data")]
+ ),
+ (None,
+ common.highlight_key("xml", "x") +
+ [("text", ": XML")]
+ ),
("M", "change default body display mode"),
("p", "previous flow"),
("P", "copy response(content/headers) to clipboard"),
@@ -123,13 +123,13 @@ class FlowView(tabs.Tabs):
def __init__(self, master, state, flow, tab_offset):
self.master, self.state, self.flow = master, state, flow
tabs.Tabs.__init__(self,
- [
- (self.tab_request, self.view_request),
- (self.tab_response, self.view_response),
- (self.tab_details, self.view_details),
- ],
- tab_offset
- )
+ [
+ (self.tab_request, self.view_request),
+ (self.tab_response, self.view_response),
+ (self.tab_details, self.view_details),
+ ],
+ tab_offset
+ )
self.show()
self.last_displayed_body = None
signals.flow_change.connect(self.sig_flow_change)
@@ -173,7 +173,7 @@ class FlowView(tabs.Tabs):
False
)
if full:
- limit = sys.maxint
+ limit = sys.maxsize
else:
limit = contentview.VIEW_CUTOFF
description, text_objects = cache.get(
@@ -197,7 +197,7 @@ class FlowView(tabs.Tabs):
def conn_text(self, conn):
if conn:
txt = common.format_keyvals(
- [(h+":", v) for (h, v) in conn.headers.lst],
+ [(h + ":", v) for (h, v) in conn.headers.lst],
key = "header",
val = "text"
)
@@ -217,7 +217,7 @@ class FlowView(tabs.Tabs):
" ",
('heading', "["),
('heading_key', "m"),
- ('heading', (":%s]"%viewmode.name)),
+ ('heading', (":%s]" % viewmode.name)),
],
align="right"
)
@@ -272,8 +272,9 @@ class FlowView(tabs.Tabs):
except ValueError:
return None
import BaseHTTPServer
- if BaseHTTPServer.BaseHTTPRequestHandler.responses.has_key(int(code)):
- response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[int(code)][0]
+ if int(code) in BaseHTTPServer.BaseHTTPRequestHandler.responses:
+ response.msg = BaseHTTPServer.BaseHTTPRequestHandler.responses[
+ int(code)][0]
signals.flow_change.send(self, flow = self.flow)
def set_resp_msg(self, msg):
@@ -494,7 +495,7 @@ class FlowView(tabs.Tabs):
elif key == "d":
if self.state.flow_count() == 1:
self.master.view_flowlist()
- elif self.state.view.index(self.flow) == len(self.state.view)-1:
+ elif self.state.view.index(self.flow) == len(self.state.view) - 1:
self.view_prev_flow(self.flow)
else:
self.view_next_flow(self.flow)
@@ -615,7 +616,7 @@ class FlowView(tabs.Tabs):
if conn.content:
t = conn.headers["content-type"] or [None]
t = t[0]
- if os.environ.has_key("EDITOR") or os.environ.has_key("PAGER"):
+ if "EDITOR" in os.environ or "PAGER" in os.environ:
self.master.spawn_external_viewer(conn.content, t)
else:
signals.status_message.send(
diff --git a/libmproxy/console/grideditor.py b/libmproxy/console/grideditor.py
index 5a2da59f9..b20e54e49 100644
--- a/libmproxy/console/grideditor.py
+++ b/libmproxy/console/grideditor.py
@@ -175,6 +175,7 @@ class GridWalker(urwid.ListWalker):
and errors is a set with an entry of each offset in rows that is an
error.
"""
+
def __init__(self, lst, editor):
self.lst = [(i, set([])) for i in lst]
self.editor = editor
@@ -225,7 +226,7 @@ class GridWalker(urwid.ListWalker):
def delete_focus(self):
if self.lst:
del self.lst[self.focus]
- self.focus = min(len(self.lst)-1, self.focus)
+ self.focus = min(len(self.lst) - 1, self.focus)
self._modified()
def _insert(self, pos):
@@ -266,14 +267,14 @@ class GridWalker(urwid.ListWalker):
self._modified()
def right(self):
- self.focus_col = min(self.focus_col + 1, len(self.editor.columns)-1)
+ self.focus_col = min(self.focus_col + 1, len(self.editor.columns) - 1)
self._modified()
def tab_next(self):
self.stop_edit()
- if self.focus_col < len(self.editor.columns)-1:
+ if self.focus_col < len(self.editor.columns) - 1:
self.focus_col += 1
- elif self.focus != len(self.lst)-1:
+ elif self.focus != len(self.lst) - 1:
self.focus_col = 0
self.focus += 1
self._modified()
@@ -297,14 +298,14 @@ class GridWalker(urwid.ListWalker):
self._modified()
def get_next(self, pos):
- if pos+1 >= len(self.lst):
+ if pos + 1 >= len(self.lst):
return None, None
- return GridRow(None, False, self.editor, self.lst[pos+1]), pos+1
+ return GridRow(None, False, self.editor, self.lst[pos + 1]), pos + 1
def get_prev(self, pos):
- if pos-1 < 0:
+ if pos - 1 < 0:
return None, None
- return GridRow(None, False, self.editor, self.lst[pos-1]), pos-1
+ return GridRow(None, False, self.editor, self.lst[pos - 1]), pos - 1
class GridListBox(urwid.ListBox):
@@ -387,7 +388,7 @@ class GridEditor(urwid.WidgetWrap):
d = file(p, "rb").read()
self.walker.set_current_value(d, unescaped)
self.walker._modified()
- except IOError, v:
+ except IOError as v:
return str(v)
def set_subeditor_value(self, val, focus, focus_col):
@@ -418,7 +419,7 @@ class GridEditor(urwid.WidgetWrap):
elif key == "G":
self.walker.set_focus(0)
elif key == "g":
- self.walker.set_focus(len(self.walker.lst)-1)
+ self.walker.set_focus(len(self.walker.lst) - 1)
elif key in ["h", "left"]:
self.walker.left()
elif key in ["l", "right"]:
@@ -633,7 +634,7 @@ class ScriptEditor(GridEditor):
def is_error(self, col, val):
try:
script.Script.parse_command(val)
- except script.ScriptError, v:
+ except script.ScriptError as v:
return str(v)
diff --git a/libmproxy/console/help.py b/libmproxy/console/help.py
index cbd5bef8b..4e81a566b 100644
--- a/libmproxy/console/help.py
+++ b/libmproxy/console/help.py
@@ -6,7 +6,7 @@ from . import common, signals
from .. import filt, version
footer = [
- ("heading", 'mitmproxy v%s '%version.VERSION),
+ ("heading", 'mitmproxy v%s ' % version.VERSION),
('heading_key', "q"), ":back ",
]
@@ -33,7 +33,12 @@ class HelpView(urwid.ListBox):
("pg up/down", "page up/down"),
("arrows", "up, down, left, right"),
]
- text.extend(common.format_keyvals(keys, key="key", val="text", indent=4))
+ text.extend(
+ common.format_keyvals(
+ keys,
+ key="key",
+ val="text",
+ indent=4))
text.append(urwid.Text([("head", "\n\nGlobal keys:\n")]))
keys = [
@@ -52,15 +57,15 @@ class HelpView(urwid.ListBox):
f = []
for i in filt.filt_unary:
f.append(
- ("~%s"%i.code, i.help)
+ ("~%s" % i.code, i.help)
)
for i in filt.filt_rex:
f.append(
- ("~%s regex"%i.code, i.help)
+ ("~%s regex" % i.code, i.help)
)
for i in filt.filt_int:
f.append(
- ("~%s int"%i.code, i.help)
+ ("~%s int" % i.code, i.help)
)
f.sort()
f.extend(
@@ -75,7 +80,7 @@ class HelpView(urwid.ListBox):
text.append(
urwid.Text(
- [
+ [
"\n",
("text", " Regexes are Python-style.\n"),
("text", " Regexes can be specified as quoted strings.\n"),
@@ -83,13 +88,13 @@ class HelpView(urwid.ListBox):
("text", " Expressions with no operators are regex matches against URL.\n"),
("text", " Default binary operator is &.\n"),
("head", "\n Examples:\n"),
- ]
+ ]
)
)
examples = [
- ("google\.com", "Url containing \"google.com"),
- ("~q ~b test", "Requests where body contains \"test\""),
- ("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
+ ("google\.com", "Url containing \"google.com"),
+ ("~q ~b test", "Requests where body contains \"test\""),
+ ("!(~q & ~t \"text/html\")", "Anything but requests with a text/html content type."),
]
text.extend(
common.format_keyvals(examples, key="key", val="text", indent=4)
diff --git a/libmproxy/console/options.py b/libmproxy/console/options.py
index c728123f7..58a4d4698 100644
--- a/libmproxy/console/options.py
+++ b/libmproxy/console/options.py
@@ -8,6 +8,7 @@ footer = [
('heading_key', "C"), ":clear all ",
]
+
def _mkhelp():
text = []
keys = [
diff --git a/libmproxy/console/palettes.py b/libmproxy/console/palettes.py
index 6490eb73f..ea3d1b627 100644
--- a/libmproxy/console/palettes.py
+++ b/libmproxy/console/palettes.py
@@ -270,7 +270,7 @@ class SolarizedDark(LowDark):
# Status bar & heading
heading = (sol_base2, sol_base01),
- heading_key = (sol_blue+",bold", sol_base01),
+ heading_key = (sol_blue + ",bold", sol_base01),
heading_inactive = (sol_base1, sol_base02),
# Help
diff --git a/libmproxy/console/pathedit.py b/libmproxy/console/pathedit.py
index 53cda3be5..dccec14a0 100644
--- a/libmproxy/console/pathedit.py
+++ b/libmproxy/console/pathedit.py
@@ -32,7 +32,7 @@ class _PathCompleter:
files = glob.glob(os.path.join(path, "*"))
prefix = txt
else:
- files = glob.glob(path+"*")
+ files = glob.glob(path + "*")
prefix = os.path.dirname(txt)
prefix = prefix or "./"
for f in files:
diff --git a/libmproxy/console/searchable.py b/libmproxy/console/searchable.py
index a9572ae3c..627d595d9 100644
--- a/libmproxy/console/searchable.py
+++ b/libmproxy/console/searchable.py
@@ -37,7 +37,7 @@ class Searchable(urwid.ListBox):
self.set_focus(0)
self.walker._modified()
elif key == "g":
- self.set_focus(len(self.walker)-1)
+ self.set_focus(len(self.walker) - 1)
self.walker._modified()
else:
return super(self.__class__, self).keypress(size, key)
@@ -74,11 +74,11 @@ class Searchable(urwid.ListBox):
return
# Start search at focus + 1
if backwards:
- rng = xrange(len(self.body)-1, -1, -1)
+ rng = xrange(len(self.body) - 1, -1, -1)
else:
rng = xrange(1, len(self.body) + 1)
for i in rng:
- off = (self.focus_position + i)%len(self.body)
+ off = (self.focus_position + i) % len(self.body)
w = self.body[off]
txt = self.get_text(w)
if txt and self.search_term in txt:
diff --git a/libmproxy/console/select.py b/libmproxy/console/select.py
index 61ee50e4c..bf96a785d 100644
--- a/libmproxy/console/select.py
+++ b/libmproxy/console/select.py
@@ -2,6 +2,7 @@ import urwid
from . import common
+
class _OptionWidget(urwid.WidgetWrap):
def __init__(self, option, text, shortcut, active, focus):
self.option = option
@@ -47,14 +48,14 @@ class OptionWalker(urwid.ListWalker):
return self.options[self.focus].render(True), self.focus
def get_next(self, pos):
- if pos >= len(self.options)-1:
+ if pos >= len(self.options) - 1:
return None, None
- return self.options[pos+1].render(False), pos+1
+ return self.options[pos + 1].render(False), pos + 1
def get_prev(self, pos):
if pos <= 0:
return None, None
- return self.options[pos-1].render(False), pos-1
+ return self.options[pos - 1].render(False), pos - 1
class Heading:
@@ -69,6 +70,8 @@ class Heading:
_neg = lambda: False
+
+
class Option:
def __init__(self, text, shortcut, getstate=None, activate=None):
self.text = text
@@ -77,7 +80,12 @@ class Option:
self.activate = activate or _neg
def render(self, focus):
- return _OptionWidget(self, self.text, self.shortcut, self.getstate(), focus)
+ return _OptionWidget(
+ self,
+ self.text,
+ self.shortcut,
+ self.getstate(),
+ focus)
class Select(urwid.ListBox):
@@ -92,7 +100,7 @@ class Select(urwid.ListBox):
for i in options:
if hasattr(i, "shortcut") and i.shortcut:
if i.shortcut in self.keymap:
- raise ValueError("Duplicate shortcut key: %s"%i.shortcut)
+ raise ValueError("Duplicate shortcut key: %s" % i.shortcut)
self.keymap[i.shortcut] = i
def keypress(self, size, key):
diff --git a/libmproxy/console/statusbar.py b/libmproxy/console/statusbar.py
index 37ceef946..7eb2131be 100644
--- a/libmproxy/console/statusbar.py
+++ b/libmproxy/console/statusbar.py
@@ -58,7 +58,7 @@ class ActionBar(urwid.WidgetWrap):
mkup = []
for i, e in enumerate(keys):
mkup.extend(common.highlight_key(e[0], e[1]))
- if i < len(keys)-1:
+ if i < len(keys) - 1:
mkup.append(",")
prompt.extend(mkup)
prompt.append(")? ")
@@ -136,14 +136,14 @@ class StatusBar(urwid.WidgetWrap):
if self.master.client_playback:
r.append("[")
r.append(("heading_key", "cplayback"))
- r.append(":%s to go]"%self.master.client_playback.count())
+ r.append(":%s to go]" % self.master.client_playback.count())
if self.master.server_playback:
r.append("[")
r.append(("heading_key", "splayback"))
if self.master.nopop:
- r.append(":%s in file]"%self.master.server_playback.count())
+ r.append(":%s in file]" % self.master.server_playback.count())
else:
- r.append(":%s to go]"%self.master.server_playback.count())
+ r.append(":%s to go]" % self.master.server_playback.count())
if self.master.get_ignore_filter():
r.append("[")
r.append(("heading_key", "I"))
@@ -155,23 +155,23 @@ class StatusBar(urwid.WidgetWrap):
if self.master.state.intercept_txt:
r.append("[")
r.append(("heading_key", "i"))
- r.append(":%s]"%self.master.state.intercept_txt)
+ r.append(":%s]" % self.master.state.intercept_txt)
if self.master.state.limit_txt:
r.append("[")
r.append(("heading_key", "l"))
- r.append(":%s]"%self.master.state.limit_txt)
+ r.append(":%s]" % self.master.state.limit_txt)
if self.master.stickycookie_txt:
r.append("[")
r.append(("heading_key", "t"))
- r.append(":%s]"%self.master.stickycookie_txt)
+ r.append(":%s]" % self.master.stickycookie_txt)
if self.master.stickyauth_txt:
r.append("[")
r.append(("heading_key", "u"))
- r.append(":%s]"%self.master.stickyauth_txt)
+ r.append(":%s]" % self.master.stickyauth_txt)
if self.master.state.default_body_view.name != "Auto":
r.append("[")
r.append(("heading_key", "M"))
- r.append(":%s]"%self.master.state.default_body_view.name)
+ r.append(":%s]" % self.master.state.default_body_view.name)
opts = []
if self.master.anticache:
@@ -196,22 +196,22 @@ class StatusBar(urwid.WidgetWrap):
)
if opts:
- r.append("[%s]"%(":".join(opts)))
+ r.append("[%s]" % (":".join(opts)))
if self.master.server.config.mode in ["reverse", "upstream"]:
dst = self.master.server.config.mode.dst
scheme = "https" if dst[0] else "http"
if dst[1] != dst[0]:
scheme += "2https" if dst[1] else "http"
- r.append("[dest:%s]"%utils.unparse_url(scheme, *dst[2:]))
+ r.append("[dest:%s]" % utils.unparse_url(scheme, *dst[2:]))
if self.master.scripts:
r.append("[")
r.append(("heading_key", "s"))
- r.append("cripts:%s]"%len(self.master.scripts))
+ r.append("cripts:%s]" % len(self.master.scripts))
# r.append("[lt:%0.3f]"%self.master.looptime)
if self.master.stream:
- r.append("[W:%s]"%self.master.stream_path)
+ r.append("[W:%s]" % self.master.stream_path)
return r
@@ -222,14 +222,14 @@ class StatusBar(urwid.WidgetWrap):
else:
offset = min(self.master.state.focus + 1, fc)
t = [
- ('heading', ("[%s/%s]"%(offset, fc)).ljust(9))
+ ('heading', ("[%s/%s]" % (offset, fc)).ljust(9))
]
if self.master.server.bound:
host = self.master.server.address.host
if host == "0.0.0.0":
host = "*"
- boundaddr = "[%s:%s]"%(host, self.master.server.address.port)
+ boundaddr = "[%s:%s]" % (host, self.master.server.address.port)
else:
boundaddr = ""
t.extend(self.get_status())
diff --git a/libmproxy/console/tabs.py b/libmproxy/console/tabs.py
index 2c46e59e1..953f6b12f 100644
--- a/libmproxy/console/tabs.py
+++ b/libmproxy/console/tabs.py
@@ -1,5 +1,6 @@
import urwid
+
class Tabs(urwid.WidgetWrap):
def __init__(self, tabs, tab_offset=0):
urwid.WidgetWrap.__init__(self, "")
@@ -15,10 +16,10 @@ class Tabs(urwid.WidgetWrap):
def keypress(self, size, key):
if key in ["tab", "l"]:
- self.tab_offset = (self.tab_offset + 1)%(len(self.tabs))
+ self.tab_offset = (self.tab_offset + 1) % (len(self.tabs))
self.show()
elif key == "h":
- self.tab_offset = (self.tab_offset - 1)%(len(self.tabs))
+ self.tab_offset = (self.tab_offset - 1) % (len(self.tabs))
self.show()
return self._w.keypress(size, key)
diff --git a/libmproxy/controller.py b/libmproxy/controller.py
index 9ca89184f..98a3aec78 100644
--- a/libmproxy/controller.py
+++ b/libmproxy/controller.py
@@ -1,11 +1,14 @@
from __future__ import absolute_import
-import Queue, threading
+import Queue
+import threading
+
class DummyReply:
"""
A reply object that does nothing. Useful when we need an object to seem
like it has a channel, and during testing.
"""
+
def __init__(self):
self.acked = False
@@ -19,6 +22,7 @@ class Reply:
This object is used to respond to the message through the return
channel.
"""
+
def __init__(self, obj):
self.obj = obj
self.q = Queue.Queue()
@@ -67,11 +71,13 @@ class Slave(threading.Thread):
Slaves get a channel end-point through which they can send messages to
the master.
"""
+
def __init__(self, channel, server):
self.channel, self.server = channel, server
self.server.set_channel(channel)
threading.Thread.__init__(self)
- self.name = "SlaveThread (%s:%s)" % (self.server.address.host, self.server.address.port)
+ self.name = "SlaveThread (%s:%s)" % (
+ self.server.address.host, self.server.address.port)
def run(self):
self.server.serve_forever()
@@ -81,6 +87,7 @@ class Master(object):
"""
Masters get and respond to messages from slaves.
"""
+
def __init__(self, server):
"""
server may be None if no server is needed.
diff --git a/libmproxy/dump.py b/libmproxy/dump.py
index ac9c273f1..ee8c65a0b 100644
--- a/libmproxy/dump.py
+++ b/libmproxy/dump.py
@@ -53,7 +53,7 @@ class Options(object):
def str_response(resp):
- r = "%s %s"%(resp.code, resp.msg)
+ r = "%s %s" % (resp.code, resp.msg)
if resp.is_replay:
r = "[replay] " + r
return r
@@ -64,7 +64,7 @@ def str_request(f, showhost):
c = f.client_conn.address.host
else:
c = "[replay]"
- r = "%s %s %s"%(c, f.request.method, f.request.pretty_url(showhost))
+ r = "%s %s %s" % (c, f.request.method, f.request.pretty_url(showhost))
if f.request.stickycookie:
r = "[stickycookie] " + r
return r
@@ -102,7 +102,7 @@ class DumpMaster(flow.FlowMaster):
try:
f = file(path, options.outfile[1])
self.start_stream(f, self.filt)
- except IOError, v:
+ except IOError as v:
raise DumpError(v.strerror)
if options.replacements:
@@ -140,7 +140,7 @@ class DumpMaster(flow.FlowMaster):
if options.rfile:
try:
self.load_flows_file(options.rfile)
- except flow.FlowReadError, v:
+ except flow.FlowReadError as v:
self.add_event("Flow file corrupted.", "error")
raise DumpError(v)
@@ -181,12 +181,18 @@ class DumpMaster(flow.FlowMaster):
if not utils.isBin(content):
try:
jsn = json.loads(content)
- print(self.indent(4, json.dumps(jsn, indent=2)), file=self.outfile)
+ print(
+ self.indent(
+ 4,
+ json.dumps(
+ jsn,
+ indent=2)),
+ file=self.outfile)
except ValueError:
print(self.indent(4, content), file=self.outfile)
else:
d = netlib.utils.hexdump(content)
- d = "\n".join("%s\t%s %s"%i for i in d)
+ d = "\n".join("%s\t%s %s" % i for i in d)
print(self.indent(4, d), file=self.outfile)
if self.o.flow_detail >= 2:
print("", file=self.outfile)
@@ -208,7 +214,12 @@ class DumpMaster(flow.FlowMaster):
sz = "(content missing)"
else:
sz = netlib.utils.pretty_size(len(f.response.content))
- print(" << %s %s" % (str_response(f.response), sz), file=self.outfile)
+ print(
+ " << %s %s" %
+ (str_response(
+ f.response),
+ sz),
+ file=self.outfile)
self._print_message(f.response)
if f.error:
diff --git a/libmproxy/encoding.py b/libmproxy/encoding.py
index 0fd908700..f107eb5f4 100644
--- a/libmproxy/encoding.py
+++ b/libmproxy/encoding.py
@@ -3,12 +3,14 @@
"""
from __future__ import absolute_import
import cStringIO
-import gzip, zlib
+import gzip
+import zlib
__ALL__ = ["ENCODINGS"]
ENCODINGS = set(["identity", "gzip", "deflate"])
+
def decode(e, content):
encoding_map = {
"identity": identity,
@@ -19,6 +21,7 @@ def decode(e, content):
return None
return encoding_map[e](content)
+
def encode(e, content):
encoding_map = {
"identity": identity,
@@ -29,6 +32,7 @@ def encode(e, content):
return None
return encoding_map[e](content)
+
def identity(content):
"""
Returns content unchanged. Identity is the default value of
@@ -36,6 +40,7 @@ def identity(content):
"""
return content
+
def decode_gzip(content):
gfile = gzip.GzipFile(fileobj=cStringIO.StringIO(content))
try:
@@ -43,6 +48,7 @@ def decode_gzip(content):
except (IOError, EOFError):
return None
+
def encode_gzip(content):
s = cStringIO.StringIO()
gf = gzip.GzipFile(fileobj=s, mode='wb')
@@ -50,6 +56,7 @@ def encode_gzip(content):
gf.close()
return s.getvalue()
+
def decode_deflate(content):
"""
Returns decompressed data for DEFLATE. Some servers may respond with
@@ -67,6 +74,7 @@ def decode_deflate(content):
except zlib.error:
return None
+
def encode_deflate(content):
"""
Returns compressed content, always including zlib header and checksum.
diff --git a/libmproxy/filt.py b/libmproxy/filt.py
index 40b2f6c95..3081eb943 100644
--- a/libmproxy/filt.py
+++ b/libmproxy/filt.py
@@ -32,16 +32,17 @@
rex Equivalent to ~u rex
"""
from __future__ import absolute_import
-import re, sys
+import re
+import sys
from .contrib import pyparsing as pp
from .protocol.http import decoded
class _Token:
def dump(self, indent=0, fp=sys.stdout):
- print >> fp, "\t"*indent, self.__class__.__name__,
+ print >> fp, "\t" * indent, self.__class__.__name__,
if hasattr(self, "expr"):
- print >> fp, "(%s)"%self.expr,
+ print >> fp, "(%s)" % self.expr,
print >> fp
@@ -54,6 +55,7 @@ class _Action(_Token):
class FErr(_Action):
code = "e"
help = "Match error"
+
def __call__(self, f):
return True if f.error else False
@@ -61,6 +63,7 @@ class FErr(_Action):
class FReq(_Action):
code = "q"
help = "Match request with no response"
+
def __call__(self, f):
if not f.response:
return True
@@ -69,6 +72,7 @@ class FReq(_Action):
class FResp(_Action):
code = "s"
help = "Match response"
+
def __call__(self, f):
return True if f.response else False
@@ -79,7 +83,7 @@ class _Rex(_Action):
try:
self.re = re.compile(self.expr)
except:
- raise ValueError, "Cannot compile expression."
+ raise ValueError("Cannot compile expression.")
def _check_content_type(expr, o):
@@ -100,6 +104,7 @@ class FAsset(_Action):
"image/.*",
"application/x-shockwave-flash"
]
+
def __call__(self, f):
if f.response:
for i in self.ASSET_TYPES:
@@ -111,6 +116,7 @@ class FAsset(_Action):
class FContentType(_Rex):
code = "t"
help = "Content-type header"
+
def __call__(self, f):
if _check_content_type(self.expr, f.request):
return True
@@ -122,6 +128,7 @@ class FContentType(_Rex):
class FRequestContentType(_Rex):
code = "tq"
help = "Request Content-Type header"
+
def __call__(self, f):
return _check_content_type(self.expr, f.request)
@@ -129,6 +136,7 @@ class FRequestContentType(_Rex):
class FResponseContentType(_Rex):
code = "ts"
help = "Response Content-Type header"
+
def __call__(self, f):
if f.response:
return _check_content_type(self.expr, f.response)
@@ -138,6 +146,7 @@ class FResponseContentType(_Rex):
class FHead(_Rex):
code = "h"
help = "Header"
+
def __call__(self, f):
if f.request.headers.match_re(self.expr):
return True
@@ -149,6 +158,7 @@ class FHead(_Rex):
class FHeadRequest(_Rex):
code = "hq"
help = "Request header"
+
def __call__(self, f):
if f.request.headers.match_re(self.expr):
return True
@@ -157,6 +167,7 @@ class FHeadRequest(_Rex):
class FHeadResponse(_Rex):
code = "hs"
help = "Response header"
+
def __call__(self, f):
if f.response and f.response.headers.match_re(self.expr):
return True
@@ -165,6 +176,7 @@ class FHeadResponse(_Rex):
class FBod(_Rex):
code = "b"
help = "Body"
+
def __call__(self, f):
if f.request and f.request.content:
with decoded(f.request):
@@ -180,6 +192,7 @@ class FBod(_Rex):
class FBodRequest(_Rex):
code = "bq"
help = "Request body"
+
def __call__(self, f):
if f.request and f.request.content:
with decoded(f.request):
@@ -190,6 +203,7 @@ class FBodRequest(_Rex):
class FBodResponse(_Rex):
code = "bs"
help = "Response body"
+
def __call__(self, f):
if f.response and f.response.content:
with decoded(f.response):
@@ -200,6 +214,7 @@ class FBodResponse(_Rex):
class FMethod(_Rex):
code = "m"
help = "Method"
+
def __call__(self, f):
return bool(re.search(self.expr, f.request.method, re.IGNORECASE))
@@ -207,6 +222,7 @@ class FMethod(_Rex):
class FDomain(_Rex):
code = "d"
help = "Domain"
+
def __call__(self, f):
return bool(re.search(self.expr, f.request.host, re.IGNORECASE))
@@ -215,6 +231,7 @@ class FUrl(_Rex):
code = "u"
help = "URL"
# FUrl is special, because it can be "naked".
+
@classmethod
def make(klass, s, loc, toks):
if len(toks) > 1:
@@ -233,6 +250,7 @@ class _Int(_Action):
class FCode(_Int):
code = "c"
help = "HTTP response code"
+
def __call__(self, f):
if f.response and f.response.code == self.num:
return True
@@ -243,9 +261,9 @@ class FAnd(_Token):
self.lst = lst
def dump(self, indent=0, fp=sys.stdout):
- print >> fp, "\t"*indent, self.__class__.__name__
+ print >> fp, "\t" * indent, self.__class__.__name__
for i in self.lst:
- i.dump(indent+1, fp)
+ i.dump(indent + 1, fp)
def __call__(self, f):
return all(i(f) for i in self.lst)
@@ -256,9 +274,9 @@ class FOr(_Token):
self.lst = lst
def dump(self, indent=0, fp=sys.stdout):
- print >> fp, "\t"*indent, self.__class__.__name__
+ print >> fp, "\t" * indent, self.__class__.__name__
for i in self.lst:
- i.dump(indent+1, fp)
+ i.dump(indent + 1, fp)
def __call__(self, f):
return any(i(f) for i in self.lst)
@@ -269,7 +287,7 @@ class FNot(_Token):
self.itm = itm[0]
def dump(self, indent=0, fp=sys.stdout):
- print >> fp, "\t"*indent, self.__class__.__name__
+ print >> fp, "\t" * indent, self.__class__.__name__
self.itm.dump(indent + 1, fp)
def __call__(self, f):
@@ -299,26 +317,28 @@ filt_rex = [
filt_int = [
FCode
]
+
+
def _make():
# Order is important - multi-char expressions need to come before narrow
# ones.
parts = []
for klass in filt_unary:
- f = pp.Literal("~%s"%klass.code)
+ f = pp.Literal("~%s" % klass.code)
f.setParseAction(klass.make)
parts.append(f)
- simplerex = "".join(c for c in pp.printables if c not in "()~'\"")
+ simplerex = "".join(c for c in pp.printables if c not in "()~'\"")
rex = pp.Word(simplerex) |\
- pp.QuotedString("\"", escChar='\\') |\
- pp.QuotedString("'", escChar='\\')
+ pp.QuotedString("\"", escChar='\\') |\
+ pp.QuotedString("'", escChar='\\')
for klass in filt_rex:
- f = pp.Literal("~%s"%klass.code) + rex.copy()
+ f = pp.Literal("~%s" % klass.code) + rex.copy()
f.setParseAction(klass.make)
parts.append(f)
for klass in filt_int:
- f = pp.Literal("~%s"%klass.code) + pp.Word(pp.nums)
+ f = pp.Literal("~%s" % klass.code) + pp.Word(pp.nums)
f.setParseAction(klass.make)
parts.append(f)
@@ -328,14 +348,20 @@ def _make():
parts.append(f)
atom = pp.MatchFirst(parts)
- expr = pp.operatorPrecedence(
- atom,
- [
- (pp.Literal("!").suppress(), 1, pp.opAssoc.RIGHT, lambda x: FNot(*x)),
- (pp.Literal("&").suppress(), 2, pp.opAssoc.LEFT, lambda x: FAnd(*x)),
- (pp.Literal("|").suppress(), 2, pp.opAssoc.LEFT, lambda x: FOr(*x)),
- ]
- )
+ expr = pp.operatorPrecedence(atom,
+ [(pp.Literal("!").suppress(),
+ 1,
+ pp.opAssoc.RIGHT,
+ lambda x: FNot(*x)),
+ (pp.Literal("&").suppress(),
+ 2,
+ pp.opAssoc.LEFT,
+ lambda x: FAnd(*x)),
+ (pp.Literal("|").suppress(),
+ 2,
+ pp.opAssoc.LEFT,
+ lambda x: FOr(*x)),
+ ])
expr = pp.OneOrMore(expr)
return expr.setParseAction(lambda x: FAnd(x) if len(x) != 1 else x)
bnf = _make()
@@ -355,15 +381,15 @@ def parse(s):
help = []
for i in filt_unary:
help.append(
- ("~%s"%i.code, i.help)
+ ("~%s" % i.code, i.help)
)
for i in filt_rex:
help.append(
- ("~%s regex"%i.code, i.help)
+ ("~%s regex" % i.code, i.help)
)
for i in filt_int:
help.append(
- ("~%s int"%i.code, i.help)
+ ("~%s int" % i.code, i.help)
)
help.sort()
help.extend(
@@ -373,4 +399,4 @@ help.extend(
("|", "or"),
("(...)", "grouping"),
]
-)
\ No newline at end of file
+)
diff --git a/libmproxy/flow.py b/libmproxy/flow.py
index 4893b725f..6154e3d71 100644
--- a/libmproxy/flow.py
+++ b/libmproxy/flow.py
@@ -162,7 +162,8 @@ class StreamLargeBodies(object):
r.headers, is_request, flow.request.method, code
)
if not (0 <= expected_size <= self.max_size):
- r.stream = r.stream or True # r.stream may already be a callable, which we want to preserve.
+ # r.stream may already be a callable, which we want to preserve.
+ r.stream = r.stream or True
class ClientPlaybackState:
@@ -200,8 +201,16 @@ class ClientPlaybackState:
class ServerPlaybackState:
- def __init__(self, headers, flows, exit, nopop, ignore_params, ignore_content,
- ignore_payload_params, ignore_host):
+ def __init__(
+ self,
+ headers,
+ flows,
+ exit,
+ nopop,
+ ignore_params,
+ ignore_content,
+ ignore_payload_params,
+ ignore_host):
"""
headers: Case-insensitive list of request headers that should be
included in request-response matching.
@@ -532,7 +541,8 @@ class State(object):
def flow_count(self):
return len(self.flows)
- # TODO: All functions regarding flows that don't cause side-effects should be moved into FlowStore.
+ # TODO: All functions regarding flows that don't cause side-effects should
+ # be moved into FlowStore.
def index(self, f):
return self.flows.index(f)
@@ -660,7 +670,7 @@ class FlowMaster(controller.Master):
"""
try:
s = script.Script(command, self)
- except script.ScriptError, v:
+ except script.ScriptError as v:
return v.args[0]
self.scripts.append(s)
@@ -724,8 +734,17 @@ class FlowMaster(controller.Master):
def stop_client_playback(self):
self.client_playback = None
- def start_server_playback(self, flows, kill, headers, exit, nopop, ignore_params,
- ignore_content, ignore_payload_params, ignore_host):
+ def start_server_playback(
+ self,
+ flows,
+ kill,
+ headers,
+ exit,
+ nopop,
+ ignore_params,
+ ignore_content,
+ ignore_payload_params,
+ ignore_host):
"""
flows: List of flows.
kill: Boolean, should we kill requests not part of the replay?
@@ -734,9 +753,15 @@ class FlowMaster(controller.Master):
ignore_payload_params: list of content params to ignore in server replay
ignore_host: true if request host should be ignored in server replay
"""
- self.server_playback = ServerPlaybackState(headers, flows, exit, nopop,
- ignore_params, ignore_content,
- ignore_payload_params, ignore_host)
+ self.server_playback = ServerPlaybackState(
+ headers,
+ flows,
+ exit,
+ nopop,
+ ignore_params,
+ ignore_content,
+ ignore_payload_params,
+ ignore_host)
self.kill_nonreplay = kill
def stop_server_playback(self):
@@ -786,23 +811,36 @@ class FlowMaster(controller.Master):
this method creates a new artificial and minimalist request also adds it to flowlist
"""
c = ClientConnection.from_state(dict(
- address=dict(address=(host, port), use_ipv6=False),
- clientcert=None
- ))
+ address=dict(address=(host, port), use_ipv6=False),
+ clientcert=None
+ ))
s = ServerConnection.from_state(dict(
- address=dict(address=(host, port), use_ipv6=False),
- state=[],
- source_address=None, #source_address=dict(address=(host, port), use_ipv6=False),
- cert=None,
- sni=host,
- ssl_established=True
- ))
- f = http.HTTPFlow(c,s);
+ address=dict(address=(host, port), use_ipv6=False),
+ state=[],
+ source_address=None,
+ # source_address=dict(address=(host, port), use_ipv6=False),
+ cert=None,
+ sni=host,
+ ssl_established=True
+ ))
+ f = http.HTTPFlow(c, s)
headers = ODictCaseless()
- req = http.HTTPRequest("absolute", method, scheme, host, port, path, (1, 1), headers, None,
- None, None, None)
+ req = http.HTTPRequest(
+ "absolute",
+ method,
+ scheme,
+ host,
+ port,
+ path,
+ (1,
+ 1),
+ headers,
+ None,
+ None,
+ None,
+ None)
f.request = req
return self.load_flow(f)
@@ -813,7 +851,8 @@ class FlowMaster(controller.Master):
if self.server and self.server.config.mode == "reverse":
f.request.host, f.request.port = self.server.config.mode.dst[2:]
- f.request.scheme = "https" if self.server.config.mode.dst[1] else "http"
+ f.request.scheme = "https" if self.server.config.mode.dst[
+ 1] else "http"
f.reply = controller.DummyReply()
if f.request:
@@ -840,7 +879,7 @@ class FlowMaster(controller.Master):
try:
f = file(path, "rb")
freader = FlowReader(f)
- except IOError, v:
+ except IOError as v:
raise FlowReadError(v.strerror)
return self.load_flows(freader)
@@ -881,7 +920,8 @@ class FlowMaster(controller.Master):
f.backup()
f.request.is_replay = True
if f.request.content:
- f.request.headers["Content-Length"] = [str(len(f.request.content))]
+ f.request.headers[
+ "Content-Length"] = [str(len(f.request.content))]
f.response = None
f.error = None
self.process_new_request(f)
@@ -1032,7 +1072,7 @@ class FlowReader:
"""
off = 0
try:
- while 1:
+ while True:
data = tnetstring.load(self.fo)
if tuple(data["version"][:2]) != version.IVERSION[:2]:
v = ".".join(str(i) for i in data["version"])
@@ -1041,7 +1081,7 @@ class FlowReader:
)
off = self.fo.tell()
yield handle.protocols[data["type"]]["flow"].from_state(data)
- except ValueError, v:
+ except ValueError as v:
# Error is due to EOF
if self.fo.tell() == off and self.fo.read() == '':
return
diff --git a/libmproxy/main.py b/libmproxy/main.py
index 32ab738e3..73e6c62b0 100644
--- a/libmproxy/main.py
+++ b/libmproxy/main.py
@@ -70,7 +70,7 @@ def get_server(dummy_server, options):
else:
try:
return ProxyServer(options)
- except ProxyServerError, v:
+ except ProxyServerError as v:
print(str(v), file=sys.stderr)
sys.exit(1)
diff --git a/libmproxy/onboarding/app.py b/libmproxy/onboarding/app.py
index 37f05e962..6edd74b1d 100644
--- a/libmproxy/onboarding/app.py
+++ b/libmproxy/onboarding/app.py
@@ -45,7 +45,10 @@ class PEM(tornado.web.RequestHandler):
def get(self):
p = os.path.join(self.request.master.server.config.cadir, self.filename)
self.set_header("Content-Type", "application/x-x509-ca-cert")
- self.set_header("Content-Disposition", "inline; filename={}".format(self.filename))
+ self.set_header(
+ "Content-Disposition",
+ "inline; filename={}".format(
+ self.filename))
with open(p, "rb") as f:
self.write(f.read())
@@ -59,7 +62,10 @@ class P12(tornado.web.RequestHandler):
def get(self):
p = os.path.join(self.request.master.server.config.cadir, self.filename)
self.set_header("Content-Type", "application/x-pkcs12")
- self.set_header("Content-Disposition", "inline; filename={}".format(self.filename))
+ self.set_header(
+ "Content-Disposition",
+ "inline; filename={}".format(
+ self.filename))
with open(p, "rb") as f:
self.write(f.read())
@@ -78,7 +84,6 @@ application = tornado.web.Application(
}
),
],
- #debug=True
+ # debug=True
)
mapp = Adapter(application)
-
diff --git a/libmproxy/platform/linux.py b/libmproxy/platform/linux.py
index d5cfec90f..e60a99504 100644
--- a/libmproxy/platform/linux.py
+++ b/libmproxy/platform/linux.py
@@ -1,4 +1,5 @@
-import socket, struct
+import socket
+import struct
# Python socket module does not have this constant
SO_ORIGINAL_DST = 80
diff --git a/libmproxy/platform/osx.py b/libmproxy/platform/osx.py
index 810e5e5ff..c59228500 100644
--- a/libmproxy/platform/osx.py
+++ b/libmproxy/platform/osx.py
@@ -21,6 +21,7 @@ class Resolver(object):
peer = csock.getpeername()
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
if "sudo: a password is required" in stxt:
- raise RuntimeError("Insufficient privileges to access pfctl. "
- "See http://mitmproxy.org/doc/transparent/osx.html for details.")
+ raise RuntimeError(
+ "Insufficient privileges to access pfctl. "
+ "See http://mitmproxy.org/doc/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
diff --git a/libmproxy/platform/windows.py b/libmproxy/platform/windows.py
index 066a377d9..5133d0569 100644
--- a/libmproxy/platform/windows.py
+++ b/libmproxy/platform/windows.py
@@ -197,9 +197,12 @@ class TransparentProxy(object):
self.driver = WinDivert()
self.driver.register()
- self.request_filter = custom_filter or " or ".join(("tcp.DstPort == %d" % p) for p in redirect_ports)
+ self.request_filter = custom_filter or " or ".join(
+ ("tcp.DstPort == %d" %
+ p) for p in redirect_ports)
self.request_forward_handle = None
- self.request_forward_thread = threading.Thread(target=self.request_forward)
+ self.request_forward_thread = threading.Thread(
+ target=self.request_forward)
self.request_forward_thread.daemon = True
self.addr_pid_map = dict()
@@ -235,17 +238,25 @@ class TransparentProxy(object):
# Block all ICMP requests (which are sent on Windows by default).
# In layman's terms: If we don't do this, our proxy machine tells the client that it can directly connect to the
# real gateway if they are on the same network.
- self.icmp_handle = self.driver.open_handle(filter="icmp", layer=Layer.NETWORK, flags=Flag.DROP)
+ self.icmp_handle = self.driver.open_handle(
+ filter="icmp",
+ layer=Layer.NETWORK,
+ flags=Flag.DROP)
- self.response_handle = self.driver.open_handle(filter=self.response_filter, layer=Layer.NETWORK)
+ self.response_handle = self.driver.open_handle(
+ filter=self.response_filter,
+ layer=Layer.NETWORK)
self.response_thread.start()
if self.mode == "forward" or self.mode == "both":
- self.request_forward_handle = self.driver.open_handle(filter=self.request_filter,
- layer=Layer.NETWORK_FORWARD)
+ self.request_forward_handle = self.driver.open_handle(
+ filter=self.request_filter,
+ layer=Layer.NETWORK_FORWARD)
self.request_forward_thread.start()
if self.mode == "local" or self.mode == "both":
- self.request_local_handle = self.driver.open_handle(filter=self.request_filter, layer=Layer.NETWORK)
+ self.request_local_handle = self.driver.open_handle(
+ filter=self.request_filter,
+ layer=Layer.NETWORK)
self.request_local_thread.start()
def shutdown(self):
@@ -266,14 +277,17 @@ class TransparentProxy(object):
try:
raw_packet, metadata = handle.recv()
return self.driver.parse_packet(raw_packet), metadata
- except WindowsError, e:
+ except WindowsError as e:
if e.winerror == 995:
return None, None
else:
raise
def fetch_pids(self):
- ret = windll.iphlpapi.GetTcpTable2(byref(self.tcptable2), byref(self.tcptable2_size), 0)
+ ret = windll.iphlpapi.GetTcpTable2(
+ byref(
+ self.tcptable2), byref(
+ self.tcptable2_size), 0)
if ret == ERROR_INSUFFICIENT_BUFFER:
self.tcptable2 = MIB_TCPTABLE2(self.tcptable2_size.value)
self.fetch_pids()
@@ -299,7 +313,8 @@ class TransparentProxy(object):
self.fetch_pids()
# If this fails, we most likely have a connection from an external client to
- # a local server on 80/443. In this, case we always want to proxy the request.
+        # a local server on 80/443. In this case, we always want to proxy
+ # the request.
pid = self.addr_pid_map.get(client, None)
if pid not in self.trusted_pids:
@@ -325,7 +340,8 @@ class TransparentProxy(object):
server = (packet.dst_addr, packet.dst_port)
if client in self.client_server_map:
- del self.client_server_map[client] # Force re-add to mark as "newest" entry in the dict.
+ # Force re-add to mark as "newest" entry in the dict.
+ del self.client_server_map[client]
while len(self.client_server_map) > self.connection_cache_size:
self.client_server_map.popitem(False)
@@ -335,7 +351,8 @@ class TransparentProxy(object):
metadata.direction = Direction.INBOUND
packet = self.driver.update_packet_checksums(packet)
- # Use any handle thats on the NETWORK layer - request_local may be unavailable.
+        # Use any handle that's on the NETWORK layer - request_local may be
+ # unavailable.
self.response_handle.send((packet.raw, metadata))
def response(self):
@@ -361,15 +378,32 @@ class TransparentProxy(object):
if __name__ == "__main__":
- parser = configargparse.ArgumentParser(description="Windows Transparent Proxy")
- parser.add_argument('--mode', choices=['forward', 'local', 'both'], default="both",
- help='redirection operation mode: "forward" to only redirect forwarded packets, '
- '"local" to only redirect packets originating from the local machine')
+ parser = configargparse.ArgumentParser(
+ description="Windows Transparent Proxy")
+ parser.add_argument(
+ '--mode',
+ choices=[
+ 'forward',
+ 'local',
+ 'both'],
+ default="both",
+ help='redirection operation mode: "forward" to only redirect forwarded packets, '
+ '"local" to only redirect packets originating from the local machine')
group = parser.add_mutually_exclusive_group()
- group.add_argument("--redirect-ports", nargs="+", type=int, default=[80, 443], metavar="80",
- help="ports that should be forwarded to the proxy")
- group.add_argument("--custom-filter", default=None, metavar="WINDIVERT_FILTER",
- help="Custom WinDivert interception rule.")
+ group.add_argument(
+ "--redirect-ports",
+ nargs="+",
+ type=int,
+ default=[
+ 80,
+ 443],
+ metavar="80",
+ help="ports that should be forwarded to the proxy")
+ group.add_argument(
+ "--custom-filter",
+ default=None,
+ metavar="WINDIVERT_FILTER",
+ help="Custom WinDivert interception rule.")
parser.add_argument("--proxy-addr", default=False,
help="Proxy Server Address")
parser.add_argument("--proxy-port", type=int, default=8080,
@@ -391,4 +425,4 @@ if __name__ == "__main__":
except KeyboardInterrupt:
print(" * Shutting down...")
proxy.shutdown()
- print(" * Shut down.")
\ No newline at end of file
+ print(" * Shut down.")
diff --git a/libmproxy/protocol/__init__.py b/libmproxy/protocol/__init__.py
index f5d6a2d06..bbc20dbab 100644
--- a/libmproxy/protocol/__init__.py
+++ b/libmproxy/protocol/__init__.py
@@ -1 +1 @@
-from .primitives import *
\ No newline at end of file
+from .primitives import *
diff --git a/libmproxy/protocol/handle.py b/libmproxy/protocol/handle.py
index 100c73686..49cb3c1b0 100644
--- a/libmproxy/protocol/handle.py
+++ b/libmproxy/protocol/handle.py
@@ -6,6 +6,7 @@ protocols = {
'tcp': dict(handler=tcp.TCPHandler)
}
+
def protocol_handler(protocol):
"""
@type protocol: str
@@ -14,4 +15,6 @@ def protocol_handler(protocol):
if protocol in protocols:
return protocols[protocol]["handler"]
- raise NotImplementedError("Unknown Protocol: %s" % protocol) # pragma: nocover
\ No newline at end of file
+ raise NotImplementedError(
+ "Unknown Protocol: %s" %
+ protocol) # pragma: nocover
diff --git a/libmproxy/protocol/http.py b/libmproxy/protocol/http.py
index 324a188f3..91e745679 100644
--- a/libmproxy/protocol/http.py
+++ b/libmproxy/protocol/http.py
@@ -305,7 +305,18 @@ class HTTPRequest(HTTPMessage):
@classmethod
def from_state(cls, state):
- f = cls(None, None, None, None, None, None, None, None, None, None, None)
+ f = cls(
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None,
+ None)
f.load_state(state)
return f
@@ -315,7 +326,12 @@ class HTTPRequest(HTTPMessage):
)
@classmethod
- def from_stream(cls, rfile, include_body=True, body_size_limit=None, wfile=None):
+ def from_stream(
+ cls,
+ rfile,
+ include_body=True,
+ body_size_limit=None,
+ wfile=None):
"""
Parse an HTTP request from a file stream
@@ -403,7 +419,8 @@ class HTTPRequest(HTTPMessage):
self.host,
self.port)]
- # If content is defined (i.e. not None or CONTENT_MISSING), we always add a content-length header.
+ # If content is defined (i.e. not None or CONTENT_MISSING), we always
+ # add a content-length header.
if self.content or self.content == "":
headers["Content-Length"] = [str(len(self.content))]
@@ -460,9 +477,9 @@ class HTTPRequest(HTTPMessage):
decode appropriately.
"""
if self.headers["accept-encoding"]:
- self.headers["accept-encoding"] = [', '.join(
- e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0]
- )]
+ self.headers["accept-encoding"] = [
+ ', '.join(
+ e for e in encoding.ENCODINGS if e in self.headers["accept-encoding"][0])]
def update_host_header(self):
"""
@@ -489,13 +506,22 @@ class HTTPRequest(HTTPMessage):
Returns an empty ODict if there is no data or the content-type
indicates non-form data.
"""
- if self.content and self.headers.in_any("content-type", HDR_FORM_URLENCODED, True):
- return odict.ODict(utils.urldecode(self.content))
+ if self.content and self.headers.in_any(
+ "content-type",
+ HDR_FORM_URLENCODED,
+ True):
+ return odict.ODict(utils.urldecode(self.content))
return odict.ODict([])
def get_form_multipart(self):
- if self.content and self.headers.in_any("content-type", HDR_FORM_MULTIPART, True):
- return odict.ODict(utils.multipartdecode(self.headers, self.content))
+ if self.content and self.headers.in_any(
+ "content-type",
+ HDR_FORM_MULTIPART,
+ True):
+ return odict.ODict(
+ utils.multipartdecode(
+ self.headers,
+ self.content))
return odict.ODict([])
def set_form_urlencoded(self, odict):
@@ -664,8 +690,15 @@ class HTTPResponse(HTTPMessage):
timestamp_end: Timestamp indicating when request transmission ended
"""
- def __init__(self, httpversion, code, msg, headers, content, timestamp_start=None,
- timestamp_end=None):
+ def __init__(
+ self,
+ httpversion,
+ code,
+ msg,
+ headers,
+ content,
+ timestamp_start=None,
+ timestamp_end=None):
assert isinstance(headers, odict.ODictCaseless) or headers is None
HTTPMessage.__init__(
self,
@@ -710,7 +743,12 @@ class HTTPResponse(HTTPMessage):
)
@classmethod
- def from_stream(cls, rfile, request_method, include_body=True, body_size_limit=None):
+ def from_stream(
+ cls,
+ rfile,
+ request_method,
+ include_body=True,
+ body_size_limit=None):
"""
Parse an HTTP response from a file stream
"""
@@ -760,7 +798,8 @@ class HTTPResponse(HTTPMessage):
if not preserve_transfer_encoding:
del headers['Transfer-Encoding']
- # If content is defined (i.e. not None or CONTENT_MISSING), we always add a content-length header.
+ # If content is defined (i.e. not None or CONTENT_MISSING), we always
+ # add a content-length header.
if self.content or self.content == "":
headers["Content-Length"] = [str(len(self.content))]
@@ -1008,7 +1047,7 @@ class HTTPHandler(ProtocolHandler):
include_body=False
)
break
- except (tcp.NetLibError, http.HttpErrorConnClosed), v:
+ except (tcp.NetLibError, http.HttpErrorConnClosed) as v:
self.c.log(
"error in server communication: %s" % repr(v),
level="debug"
@@ -1079,7 +1118,8 @@ class HTTPHandler(ProtocolHandler):
if request_reply is None or request_reply == KILL:
raise KillSignal()
- self.process_server_address(flow) # The inline script may have changed request.host
+ # The inline script may have changed request.host
+ self.process_server_address(flow)
if isinstance(request_reply, HTTPResponse):
flow.response = request_reply
@@ -1090,7 +1130,9 @@ class HTTPHandler(ProtocolHandler):
# we can safely set it as the final attribute value here.
flow.server_conn = self.c.server_conn
- self.c.log("response", "debug", [flow.response._assemble_first_line()])
+ self.c.log(
+ "response", "debug", [
+ flow.response._assemble_first_line()])
response_reply = self.c.channel.ask("response", flow)
if response_reply is None or response_reply == KILL:
raise KillSignal()
@@ -1117,7 +1159,8 @@ class HTTPHandler(ProtocolHandler):
}
)
)
- if not self.process_connect_request((flow.request.host, flow.request.port)):
+ if not self.process_connect_request(
+ (flow.request.host, flow.request.port)):
return False
# If the user has changed the target server on this connection,
@@ -1130,7 +1173,7 @@ class HTTPHandler(ProtocolHandler):
http.HttpError,
proxy.ProxyError,
tcp.NetLibError,
- ), e:
+ ) as e:
self.handle_error(e, flow)
except KillSignal:
self.c.log("Connection killed", "info")
@@ -1226,7 +1269,8 @@ class HTTPHandler(ProtocolHandler):
# Determine .scheme, .host and .port attributes
# For absolute-form requests, they are directly given in the request.
# For authority-form requests, we only need to determine the request scheme.
- # For relative-form requests, we need to determine host and port as well.
+ # For relative-form requests, we need to determine host and port as
+ # well.
if not request.scheme:
request.scheme = "https" if flow.server_conn and flow.server_conn.ssl_established else "http"
if not request.host:
@@ -1253,8 +1297,8 @@ class HTTPHandler(ProtocolHandler):
flow.server_conn = self.c.server_conn
self.c.establish_server_connection()
self.c.client_conn.send(
- ('HTTP/%s.%s 200 ' % (request.httpversion[0],request.httpversion[1])) +
- 'Connection established\r\n' +
+ ('HTTP/%s.%s 200 ' % (request.httpversion[0], request.httpversion[1])) +
+ 'Connection established\r\n' +
'Content-Length: 0\r\n' +
('Proxy-agent: %s\r\n' % self.c.config.server_version) +
'\r\n'
@@ -1372,10 +1416,15 @@ class HTTPHandler(ProtocolHandler):
semantics. Returns True, if so.
"""
close_connection = (
- http.connection_close(flow.request.httpversion, flow.request.headers) or
- http.connection_close(flow.response.httpversion, flow.response.headers) or
- http.expected_http_body_size(flow.response.headers, False, flow.request.method,
- flow.response.code) == -1)
+ http.connection_close(
+ flow.request.httpversion,
+ flow.request.headers) or http.connection_close(
+ flow.response.httpversion,
+ flow.response.headers) or http.expected_http_body_size(
+ flow.response.headers,
+ False,
+ flow.request.method,
+ flow.response.code) == -1)
if close_connection:
if flow.request.form_in == "authority" and flow.response.code == 200:
# Workaround for
diff --git a/libmproxy/protocol/primitives.py b/libmproxy/protocol/primitives.py
index f9c22e1ab..2f8ea3e05 100644
--- a/libmproxy/protocol/primitives.py
+++ b/libmproxy/protocol/primitives.py
@@ -24,6 +24,7 @@ class Error(stateobject.StateObject):
msg: Message describing the error
timestamp: Seconds since the epoch
"""
+
def __init__(self, msg, timestamp=None):
"""
@type msg: str
@@ -59,6 +60,7 @@ class Flow(stateobject.StateObject):
A Flow is a collection of objects representing a single transaction.
This class is usually subclassed for each protocol, e.g. HTTPFlow.
"""
+
def __init__(self, type, client_conn, server_conn, live=None):
self.type = type
self.id = str(uuid.uuid4())
@@ -165,12 +167,12 @@ class Flow(stateobject.StateObject):
master.handle_accept_intercept(self)
-
class ProtocolHandler(object):
"""
A ProtocolHandler implements an application-layer protocol, e.g. HTTP.
See: libmproxy.protocol.http.HTTPHandler
"""
+
def __init__(self, c):
self.c = c
"""@type: libmproxy.proxy.server.ConnectionHandler"""
@@ -209,13 +211,20 @@ class LiveConnection(object):
interface with a live connection, without exposing the internals
of the ConnectionHandler.
"""
+
def __init__(self, c):
self.c = c
"""@type: libmproxy.proxy.server.ConnectionHandler"""
self._backup_server_conn = None
"""@type: libmproxy.proxy.connection.ServerConnection"""
- def change_server(self, address, ssl=None, sni=None, force=False, persistent_change=False):
+ def change_server(
+ self,
+ address,
+ ssl=None,
+ sni=None,
+ force=False,
+ persistent_change=False):
"""
Change the server connection to the specified address.
@returns:
diff --git a/libmproxy/protocol/tcp.py b/libmproxy/protocol/tcp.py
index 5314b5779..0feb77c6b 100644
--- a/libmproxy/protocol/tcp.py
+++ b/libmproxy/protocol/tcp.py
@@ -79,7 +79,8 @@ class TCPHandler(ProtocolHandler):
),
"info"
)
- # Do not use dst.connection.send here, which may raise OpenSSL-specific errors.
+ # Do not use dst.connection.send here, which may raise
+ # OpenSSL-specific errors.
dst.send(contents)
else:
# socket.socket.send supports raw bytearrays/memoryviews
diff --git a/libmproxy/proxy/config.py b/libmproxy/proxy/config.py
index dfde2958d..3f5796694 100644
--- a/libmproxy/proxy/config.py
+++ b/libmproxy/proxy/config.py
@@ -81,16 +81,27 @@ class ProxyConfig:
self.check_tcp = HostMatcher(tcp_hosts)
self.authenticator = authenticator
self.cadir = os.path.expanduser(cadir)
- self.certstore = certutils.CertStore.from_store(self.cadir, CONF_BASENAME)
+ self.certstore = certutils.CertStore.from_store(
+ self.cadir,
+ CONF_BASENAME)
for spec, cert in certs:
self.certstore.add_cert_file(spec, cert)
self.certforward = certforward
- self.openssl_method_client, self.openssl_options_client = version_to_openssl(ssl_version_client)
- self.openssl_method_server, self.openssl_options_server = version_to_openssl(ssl_version_server)
+ self.openssl_method_client, self.openssl_options_client = version_to_openssl(
+ ssl_version_client)
+ self.openssl_method_server, self.openssl_options_server = version_to_openssl(
+ ssl_version_server)
self.ssl_ports = ssl_ports
-sslversion_choices = ("all", "secure", "SSLv2", "SSLv3", "TLSv1", "TLSv1_1", "TLSv1_2")
+sslversion_choices = (
+ "all",
+ "secure",
+ "SSLv2",
+ "SSLv3",
+ "TLSv1",
+ "TLSv1_1",
+ "TLSv1_2")
def version_to_openssl(version):
@@ -119,7 +130,8 @@ def process_proxy_options(parser, options):
if options.transparent_proxy:
c += 1
if not platform.resolver:
- return parser.error("Transparent mode not supported on this platform.")
+ return parser.error(
+ "Transparent mode not supported on this platform.")
mode = "transparent"
if options.socks_proxy:
c += 1
@@ -133,28 +145,33 @@ def process_proxy_options(parser, options):
mode = "upstream"
upstream_server = options.upstream_proxy
if c > 1:
- return parser.error("Transparent, SOCKS5, reverse and upstream proxy mode "
- "are mutually exclusive.")
+ return parser.error(
+ "Transparent, SOCKS5, reverse and upstream proxy mode "
+ "are mutually exclusive.")
if options.clientcerts:
options.clientcerts = os.path.expanduser(options.clientcerts)
- if not os.path.exists(options.clientcerts) or not os.path.isdir(options.clientcerts):
+ if not os.path.exists(
+ options.clientcerts) or not os.path.isdir(
+ options.clientcerts):
return parser.error(
- "Client certificate directory does not exist or is not a directory: %s" % options.clientcerts
- )
+ "Client certificate directory does not exist or is not a directory: %s" %
+ options.clientcerts)
if (options.auth_nonanonymous or options.auth_singleuser or options.auth_htpasswd):
if options.auth_singleuser:
if len(options.auth_singleuser.split(':')) != 2:
- return parser.error("Invalid single-user specification. Please use the format username:password")
+ return parser.error(
+ "Invalid single-user specification. Please use the format username:password")
username, password = options.auth_singleuser.split(':')
password_manager = http_auth.PassManSingleUser(username, password)
elif options.auth_nonanonymous:
password_manager = http_auth.PassManNonAnon()
elif options.auth_htpasswd:
try:
- password_manager = http_auth.PassManHtpasswd(options.auth_htpasswd)
- except ValueError, v:
+ password_manager = http_auth.PassManHtpasswd(
+ options.auth_htpasswd)
+ except ValueError as v:
return parser.error(v.message)
authenticator = http_auth.BasicProxyAuth(password_manager, "mitmproxy")
else:
@@ -203,15 +220,18 @@ def process_proxy_options(parser, options):
def ssl_option_group(parser):
group = parser.add_argument_group("SSL")
group.add_argument(
- "--cert", dest='certs', default=[], type=str,
- metavar="SPEC", action="append",
+ "--cert",
+ dest='certs',
+ default=[],
+ type=str,
+ metavar="SPEC",
+ action="append",
help='Add an SSL certificate. SPEC is of the form "[domain=]path". '
- 'The domain may include a wildcard, and is equal to "*" if not specified. '
- 'The file at path is a certificate in PEM format. If a private key is included in the PEM, '
- 'it is used, else the default key in the conf dir is used. '
- 'The PEM file should contain the full certificate chain, with the leaf certificate as the first entry. '
- 'Can be passed multiple times.'
- )
+ 'The domain may include a wildcard, and is equal to "*" if not specified. '
+ 'The file at path is a certificate in PEM format. If a private key is included in the PEM, '
+ 'it is used, else the default key in the conf dir is used. '
+ 'The PEM file should contain the full certificate chain, with the leaf certificate as the first entry. '
+ 'Can be passed multiple times.')
group.add_argument(
"--cert-forward", action="store_true",
dest="certforward", default=False,
@@ -238,11 +258,15 @@ def ssl_option_group(parser):
help="Don't connect to upstream server to look up certificate details."
)
group.add_argument(
- "--ssl-port", action="append", type=int, dest="ssl_ports", default=list(TRANSPARENT_SSL_PORTS),
+ "--ssl-port",
+ action="append",
+ type=int,
+ dest="ssl_ports",
+ default=list(TRANSPARENT_SSL_PORTS),
metavar="PORT",
help="Can be passed multiple times. Specify destination ports which are assumed to be SSL. "
- "Defaults to %s." % str(TRANSPARENT_SSL_PORTS)
- )
+ "Defaults to %s." %
+ str(TRANSPARENT_SSL_PORTS))
group.add_argument(
"--ssl-version-client", dest="ssl_version_client",
default="secure", action="store",
diff --git a/libmproxy/proxy/connection.py b/libmproxy/proxy/connection.py
index 1eeae16fd..5219023b0 100644
--- a/libmproxy/proxy/connection.py
+++ b/libmproxy/proxy/connection.py
@@ -7,7 +7,9 @@ from .. import stateobject, utils
class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
def __init__(self, client_connection, address, server):
- if client_connection: # Eventually, this object is restored from state. We don't have a connection then.
+ # Eventually, this object is restored from state. We don't have a
+ # connection then.
+ if client_connection:
tcp.BaseHandler.__init__(self, client_connection, address, server)
else:
self.connection = None
@@ -39,15 +41,18 @@ class ClientConnection(tcp.BaseHandler, stateobject.StateObject):
def get_state(self, short=False):
d = super(ClientConnection, self).get_state(short)
d.update(
- address={"address": self.address(), "use_ipv6": self.address.use_ipv6},
- clientcert=self.cert.to_pem() if self.clientcert else None
- )
+ address={
+ "address": self.address(),
+ "use_ipv6": self.address.use_ipv6},
+ clientcert=self.cert.to_pem() if self.clientcert else None)
return d
def load_state(self, state):
super(ClientConnection, self).load_state(state)
- self.address = tcp.Address(**state["address"]) if state["address"] else None
- self.clientcert = certutils.SSLCert.from_pem(state["clientcert"]) if state["clientcert"] else None
+ self.address = tcp.Address(
+ **state["address"]) if state["address"] else None
+ self.clientcert = certutils.SSLCert.from_pem(
+ state["clientcert"]) if state["clientcert"] else None
def copy(self):
return copy.copy(self)
@@ -114,7 +119,7 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
address={"address": self.address(),
"use_ipv6": self.address.use_ipv6},
source_address= ({"address": self.source_address(),
- "use_ipv6": self.source_address.use_ipv6} if self.source_address else None),
+ "use_ipv6": self.source_address.use_ipv6} if self.source_address else None),
cert=self.cert.to_pem() if self.cert else None
)
return d
@@ -122,9 +127,12 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
def load_state(self, state):
super(ServerConnection, self).load_state(state)
- self.address = tcp.Address(**state["address"]) if state["address"] else None
- self.source_address = tcp.Address(**state["source_address"]) if state["source_address"] else None
- self.cert = certutils.SSLCert.from_pem(state["cert"]) if state["cert"] else None
+ self.address = tcp.Address(
+ **state["address"]) if state["address"] else None
+ self.source_address = tcp.Address(
+ **state["source_address"]) if state["source_address"] else None
+ self.cert = certutils.SSLCert.from_pem(
+ state["cert"]) if state["cert"] else None
@classmethod
def from_state(cls, state):
@@ -147,7 +155,9 @@ class ServerConnection(tcp.TCPClient, stateobject.StateObject):
def establish_ssl(self, clientcerts, sni, **kwargs):
clientcert = None
if clientcerts:
- path = os.path.join(clientcerts, self.address.host.encode("idna")) + ".pem"
+ path = os.path.join(
+ clientcerts,
+ self.address.host.encode("idna")) + ".pem"
if os.path.exists(path):
clientcert = path
self.convert_to_ssl(cert=clientcert, sni=sni, **kwargs)
diff --git a/libmproxy/proxy/primitives.py b/libmproxy/proxy/primitives.py
index c0ae424df..9e7dae9af 100644
--- a/libmproxy/proxy/primitives.py
+++ b/libmproxy/proxy/primitives.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import
from netlib import socks
+
class ProxyError(Exception):
def __init__(self, code, message, headers=None):
super(ProxyError, self).__init__(message)
@@ -61,7 +62,7 @@ class TransparentProxyMode(ProxyMode):
def get_upstream_server(self, client_conn):
try:
dst = self.resolver.original_addr(client_conn.connection)
- except Exception, e:
+ except Exception as e:
raise ProxyError(502, "Transparent mode failure: %s" % str(e))
if dst[1] in self.sslports:
@@ -87,7 +88,9 @@ class Socks5ProxyMode(ProxyMode):
guess = ""
raise socks.SocksError(
socks.REP.GENERAL_SOCKS_SERVER_FAILURE,
- guess + "Invalid SOCKS version. Expected 0x05, got 0x%x" % msg.ver)
+ guess +
+ "Invalid SOCKS version. Expected 0x05, got 0x%x" %
+ msg.ver)
def get_upstream_server(self, client_conn):
try:
@@ -117,13 +120,15 @@ class Socks5ProxyMode(ProxyMode):
"mitmproxy only supports SOCKS5 CONNECT."
)
- # We do not connect here yet, as the clientconnect event has not been handled yet.
+ # We do not connect here yet, as the clientconnect event has not
+ # been handled yet.
connect_reply = socks.Message(
socks.VERSION.SOCKS5,
socks.REP.SUCCEEDED,
socks.ATYP.DOMAINNAME,
- client_conn.address # dummy value, we don't have an upstream connection yet.
+ # dummy value, we don't have an upstream connection yet.
+ client_conn.address
)
connect_reply.to_file(client_conn.wfile)
client_conn.wfile.flush()
@@ -161,4 +166,4 @@ class UpstreamProxyMode(_ConstDestinationProxyMode):
class Log:
def __init__(self, msg, level="info"):
self.msg = msg
- self.level = level
\ No newline at end of file
+ self.level = level
diff --git a/libmproxy/proxy/server.py b/libmproxy/proxy/server.py
index a72f9aba2..e1587df17 100644
--- a/libmproxy/proxy/server.py
+++ b/libmproxy/proxy/server.py
@@ -34,7 +34,7 @@ class ProxyServer(tcp.TCPServer):
self.config = config
try:
tcp.TCPServer.__init__(self, (config.host, config.port))
- except socket.error, v:
+ except socket.error as v:
raise ProxyServerError('Error starting proxy server: ' + repr(v))
self.channel = None
@@ -46,16 +46,30 @@ class ProxyServer(tcp.TCPServer):
self.channel = channel
def handle_client_connection(self, conn, client_address):
- h = ConnectionHandler(self.config, conn, client_address, self, self.channel)
+ h = ConnectionHandler(
+ self.config,
+ conn,
+ client_address,
+ self,
+ self.channel)
h.handle()
h.finish()
class ConnectionHandler:
- def __init__(self, config, client_connection, client_address, server, channel):
+ def __init__(
+ self,
+ config,
+ client_connection,
+ client_address,
+ server,
+ channel):
self.config = config
"""@type: libmproxy.proxy.config.ProxyConfig"""
- self.client_conn = ClientConnection(client_connection, client_address, server)
+ self.client_conn = ClientConnection(
+ client_connection,
+ client_address,
+ server)
"""@type: libmproxy.proxy.connection.ClientConnection"""
self.server_conn = None
"""@type: libmproxy.proxy.connection.ServerConnection"""
@@ -70,17 +84,23 @@ class ConnectionHandler:
# Can we already identify the target server and connect to it?
client_ssl, server_ssl = False, False
conn_kwargs = dict()
- upstream_info = self.config.mode.get_upstream_server(self.client_conn)
+ upstream_info = self.config.mode.get_upstream_server(
+ self.client_conn)
if upstream_info:
self.set_server_address(upstream_info[2:])
client_ssl, server_ssl = upstream_info[:2]
if self.config.check_ignore(self.server_conn.address):
- self.log("Ignore host: %s:%s" % self.server_conn.address(), "info")
+ self.log(
+ "Ignore host: %s:%s" %
+ self.server_conn.address(),
+ "info")
self.conntype = "tcp"
conn_kwargs["log"] = False
client_ssl, server_ssl = False, False
else:
- pass # No upstream info from the metadata: upstream info in the protocol (e.g. HTTP absolute-form)
+ # No upstream info from the metadata: upstream info in the
+ # protocol (e.g. HTTP absolute-form)
+ pass
self.channel.ask("clientconnect", self)
@@ -92,11 +112,17 @@ class ConnectionHandler:
self.establish_ssl(client=client_ssl, server=server_ssl)
if self.config.check_tcp(self.server_conn.address):
- self.log("Generic TCP mode for host: %s:%s" % self.server_conn.address(), "info")
+ self.log(
+ "Generic TCP mode for host: %s:%s" %
+ self.server_conn.address(),
+ "info")
self.conntype = "tcp"
# Delegate handling to the protocol handler
- protocol_handler(self.conntype)(self, **conn_kwargs).handle_messages()
+ protocol_handler(
+ self.conntype)(
+ self,
+ **conn_kwargs).handle_messages()
self.log("clientdisconnect", "info")
self.channel.tell("clientdisconnect", self)
@@ -104,7 +130,8 @@ class ConnectionHandler:
except ProxyError as e:
protocol_handler(self.conntype)(self, **conn_kwargs).handle_error(e)
except Exception:
- import traceback, sys
+ import traceback
+ import sys
self.log(traceback.format_exc(), "error")
print >> sys.stderr, traceback.format_exc()
@@ -112,7 +139,8 @@ class ConnectionHandler:
print >> sys.stderr, "Please lodge a bug report at: https://github.com/mitmproxy/mitmproxy"
finally:
# Make sure that we close the server connection in any case.
- # The client connection is closed by the ProxyServer and does not have be handled here.
+ # The client connection is closed by the ProxyServer and does not
+            # have to be handled here.
self.del_server_connection()
def del_server_connection(self):
@@ -122,8 +150,10 @@ class ConnectionHandler:
if self.server_conn and self.server_conn.connection:
self.server_conn.finish()
self.server_conn.close()
- self.log("serverdisconnect", "debug", ["%s:%s" % (self.server_conn.address.host,
- self.server_conn.address.port)])
+ self.log(
+ "serverdisconnect", "debug", [
+ "%s:%s" %
+ (self.server_conn.address.host, self.server_conn.address.port)])
self.channel.tell("serverdisconnect", self)
self.server_conn = None
@@ -141,7 +171,9 @@ class ConnectionHandler:
if self.server_conn:
self.del_server_connection()
- self.log("Set new server address: %s:%s" % (address.host, address.port), "debug")
+ self.log(
+ "Set new server address: %s:%s" %
+ (address.host, address.port), "debug")
self.server_conn = ServerConnection(address)
def establish_server_connection(self, ask=True):
@@ -155,12 +187,16 @@ class ConnectionHandler:
"""
if self.server_conn.connection:
return
- self.log("serverconnect", "debug", ["%s:%s" % self.server_conn.address()[:2]])
+ self.log(
+ "serverconnect", "debug", [
+ "%s:%s" %
+ self.server_conn.address()[
+ :2]])
if ask:
self.channel.ask("serverconnect", self)
try:
self.server_conn.connect()
- except tcp.NetLibError, v:
+ except tcp.NetLibError as v:
raise ProxyError(502, v)
def establish_ssl(self, client=False, server=False, sni=None):
@@ -237,7 +273,8 @@ class ConnectionHandler:
self.server_conn.state = state
# Receiving new_sni where had_ssl is False is a weird case that happens when the workaround for
- # https://github.com/mitmproxy/mitmproxy/issues/427 is active. In this case, we want to establish SSL as well.
+ # https://github.com/mitmproxy/mitmproxy/issues/427 is active. In this
+ # case, we want to establish SSL as well.
if had_ssl or new_sni:
self.establish_ssl(server=True, sni=sni)
@@ -246,8 +283,10 @@ class ConnectionHandler:
def log(self, msg, level, subs=()):
msg = [
- "%s:%s: %s" % (self.client_conn.address.host, self.client_conn.address.port, msg)
- ]
+ "%s:%s: %s" %
+ (self.client_conn.address.host,
+ self.client_conn.address.port,
+ msg)]
for i in subs:
msg.append(" -> " + i)
msg = "\n".join(msg)
@@ -255,11 +294,13 @@ class ConnectionHandler:
def find_cert(self):
if self.config.certforward and self.server_conn.ssl_established:
- return self.server_conn.cert, self.config.certstore.gen_pkey(self.server_conn.cert), None
+ return self.server_conn.cert, self.config.certstore.gen_pkey(
+ self.server_conn.cert), None
else:
host = self.server_conn.address.host
sans = []
- if self.server_conn.ssl_established and (not self.config.no_upstream_cert):
+ if self.server_conn.ssl_established and (
+ not self.config.no_upstream_cert):
upstream_cert = self.server_conn.cert
sans.extend(upstream_cert.altnames)
if upstream_cert.cn:
@@ -291,8 +332,11 @@ class ConnectionHandler:
# - We established SSL with the server previously
# - We initially wanted to establish SSL with the server,
# but the server refused to negotiate without SNI.
- if self.server_conn.ssl_established or hasattr(self.server_conn, "may_require_sni"):
- self.server_reconnect(sni) # reconnect to upstream server with SNI
+ if self.server_conn.ssl_established or hasattr(
+ self.server_conn,
+ "may_require_sni"):
+ # reconnect to upstream server with SNI
+ self.server_reconnect(sni)
# Now, change client context to reflect changed certificate:
cert, key, chain_file = self.find_cert()
new_context = self.client_conn.create_ssl_context(
@@ -308,4 +352,7 @@ class ConnectionHandler:
# make dang sure it doesn't happen.
except: # pragma: no cover
import traceback
- self.log("Error in handle_sni:\r\n" + traceback.format_exc(), "error")
+ self.log(
+ "Error in handle_sni:\r\n" +
+ traceback.format_exc(),
+ "error")
diff --git a/libmproxy/script.py b/libmproxy/script.py
index be2260046..4c550342b 100644
--- a/libmproxy/script.py
+++ b/libmproxy/script.py
@@ -1,7 +1,11 @@
from __future__ import absolute_import
-import os, traceback, threading, shlex
+import os
+import traceback
+import threading
+import shlex
from . import controller
+
class ScriptError(Exception):
pass
@@ -56,6 +60,7 @@ class Script:
s = Script(argv, master)
s.load()
"""
+
def __init__(self, command, master):
self.command = command
self.argv = self.parse_command(command)
@@ -73,9 +78,11 @@ class Script:
args = shlex.split(command)
args[0] = os.path.expanduser(args[0])
if not os.path.exists(args[0]):
- raise ScriptError(("Script file not found: %s.\r\n"
- "If you script path contains spaces, "
- "make sure to wrap it in additional quotes, e.g. -s \"'./foo bar/baz.py' --args\".") % args[0])
+ raise ScriptError(
+ ("Script file not found: %s.\r\n"
+                 "If your script path contains spaces, "
+ "make sure to wrap it in additional quotes, e.g. -s \"'./foo bar/baz.py' --args\".") %
+ args[0])
elif not os.path.isfile(args[0]):
raise ScriptError("Not a file: %s" % args[0])
return args
@@ -90,7 +97,7 @@ class Script:
ns = {}
try:
execfile(self.argv[0], ns, ns)
- except Exception, v:
+ except Exception as v:
raise ScriptError(traceback.format_exc(v))
self.ns = ns
r = self.run("start", self.argv)
@@ -114,7 +121,7 @@ class Script:
if f:
try:
return (True, f(self.ctx, *args, **kwargs))
- except Exception, v:
+ except Exception as v:
return (False, (v, traceback.format_exc(v)))
else:
return (False, None)
@@ -133,7 +140,7 @@ class ReplyProxy(object):
return
self.original_reply(*args, **kwargs)
- def __getattr__ (self, k):
+ def __getattr__(self, k):
return getattr(self.original_reply, k)
@@ -145,7 +152,8 @@ def _handle_concurrent_reply(fn, o, *args, **kwargs):
def run():
fn(*args, **kwargs)
- reply_proxy() # If the script did not call .reply(), we have to do it now.
+ # If the script did not call .reply(), we have to do it now.
+ reply_proxy()
ScriptThread(target=run).start()
@@ -154,8 +162,15 @@ class ScriptThread(threading.Thread):
def concurrent(fn):
- if fn.func_name in ("request", "response", "error", "clientconnect", "serverconnect", "clientdisconnect"):
+ if fn.func_name in (
+ "request",
+ "response",
+ "error",
+ "clientconnect",
+ "serverconnect",
+ "clientdisconnect"):
def _concurrent(ctx, obj):
_handle_concurrent_reply(fn, obj, ctx, obj)
return _concurrent
- raise NotImplementedError("Concurrent decorator not supported for this method.")
+ raise NotImplementedError(
+ "Concurrent decorator not supported for this method.")
diff --git a/libmproxy/tnetstring.py b/libmproxy/tnetstring.py
index 585196754..bed8405ff 100644
--- a/libmproxy/tnetstring.py
+++ b/libmproxy/tnetstring.py
@@ -72,13 +72,14 @@ __ver_major__ = 0
__ver_minor__ = 2
__ver_patch__ = 0
__ver_sub__ = ""
-__version__ = "%d.%d.%d%s" % (__ver_major__,__ver_minor__,__ver_patch__,__ver_sub__)
+__version__ = "%d.%d.%d%s" % (
+ __ver_major__, __ver_minor__, __ver_patch__, __ver_sub__)
from collections import deque
-def dumps(value,encoding=None):
+def dumps(value, encoding=None):
"""dumps(object,encoding=None) -> string
This function dumps a python object as a tnetstring.
@@ -90,21 +91,21 @@ def dumps(value,encoding=None):
# consider the _gdumps() function instead; it's a standard top-down
# generator that's simpler to understand but much less efficient.
q = deque()
- _rdumpq(q,0,value,encoding)
+ _rdumpq(q, 0, value, encoding)
return "".join(q)
-def dump(value,file,encoding=None):
+def dump(value, file, encoding=None):
"""dump(object,file,encoding=None)
This function dumps a python object as a tnetstring and writes it to
the given file.
"""
- file.write(dumps(value,encoding))
+ file.write(dumps(value, encoding))
file.flush()
-def _rdumpq(q,size,value,encoding=None):
+def _rdumpq(q, size, value, encoding=None):
"""Dump value as a tnetstring, to a deque instance, last chunks first.
This function generates the tnetstring representation of the given value,
@@ -129,7 +130,7 @@ def _rdumpq(q,size,value,encoding=None):
if value is False:
write("5:false!")
return size + 8
- if isinstance(value,(int,long)):
+ if isinstance(value, (int, long)):
data = str(value)
ldata = len(data)
span = str(ldata)
@@ -138,7 +139,7 @@ def _rdumpq(q,size,value,encoding=None):
write(":")
write(span)
return size + 2 + len(span) + ldata
- if isinstance(value,(float,)):
+ if isinstance(value, (float,)):
# Use repr() for float rather than str().
# It round-trips more accurately.
# Probably unnecessary in later python versions that
@@ -151,7 +152,7 @@ def _rdumpq(q,size,value,encoding=None):
write(":")
write(span)
return size + 2 + len(span) + ldata
- if isinstance(value,str):
+ if isinstance(value, str):
lvalue = len(value)
span = str(lvalue)
write(",")
@@ -159,26 +160,26 @@ def _rdumpq(q,size,value,encoding=None):
write(":")
write(span)
return size + 2 + len(span) + lvalue
- if isinstance(value,(list,tuple,)):
+ if isinstance(value, (list, tuple,)):
write("]")
init_size = size = size + 1
for item in reversed(value):
- size = _rdumpq(q,size,item,encoding)
+ size = _rdumpq(q, size, item, encoding)
span = str(size - init_size)
write(":")
write(span)
return size + 1 + len(span)
- if isinstance(value,dict):
+ if isinstance(value, dict):
write("}")
init_size = size = size + 1
- for (k,v) in value.iteritems():
- size = _rdumpq(q,size,v,encoding)
- size = _rdumpq(q,size,k,encoding)
+ for (k, v) in value.iteritems():
+ size = _rdumpq(q, size, v, encoding)
+ size = _rdumpq(q, size, k, encoding)
span = str(size - init_size)
write(":")
write(span)
return size + 1 + len(span)
- if isinstance(value,unicode):
+ if isinstance(value, unicode):
if encoding is None:
raise ValueError("must specify encoding to dump unicode strings")
value = value.encode(encoding)
@@ -192,7 +193,7 @@ def _rdumpq(q,size,value,encoding=None):
raise ValueError("unserializable object")
-def _gdumps(value,encoding):
+def _gdumps(value, encoding):
"""Generate fragments of value dumped as a tnetstring.
This is the naive dumping algorithm, implemented as a generator so that
@@ -207,24 +208,24 @@ def _gdumps(value,encoding):
yield "4:true!"
elif value is False:
yield "5:false!"
- elif isinstance(value,(int,long)):
+ elif isinstance(value, (int, long)):
data = str(value)
yield str(len(data))
yield ":"
yield data
yield "#"
- elif isinstance(value,(float,)):
+ elif isinstance(value, (float,)):
data = repr(value)
yield str(len(data))
yield ":"
yield data
yield "^"
- elif isinstance(value,(str,)):
+ elif isinstance(value, (str,)):
yield str(len(value))
yield ":"
yield value
yield ","
- elif isinstance(value,(list,tuple,)):
+ elif isinstance(value, (list, tuple,)):
sub = []
for item in value:
sub.extend(_gdumps(item))
@@ -233,9 +234,9 @@ def _gdumps(value,encoding):
yield ":"
yield sub
yield "]"
- elif isinstance(value,(dict,)):
+ elif isinstance(value, (dict,)):
sub = []
- for (k,v) in value.iteritems():
+ for (k, v) in value.iteritems():
sub.extend(_gdumps(k))
sub.extend(_gdumps(v))
sub = "".join(sub)
@@ -243,7 +244,7 @@ def _gdumps(value,encoding):
yield ":"
yield sub
yield "}"
- elif isinstance(value,(unicode,)):
+ elif isinstance(value, (unicode,)):
if encoding is None:
raise ValueError("must specify encoding to dump unicode strings")
value = value.encode(encoding)
@@ -255,7 +256,7 @@ def _gdumps(value,encoding):
raise ValueError("unserializable object")
-def loads(string,encoding=None):
+def loads(string, encoding=None):
"""loads(string,encoding=None) -> object
This function parses a tnetstring into a python object.
@@ -263,10 +264,10 @@ def loads(string,encoding=None):
# No point duplicating effort here. In the C-extension version,
# loads() is measurably faster then pop() since it can avoid
# the overhead of building a second string.
- return pop(string,encoding)[0]
+ return pop(string, encoding)[0]
-def load(file,encoding=None):
+def load(file, encoding=None):
"""load(file,encoding=None) -> object
This function reads a tnetstring from a file and parses it into a
@@ -324,21 +325,20 @@ def load(file,encoding=None):
if type == "]":
l = []
while data:
- (item,data) = pop(data,encoding)
+ (item, data) = pop(data, encoding)
l.append(item)
return l
if type == "}":
d = {}
while data:
- (key,data) = pop(data,encoding)
- (val,data) = pop(data,encoding)
+ (key, data) = pop(data, encoding)
+ (val, data) = pop(data, encoding)
d[key] = val
return d
raise ValueError("unknown type tag")
-
-def pop(string,encoding=None):
+def pop(string, encoding=None):
"""pop(string,encoding=None) -> (object, remain)
This function parses a tnetstring into a python object.
@@ -347,12 +347,12 @@ def pop(string,encoding=None):
"""
# Parse out data length, type and remaining string.
try:
- (dlen,rest) = string.split(":",1)
+ (dlen, rest) = string.split(":", 1)
dlen = int(dlen)
except ValueError:
raise ValueError("not a tnetstring: missing or invalid length prefix")
try:
- (data,type,remain) = (rest[:dlen],rest[dlen],rest[dlen+1:])
+ (data, type, remain) = (rest[:dlen], rest[dlen], rest[dlen + 1:])
except IndexError:
# This fires if len(rest) < dlen, meaning we don't need
# to further validate that data is the right length.
@@ -360,40 +360,40 @@ def pop(string,encoding=None):
# Parse the data based on the type tag.
if type == ",":
if encoding is not None:
- return (data.decode(encoding),remain)
- return (data,remain)
+ return (data.decode(encoding), remain)
+ return (data, remain)
if type == "#":
try:
- return (int(data),remain)
+ return (int(data), remain)
except ValueError:
raise ValueError("not a tnetstring: invalid integer literal")
if type == "^":
try:
- return (float(data),remain)
+ return (float(data), remain)
except ValueError:
raise ValueError("not a tnetstring: invalid float literal")
if type == "!":
if data == "true":
- return (True,remain)
+ return (True, remain)
elif data == "false":
- return (False,remain)
+ return (False, remain)
else:
raise ValueError("not a tnetstring: invalid boolean literal")
if type == "~":
if data:
raise ValueError("not a tnetstring: invalid null literal")
- return (None,remain)
+ return (None, remain)
if type == "]":
l = []
while data:
- (item,data) = pop(data,encoding)
+ (item, data) = pop(data, encoding)
l.append(item)
- return (l,remain)
+ return (l, remain)
if type == "}":
d = {}
while data:
- (key,data) = pop(data,encoding)
- (val,data) = pop(data,encoding)
+ (key, data) = pop(data, encoding)
+ (val, data) = pop(data, encoding)
d[key] = val
- return (d,remain)
+ return (d, remain)
raise ValueError("unknown type tag")
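
Note: the values parsed by pop() above are framed as a decimal byte length, a colon, the payload, and a one-character type tag: "," for strings, "#" for integers, "^" for floats, "!" for booleans, "~" for null, "]" for lists and "}" for dicts. A minimal, self-contained sketch of that framing (an illustration only, not the module's API):

    def parse_tnetstring(s):
        # "length:payload<tag>" framing, as handled by pop() above.
        length, rest = s.split(":", 1)
        dlen = int(length)
        data, tag, remain = rest[:dlen], rest[dlen], rest[dlen + 1:]
        if tag == ",":
            return data, remain
        if tag == "#":
            return int(data), remain
        if tag == "^":
            return float(data), remain
        if tag == "!":
            return data == "true", remain
        if tag == "~":
            return None, remain
        raise ValueError("unsupported type tag: %r" % tag)

    # "5:hello," is the string "hello"; "3:389#" is the integer 389; "0:~" is None.
    assert parse_tnetstring("5:hello,") == ("hello", "")
    assert parse_tnetstring("3:389#") == (389, "")
    assert parse_tnetstring("0:~") == (None, "")
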
diff --git a/libmproxy/utils.py b/libmproxy/utils.py
index 7d0e369bf..a29a53f51 100644
--- a/libmproxy/utils.py
+++ b/libmproxy/utils.py
@@ -1,8 +1,14 @@
from __future__ import absolute_import
-import os, datetime, urllib, re
-import time, functools, cgi
+import os
+import datetime
+import urllib
+import re
+import time
+import functools
+import cgi
import json
+
def timestamp():
"""
Returns a serializable UTC timestamp.
@@ -91,7 +97,7 @@ def multipartdecode(hdrs, content):
match = rx.search(parts[1])
if match:
key = match.group(1)
- value = "".join(parts[3+parts[2:].index(""):])
+ value = "".join(parts[3 + parts[2:].index(""):])
r.append((key, value))
return r
return []
@@ -107,8 +113,8 @@ def pretty_duration(secs):
for limit, formatter in formatters:
if secs >= limit:
return formatter.format(secs)
- #less than 1 sec
- return "{:.0f}ms".format(secs*1000)
+ # less than 1 sec
+ return "{:.0f}ms".format(secs * 1000)
class Data:
@@ -126,7 +132,7 @@ class Data:
"""
fullpath = os.path.join(self.dirname, path)
if not os.path.exists(fullpath):
- raise ValueError, "dataPath: %s does not exist."%fullpath
+ raise ValueError("dataPath: %s does not exist." % fullpath)
return fullpath
pkg_data = Data(__name__)
@@ -135,10 +141,11 @@ class LRUCache:
"""
A simple LRU cache for generated values.
"""
+
def __init__(self, size=100):
self.size = size
self.cache = {}
- self.cacheList = []
+ self.cacheList = []
def get(self, gen, *args):
"""
@@ -147,7 +154,7 @@ class LRUCache:
*args: A list of immutable arguments, used to establish identity by
*the cache, and passed to gen to generate values.
"""
- if self.cache.has_key(args):
+ if args in self.cache:
self.cacheList.remove(args)
self.cacheList.insert(0, args)
return self.cache[args]
@@ -195,14 +202,14 @@ def hostport(scheme, host, port):
if (port, scheme) in [(80, "http"), (443, "https")]:
return host
else:
- return "%s:%s"%(host, port)
+ return "%s:%s" % (host, port)
def unparse_url(scheme, host, port, path=""):
"""
Returns a URL string, constructed from the specified components.
"""
- return "%s://%s%s"%(scheme, hostport(scheme, host, port), path)
+ return "%s://%s%s" % (scheme, hostport(scheme, host, port), path)
def clean_hanging_newline(t):
@@ -243,7 +250,7 @@ def parse_size(s):
try:
return int(s) * mult
except ValueError:
- raise ValueError("Invalid size specification: %s"%s)
+ raise ValueError("Invalid size specification: %s" % s)
def safe_subn(pattern, repl, target, *args, **kwargs):
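
Note: several hunks above and below replace Python-2-only spellings with forms that are also valid on Python 3: raise ValueError, msg becomes raise ValueError(msg), d.has_key(k) becomes k in d, and except SomeError, e becomes except SomeError as e. A short, self-contained illustration of the modern forms (example values are made up):

    d = {"key": 1}

    # Membership test: "key" in d replaces d.has_key("key"), which Python 3 removed.
    assert "key" in d

    # Exception binding: "except ... as e" works on Python 2.6+ and Python 3.
    try:
        int("not a number")
    except ValueError as e:
        assert "not a number" in str(e)

    # Raising: calling the exception class replaces the old "raise ValueError, msg" form.
    try:
        raise ValueError("dataPath: %s does not exist." % "/missing/path")
    except ValueError as e:
        assert "does not exist" in str(e)
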
diff --git a/libmproxy/web/__init__.py b/libmproxy/web/__init__.py
index 7016eeb8c..a0af7315c 100644
--- a/libmproxy/web/__init__.py
+++ b/libmproxy/web/__init__.py
@@ -79,6 +79,7 @@ class WebState(flow.State):
data=[]
)
+
class Options(object):
attributes = [
"app",
@@ -128,15 +129,14 @@ class WebMaster(flow.FlowMaster):
if options.rfile:
try:
self.load_flows_file(options.rfile)
- except flow.FlowReadError, v:
+ except flow.FlowReadError as v:
self.add_event(
- "Could not read flow file: %s"%v,
+ "Could not read flow file: %s" % v,
"error"
)
if self.options.app:
self.start_app(self.options.app_host, self.options.app_port)
-
def tick(self):
flow.FlowMaster.tick(self, self.masterq, timeout=0)
@@ -157,7 +157,8 @@ class WebMaster(flow.FlowMaster):
self.shutdown()
def _process_flow(self, f):
- if self.state.intercept and self.state.intercept(f) and not f.request.is_replay:
+ if self.state.intercept and self.state.intercept(
+ f) and not f.request.is_replay:
f.intercept(self)
else:
f.reply()
diff --git a/libmproxy/web/app.py b/libmproxy/web/app.py
index 9582d97b7..69341e76e 100644
--- a/libmproxy/web/app.py
+++ b/libmproxy/web/app.py
@@ -27,7 +27,8 @@ class RequestHandler(tornado.web.RequestHandler):
@property
def json(self):
- if not self.request.headers.get("Content-Type").startswith("application/json"):
+ if not self.request.headers.get(
+ "Content-Type").startswith("application/json"):
return None
return json.loads(self.request.body)
@@ -67,8 +68,10 @@ class FiltHelp(RequestHandler):
commands=filt.help
))
+
class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler):
- connections = None # raise an error if inherited class doesn't specify its own instance.
+ # raise an error if inherited class doesn't specify its own instance.
+ connections = None
def open(self):
self.connections.add(self)
@@ -264,4 +267,4 @@ class Application(tornado.web.Application):
cookie_secret=os.urandom(256),
debug=debug,
)
- super(Application, self).__init__(handlers, **settings)
\ No newline at end of file
+ super(Application, self).__init__(handlers, **settings)
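
Note: the connections = None class attribute above is a deliberate guard: the base broadcaster leaves the registry unset so that self.connections.add(...) fails immediately unless a subclass supplies its own container. A minimal sketch of the pattern outside Tornado (class and method names here are hypothetical):

    class Broadcaster(object):
        # Intentionally None: subclasses must define their own set, otherwise
        # registering a connection raises AttributeError right away.
        connections = None

        def register(self, conn):
            self.connections.add(conn)

    class FlowBroadcaster(Broadcaster):
        connections = set()  # class-level registry shared by all instances

    b = FlowBroadcaster()
    b.register("conn-1")
    assert "conn-1" in FlowBroadcaster.connections

    try:
        Broadcaster().register("conn-2")
    except AttributeError:
        pass  # the base class has no registry, as intended
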
diff --git a/mitmdump b/mitmdump
index 8cc4c99f8..16087f616 100755
--- a/mitmdump
+++ b/mitmdump
@@ -1,3 +1,3 @@
#!/usr/bin/env python
from libmproxy.main import mitmdump
-mitmdump()
\ No newline at end of file
+mitmdump()
diff --git a/mitmproxy b/mitmproxy
index cd22282b0..b7c5c94ec 100755
--- a/mitmproxy
+++ b/mitmproxy
@@ -1,3 +1,3 @@
#!/usr/bin/env python
from libmproxy.main import mitmproxy
-mitmproxy()
\ No newline at end of file
+mitmproxy()
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 000000000..bc980d562
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,9 @@
+[flake8]
+max-line-length = 80
+max-complexity = 15
+
+[pep8]
+max-line-length = 80
+max-complexity = 15
+exclude = */contrib/*
+ignore = E251,E309
diff --git a/setup.py b/setup.py
index 9509ce7a9..4fa646285 100644
--- a/setup.py
+++ b/setup.py
@@ -67,13 +67,11 @@ setup(
"Topic :: Internet",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: Proxy Servers",
- "Topic :: Software Development :: Testing"
- ],
+ "Topic :: Software Development :: Testing"],
packages=find_packages(),
include_package_data=True,
entry_points={
- 'console_scripts': console_scripts
- },
+ 'console_scripts': console_scripts},
install_requires=list(deps),
extras_require={
'dev': [
@@ -81,20 +79,15 @@ setup(
"nose>=1.3.0",
"nose-cov>=1.6",
"coveralls>=0.4.1",
- "pathod>=%s, <%s" % (
- version.MINORVERSION, version.NEXT_MINORVERSION
- ),
- "countershape"
- ],
+ "pathod>=%s, <%s" %
+ (version.MINORVERSION,
+ version.NEXT_MINORVERSION),
+ "countershape"],
'contentviews': [
"pyamf>=0.6.1",
"protobuf>=2.5.0",
- "cssutils>=1.0"
- ],
+ "cssutils>=1.0"],
'examples': [
"pytz",
"harparser",
- "beautifulsoup4"
- ]
- }
-)
+ "beautifulsoup4"]})
diff --git a/test/mock_urwid.py b/test/mock_urwid.py
index f132e0bdd..191210bf8 100644
--- a/test/mock_urwid.py
+++ b/test/mock_urwid.py
@@ -1,8 +1,10 @@
-import os, sys, mock
+import os
+import sys
+import mock
if os.name == "nt":
m = mock.Mock()
m.__version__ = "1.1.1"
m.Widget = mock.Mock
m.WidgetWrap = mock.Mock
sys.modules['urwid'] = m
- sys.modules['urwid.util'] = mock.Mock()
\ No newline at end of file
+ sys.modules['urwid.util'] = mock.Mock()
diff --git a/test/scripts/a.py b/test/scripts/a.py
index 1d5717b0e..210fea786 100644
--- a/test/scripts/a.py
+++ b/test/scripts/a.py
@@ -4,14 +4,18 @@ parser = argparse.ArgumentParser()
parser.add_argument('--var', type=int)
var = 0
+
+
def start(ctx, argv):
global var
var = parser.parse_args(argv[1:]).var
+
def here(ctx):
global var
var += 1
return var
+
def errargs():
pass
diff --git a/test/scripts/all.py b/test/scripts/all.py
index 3acaf6949..dad2aade7 100644
--- a/test/scripts/all.py
+++ b/test/scripts/all.py
@@ -1,28 +1,36 @@
log = []
+
+
def clientconnect(ctx, cc):
ctx.log("XCLIENTCONNECT")
log.append("clientconnect")
+
def serverconnect(ctx, cc):
ctx.log("XSERVERCONNECT")
log.append("serverconnect")
+
def request(ctx, f):
ctx.log("XREQUEST")
log.append("request")
+
def response(ctx, f):
ctx.log("XRESPONSE")
log.append("response")
+
def responseheaders(ctx, f):
ctx.log("XRESPONSEHEADERS")
log.append("responseheaders")
+
def clientdisconnect(ctx, cc):
ctx.log("XCLIENTDISCONNECT")
log.append("clientdisconnect")
+
def error(ctx, cc):
ctx.log("XERROR")
log.append("error")
diff --git a/test/scripts/concurrent_decorator.py b/test/scripts/concurrent_decorator.py
index 8e1320067..f6feda1d4 100644
--- a/test/scripts/concurrent_decorator.py
+++ b/test/scripts/concurrent_decorator.py
@@ -29,4 +29,4 @@ def error(context, err):
@concurrent
def clientdisconnect(context, dc):
- context.log("clientdisconnect")
\ No newline at end of file
+ context.log("clientdisconnect")
diff --git a/test/scripts/concurrent_decorator_err.py b/test/scripts/concurrent_decorator_err.py
index 781913150..00fd8dade 100644
--- a/test/scripts/concurrent_decorator_err.py
+++ b/test/scripts/concurrent_decorator_err.py
@@ -1,5 +1,6 @@
from libmproxy.script import concurrent
+
@concurrent
def start(context, argv):
- pass
\ No newline at end of file
+ pass
diff --git a/test/scripts/duplicate_flow.py b/test/scripts/duplicate_flow.py
index f1b923095..e13af786d 100644
--- a/test/scripts/duplicate_flow.py
+++ b/test/scripts/duplicate_flow.py
@@ -2,4 +2,3 @@
def request(ctx, f):
f = ctx.duplicate_flow(f)
ctx.replay_request(f)
-
diff --git a/test/scripts/stream_modify.py b/test/scripts/stream_modify.py
index 9a98a7eed..e5c323bec 100644
--- a/test/scripts/stream_modify.py
+++ b/test/scripts/stream_modify.py
@@ -4,4 +4,4 @@ def modify(chunks):
def responseheaders(context, flow):
- flow.response.stream = modify
\ No newline at end of file
+ flow.response.stream = modify
diff --git a/test/test_app.py b/test/test_app.py
index 0b6ed14ca..5fb495633 100644
--- a/test/test_app.py
+++ b/test/test_app.py
@@ -1,8 +1,13 @@
-import mock, socket, os, time
+import mock
+import socket
+import os
+import time
from libmproxy import dump
from netlib import certutils, tcp
from libpathod.pathoc import Pathoc
-import tutils, tservers
+import tutils
+import tservers
+
class TestApp(tservers.HTTPProxTest):
def test_basic(self):
diff --git a/test/test_cmdline.py b/test/test_cmdline.py
index f7bf4612c..eafcbde48 100644
--- a/test/test_cmdline.py
+++ b/test/test_cmdline.py
@@ -37,13 +37,24 @@ def test_parse_replace_hook():
def test_parse_server_spec():
tutils.raises("Invalid server specification", cmdline.parse_server_spec, "")
- assert cmdline.parse_server_spec("http://foo.com:88") == [False, False, "foo.com", 88]
- assert cmdline.parse_server_spec("http://foo.com") == [False, False, "foo.com", 80]
- assert cmdline.parse_server_spec("https://foo.com") == [True, True, "foo.com", 443]
- assert cmdline.parse_server_spec_special("https2http://foo.com") == [True, False, "foo.com", 80]
- assert cmdline.parse_server_spec_special("http2https://foo.com") == [False, True, "foo.com", 443]
- tutils.raises("Invalid server specification", cmdline.parse_server_spec, "foo.com")
- tutils.raises("Invalid server specification", cmdline.parse_server_spec, "http://")
+ assert cmdline.parse_server_spec(
+ "http://foo.com:88") == [False, False, "foo.com", 88]
+ assert cmdline.parse_server_spec(
+ "http://foo.com") == [False, False, "foo.com", 80]
+ assert cmdline.parse_server_spec(
+ "https://foo.com") == [True, True, "foo.com", 443]
+ assert cmdline.parse_server_spec_special(
+ "https2http://foo.com") == [True, False, "foo.com", 80]
+ assert cmdline.parse_server_spec_special(
+ "http2https://foo.com") == [False, True, "foo.com", 443]
+ tutils.raises(
+ "Invalid server specification",
+ cmdline.parse_server_spec,
+ "foo.com")
+ tutils.raises(
+ "Invalid server specification",
+ cmdline.parse_server_spec,
+ "http://")
def test_parse_setheaders():
@@ -103,7 +114,7 @@ def test_common():
)
p = tutils.test_data.path("data/replace")
- opts.replace_file = [("/foo/bar/%s"%p)]
+ opts.replace_file = [("/foo/bar/%s" % p)]
v = cmdline.get_common_options(opts)["replacements"]
assert len(v) == 1
assert v[0][2].strip() == "replacecontents"
@@ -122,5 +133,3 @@ def test_mitmdump():
def test_mitmweb():
ap = cmdline.mitmweb()
assert ap
-
-
diff --git a/test/test_console.py b/test/test_console.py
index 419b94a78..ed8408a5d 100644
--- a/test/test_console.py
+++ b/test/test_console.py
@@ -1,4 +1,7 @@
-import os, sys, mock, gc
+import os
+import sys
+import mock
+import gc
from os.path import normpath
import mock_urwid
from libmproxy import console
@@ -6,6 +9,7 @@ from libmproxy.console import common
import tutils
+
class TestConsoleState:
def test_flow(self):
"""
diff --git a/test/test_console_contentview.py b/test/test_console_contentview.py
index 83dbbb8ff..a296376e5 100644
--- a/test/test_console_contentview.py
+++ b/test/test_console_contentview.py
@@ -31,40 +31,39 @@ class TestContentView:
def test_view_auto(self):
v = cv.ViewAuto()
f = v(
- odict.ODictCaseless(),
- "foo",
- 1000
- )
+ odict.ODictCaseless(),
+ "foo",
+ 1000
+ )
assert f[0] == "Raw"
f = v(
- odict.ODictCaseless(
- [["content-type", "text/html"]],
- ),
- "",
- 1000
- )
+ odict.ODictCaseless(
+ [["content-type", "text/html"]],
+ ),
+ "",
+ 1000
+ )
assert f[0] == "HTML"
f = v(
- odict.ODictCaseless(
- [["content-type", "text/flibble"]],
- ),
- "foo",
- 1000
- )
+ odict.ODictCaseless(
+ [["content-type", "text/flibble"]],
+ ),
+ "foo",
+ 1000
+ )
assert f[0] == "Raw"
f = v(
- odict.ODictCaseless(
- [["content-type", "text/flibble"]],
- ),
- "",
- 1000
- )
+ odict.ODictCaseless(
+ [["content-type", "text/flibble"]],
+ ),
+ "",
+ 1000
+ )
assert f[0].startswith("XML")
-
def test_view_urlencoded(self):
d = utils.urlencode([("one", "two"), ("three", "four")])
v = cv.ViewURLEncoded()
@@ -91,7 +90,7 @@ class TestContentView:
v = cv.ViewJSON()
assert v([], "{}", 1000)
assert not v([], "{", 1000)
- assert v([], "[" + ",".join(["0"]*cv.VIEW_CUTOFF) + "]", 1000)
+ assert v([], "[" + ",".join(["0"] * cv.VIEW_CUTOFF) + "]", 1000)
assert v([], "[1, 2, 3, 4, 5]", 5)
def test_view_xml(self):
@@ -145,18 +144,18 @@ class TestContentView:
def test_view_image(self):
v = cv.ViewImage()
p = tutils.test_data.path("data/image.png")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
p = tutils.test_data.path("data/image.gif")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
p = tutils.test_data.path("data/image-err1.jpg")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
p = tutils.test_data.path("data/image.ico")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
- assert not v([], "flibble", sys.maxint)
+ assert not v([], "flibble", sys.maxsize)
def test_view_multipart(self):
view = cv.ViewMultipart()
@@ -187,71 +186,70 @@ Larry
def test_get_content_view(self):
r = cv.get_content_view(
- cv.get("Raw"),
- [["content-type", "application/json"]],
- "[1, 2, 3]",
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("Raw"),
+ [["content-type", "application/json"]],
+ "[1, 2, 3]",
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert "Raw" in r[0]
r = cv.get_content_view(
- cv.get("Auto"),
- [["content-type", "application/json"]],
- "[1, 2, 3]",
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("Auto"),
+ [["content-type", "application/json"]],
+ "[1, 2, 3]",
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert r[0] == "JSON"
r = cv.get_content_view(
- cv.get("Auto"),
- [["content-type", "application/json"]],
- "[1, 2",
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("Auto"),
+ [["content-type", "application/json"]],
+ "[1, 2",
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert "Raw" in r[0]
r = cv.get_content_view(
- cv.get("AMF"),
- [],
- "[1, 2",
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("AMF"),
+ [],
+ "[1, 2",
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert "Raw" in r[0]
-
r = cv.get_content_view(
- cv.get("Auto"),
- [
- ["content-type", "application/json"],
- ["content-encoding", "gzip"]
- ],
- encoding.encode('gzip', "[1, 2, 3]"),
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("Auto"),
+ [
+ ["content-type", "application/json"],
+ ["content-encoding", "gzip"]
+ ],
+ encoding.encode('gzip', "[1, 2, 3]"),
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert "decoded gzip" in r[0]
assert "JSON" in r[0]
r = cv.get_content_view(
- cv.get("XML"),
- [
- ["content-type", "application/json"],
- ["content-encoding", "gzip"]
- ],
- encoding.encode('gzip', "[1, 2, 3]"),
- 1000,
- lambda x, l: None,
- False
- )
+ cv.get("XML"),
+ [
+ ["content-type", "application/json"],
+ ["content-encoding", "gzip"]
+ ],
+ encoding.encode('gzip', "[1, 2, 3]"),
+ 1000,
+ lambda x, l: None,
+ False
+ )
assert "decoded gzip" in r[0]
assert "Raw" in r[0]
@@ -261,24 +259,25 @@ if pyamf:
v = cv.ViewAMF()
p = tutils.test_data.path("data/amf01")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
p = tutils.test_data.path("data/amf02")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
def test_view_amf_response():
v = cv.ViewAMF()
p = tutils.test_data.path("data/amf03")
- assert v([], file(p,"rb").read(), sys.maxint)
+ assert v([], file(p, "rb").read(), sys.maxsize)
if cv.ViewProtobuf.is_available():
def test_view_protobuf_request():
v = cv.ViewProtobuf()
p = tutils.test_data.path("data/protobuf01")
- content_type, output = v([], file(p,"rb").read(), sys.maxint)
+ content_type, output = v([], file(p, "rb").read(), sys.maxsize)
assert content_type == "Protobuf"
assert output[0].text == '1: "3bbc333c-e61c-433b-819a-0b9a8cc103b8"'
+
def test_get_by_shortcut():
assert cv.get_by_shortcut("h")
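
Note: the content-view tests above now pass sys.maxsize as the size cutoff; sys.maxint exists only on Python 2, while sys.maxsize is available on both Python 2 and 3 and is comfortably large enough to mean "no practical limit". A small illustration with a hypothetical truncation helper:

    import sys

    def truncate(data, limit=sys.maxsize):
        # sys.maxsize works on Python 2 and 3; passing it as the limit
        # effectively disables truncation.
        return data[:limit]

    assert truncate(b"0123456789", 4) == b"0123"
    assert truncate(b"0123456789") == b"0123456789"
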
diff --git a/test/test_console_help.py b/test/test_console_help.py
index 24517439f..a7a8b7455 100644
--- a/test/test_console_help.py
+++ b/test/test_console_help.py
@@ -5,10 +5,12 @@ if os.name == "nt":
import libmproxy.console.help as help
+
class DummyLoop:
def __init__(self):
self.widget = None
+
class DummyMaster:
def __init__(self):
self.loop = DummyLoop()
diff --git a/test/test_controller.py b/test/test_controller.py
index e71a148eb..d287f18d4 100644
--- a/test/test_controller.py
+++ b/test/test_controller.py
@@ -8,5 +8,3 @@ class TestMaster:
msg = mock.MagicMock()
m.handle("type", msg)
assert msg.reply.call_count == 1
-
-
diff --git a/test/test_dump.py b/test/test_dump.py
index 48eeb244a..e3743ac64 100644
--- a/test/test_dump.py
+++ b/test/test_dump.py
@@ -116,7 +116,6 @@ class TestDumpMaster:
0, None, "", verbosity=1, rfile="test_dump.py"
)
-
def test_options(self):
o = dump.Options(verbosity = 2)
assert o.verbosity == 2
@@ -147,21 +146,25 @@ class TestDumpMaster:
def test_basic(self):
for i in (1, 2, 3):
assert "GET" in self._dummy_cycle(1, "~s", "", flow_detail=i)
- assert "GET" in self._dummy_cycle(1, "~s", "\x00\x00\x00", flow_detail=i)
+ assert "GET" in self._dummy_cycle(
+ 1,
+ "~s",
+ "\x00\x00\x00",
+ flow_detail=i)
assert "GET" in self._dummy_cycle(1, "~s", "ascii", flow_detail=i)
def test_write(self):
with tutils.tmpdir() as d:
p = os.path.join(d, "a")
- self._dummy_cycle(1, None, "", outfile=(p,"wb"), verbosity=0)
- assert len(list(flow.FlowReader(open(p,"rb")).stream())) == 1
+ self._dummy_cycle(1, None, "", outfile=(p, "wb"), verbosity=0)
+ assert len(list(flow.FlowReader(open(p, "rb")).stream())) == 1
def test_write_append(self):
with tutils.tmpdir() as d:
p = os.path.join(d, "a.append")
- self._dummy_cycle(1, None, "", outfile=(p,"wb"), verbosity=0)
- self._dummy_cycle(1, None, "", outfile=(p,"ab"), verbosity=0)
- assert len(list(flow.FlowReader(open(p,"rb")).stream())) == 2
+ self._dummy_cycle(1, None, "", outfile=(p, "wb"), verbosity=0)
+ self._dummy_cycle(1, None, "", outfile=(p, "ab"), verbosity=0)
+ assert len(list(flow.FlowReader(open(p, "rb")).stream())) == 2
def test_write_err(self):
tutils.raises(
diff --git a/test/test_encoding.py b/test/test_encoding.py
index 732447e2f..e13f5dced 100644
--- a/test/test_encoding.py
+++ b/test/test_encoding.py
@@ -1,5 +1,6 @@
from libmproxy import encoding
+
def test_identity():
assert "string" == encoding.decode("identity", "string")
assert "string" == encoding.encode("identity", "string")
@@ -8,12 +9,25 @@ def test_identity():
def test_gzip():
- assert "string" == encoding.decode("gzip", encoding.encode("gzip", "string"))
+ assert "string" == encoding.decode(
+ "gzip",
+ encoding.encode(
+ "gzip",
+ "string"))
assert None == encoding.decode("gzip", "bogus")
def test_deflate():
- assert "string" == encoding.decode("deflate", encoding.encode("deflate", "string"))
- assert "string" == encoding.decode("deflate", encoding.encode("deflate", "string")[2:-4])
+ assert "string" == encoding.decode(
+ "deflate",
+ encoding.encode(
+ "deflate",
+ "string"))
+ assert "string" == encoding.decode(
+ "deflate",
+ encoding.encode(
+ "deflate",
+ "string")[
+ 2:-
+ 4])
assert None == encoding.decode("deflate", "bogus")
-
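
Note: the encoding tests above round-trip strings through gzip and deflate and expect None back for undecodable input. The deflate half of that behaviour can be sketched with the standard library alone (illustrative only, not the libmproxy.encoding API):

    import zlib

    def deflate_encode(data):
        return zlib.compress(data)

    def deflate_decode(data):
        try:
            return zlib.decompress(data)
        except zlib.error:
            return None  # mirror the "bogus input yields None" expectation above

    assert deflate_decode(deflate_encode(b"string")) == b"string"
    assert deflate_decode(b"bogus") is None
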
diff --git a/test/test_examples.py b/test/test_examples.py
index fd901b5d1..e9bccd2e5 100644
--- a/test/test_examples.py
+++ b/test/test_examples.py
@@ -21,7 +21,7 @@ def test_load_scripts():
f += " foo bar" # two arguments required
try:
s = script.Script(f, tmaster) # Loads the script file.
- except Exception, v:
+ except Exception as v:
if not "ImportError" in str(v):
raise
else:
diff --git a/test/test_filt.py b/test/test_filt.py
index 97b8e73cf..3ad17dfed 100644
--- a/test/test_filt.py
+++ b/test/test_filt.py
@@ -5,6 +5,7 @@ from libmproxy.protocol import http
from libmproxy.protocol.primitives import Error
import tutils
+
class TestParsing:
def _dump(self, x):
c = cStringIO.StringIO()
@@ -99,7 +100,15 @@ class TestMatching:
headers = odict.ODictCaseless()
headers["header_response"] = ["svalue"]
- f.response = http.HTTPResponse((1, 1), 200, "OK", headers, "content_response", None, None)
+ f.response = http.HTTPResponse(
+ (1,
+ 1),
+ 200,
+ "OK",
+ headers,
+ "content_response",
+ None,
+ None)
return f
diff --git a/test/test_flow.py b/test/test_flow.py
index f5d069066..2609b7cb1 100644
--- a/test/test_flow.py
+++ b/test/test_flow.py
@@ -1,4 +1,6 @@
-import Queue, time, os.path
+import Queue
+import time
+import os.path
from cStringIO import StringIO
import email.utils
import mock
@@ -33,7 +35,6 @@ def test_app_registry():
assert ar.get(r)
-
class TestStickyCookieState:
def _response(self, cookie, host):
s = flow.StickyCookieState(filt.parse(".*"))
@@ -115,7 +116,15 @@ class TestClientPlaybackState:
class TestServerPlaybackState:
def test_hash(self):
- s = flow.ServerPlaybackState(None, [], False, False, None, False, None, False)
+ s = flow.ServerPlaybackState(
+ None,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
r = tutils.tflow()
r2 = tutils.tflow()
@@ -131,7 +140,15 @@ class TestServerPlaybackState:
assert s._hash(r) != s._hash(r2)
def test_headers(self):
- s = flow.ServerPlaybackState(["foo"], [], False, False, None, False, None, False)
+ s = flow.ServerPlaybackState(
+ ["foo"],
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
r = tutils.tflow(resp=True)
r.request.headers["foo"] = ["bar"]
r2 = tutils.tflow(resp=True)
@@ -152,7 +169,9 @@ class TestServerPlaybackState:
r2 = tutils.tflow(resp=True)
r2.request.headers["key"] = ["two"]
- s = flow.ServerPlaybackState(None, [r, r2], False, False, None, False, None, False)
+ s = flow.ServerPlaybackState(
+ None, [
+ r, r2], False, False, None, False, None, False)
assert s.count() == 2
assert len(s.fmap.keys()) == 1
@@ -173,34 +192,41 @@ class TestServerPlaybackState:
r2 = tutils.tflow(resp=True)
r2.request.headers["key"] = ["two"]
- s = flow.ServerPlaybackState(None, [r, r2], False, True, None, False, None, False)
+ s = flow.ServerPlaybackState(
+ None, [
+ r, r2], False, True, None, False, None, False)
assert s.count() == 2
s.next_flow(r)
assert s.count() == 2
-
def test_ignore_params(self):
- s = flow.ServerPlaybackState(None, [], False, False, ["param1", "param2"], False, None, False)
+ s = flow.ServerPlaybackState(
+ None, [], False, False, [
+ "param1", "param2"], False, None, False)
r = tutils.tflow(resp=True)
- r.request.path="/test?param1=1"
+ r.request.path = "/test?param1=1"
r2 = tutils.tflow(resp=True)
- r2.request.path="/test"
+ r2.request.path = "/test"
assert s._hash(r) == s._hash(r2)
- r2.request.path="/test?param1=2"
+ r2.request.path = "/test?param1=2"
assert s._hash(r) == s._hash(r2)
- r2.request.path="/test?param2=1"
+ r2.request.path = "/test?param2=1"
assert s._hash(r) == s._hash(r2)
- r2.request.path="/test?param3=2"
+ r2.request.path = "/test?param3=2"
assert not s._hash(r) == s._hash(r2)
def test_ignore_payload_params(self):
- s = flow.ServerPlaybackState(None, [], False, False, None, False, ["param1", "param2"], False)
+ s = flow.ServerPlaybackState(
+ None, [], False, False, None, False, [
+ "param1", "param2"], False)
r = tutils.tflow(resp=True)
- r.request.headers["Content-Type"] = ["application/x-www-form-urlencoded"]
+ r.request.headers[
+ "Content-Type"] = ["application/x-www-form-urlencoded"]
r.request.content = "paramx=x¶m1=1"
r2 = tutils.tflow(resp=True)
- r2.request.headers["Content-Type"] = ["application/x-www-form-urlencoded"]
+ r2.request.headers[
+ "Content-Type"] = ["application/x-www-form-urlencoded"]
r2.request.content = "paramx=x¶m1=1"
# same parameters
assert s._hash(r) == s._hash(r2)
@@ -208,20 +234,22 @@ class TestServerPlaybackState:
r2.request.content = "paramx=x¶m1=2"
assert s._hash(r) == s._hash(r2)
# missing parameter
- r2.request.content="paramx=x"
+ r2.request.content = "paramx=x"
assert s._hash(r) == s._hash(r2)
# ignorable parameter added
- r2.request.content="paramx=x¶m1=2"
+ r2.request.content = "paramx=x¶m1=2"
assert s._hash(r) == s._hash(r2)
# not ignorable parameter changed
- r2.request.content="paramx=y¶m1=1"
+ r2.request.content = "paramx=y¶m1=1"
assert not s._hash(r) == s._hash(r2)
# not ignorable parameter missing
- r2.request.content="param1=1"
+ r2.request.content = "param1=1"
assert not s._hash(r) == s._hash(r2)
def test_ignore_payload_params_other_content_type(self):
- s = flow.ServerPlaybackState(None, [], False, False, None, False, ["param1", "param2"], False)
+ s = flow.ServerPlaybackState(
+ None, [], False, False, None, False, [
+ "param1", "param2"], False)
r = tutils.tflow(resp=True)
r.request.headers["Content-Type"] = ["application/json"]
r.request.content = '{"param1":"1"}'
@@ -235,19 +263,31 @@ class TestServerPlaybackState:
assert not s._hash(r) == s._hash(r2)
def test_ignore_payload_wins_over_params(self):
- #NOTE: parameters are mutually exclusive in options
- s = flow.ServerPlaybackState(None, [], False, False, None, True, ["param1", "param2"], False)
+ # NOTE: parameters are mutually exclusive in options
+ s = flow.ServerPlaybackState(
+ None, [], False, False, None, True, [
+ "param1", "param2"], False)
r = tutils.tflow(resp=True)
- r.request.headers["Content-Type"] = ["application/x-www-form-urlencoded"]
+ r.request.headers[
+ "Content-Type"] = ["application/x-www-form-urlencoded"]
r.request.content = "paramx=y"
r2 = tutils.tflow(resp=True)
- r2.request.headers["Content-Type"] = ["application/x-www-form-urlencoded"]
+ r2.request.headers[
+ "Content-Type"] = ["application/x-www-form-urlencoded"]
r2.request.content = "paramx=x"
# same parameters
assert s._hash(r) == s._hash(r2)
def test_ignore_content(self):
- s = flow.ServerPlaybackState(None, [], False, False, None, False, None, False)
+ s = flow.ServerPlaybackState(
+ None,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
r = tutils.tflow(resp=True)
r2 = tutils.tflow(resp=True)
@@ -257,8 +297,16 @@ class TestServerPlaybackState:
r2.request.content = "bar"
assert not s._hash(r) == s._hash(r2)
- #now ignoring content
- s = flow.ServerPlaybackState(None, [], False, False, None, True, None, False)
+ # now ignoring content
+ s = flow.ServerPlaybackState(
+ None,
+ [],
+ False,
+ False,
+ None,
+ True,
+ None,
+ False)
r = tutils.tflow(resp=True)
r2 = tutils.tflow(resp=True)
r.request.content = "foo"
@@ -272,14 +320,22 @@ class TestServerPlaybackState:
assert s._hash(r) == s._hash(r2)
def test_ignore_host(self):
- s = flow.ServerPlaybackState(None, [], False, False, None, False, None, True)
+ s = flow.ServerPlaybackState(
+ None,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ True)
r = tutils.tflow(resp=True)
r2 = tutils.tflow(resp=True)
- r.request.host="address"
- r2.request.host="address"
+ r.request.host = "address"
+ r2.request.host = "address"
assert s._hash(r) == s._hash(r2)
- r2.request.host="wrong_address"
+ r2.request.host = "wrong_address"
assert s._hash(r) == s._hash(r2)
@@ -343,12 +399,14 @@ class TestFlow:
def test_getset_state(self):
f = tutils.tflow(resp=True)
state = f.get_state()
- assert f.get_state() == protocol.http.HTTPFlow.from_state(state).get_state()
+ assert f.get_state() == protocol.http.HTTPFlow.from_state(
+ state).get_state()
f.response = None
f.error = Error("error")
state = f.get_state()
- assert f.get_state() == protocol.http.HTTPFlow.from_state(state).get_state()
+ assert f.get_state() == protocol.http.HTTPFlow.from_state(
+ state).get_state()
f2 = f.copy()
f2.id = f.id # copy creates a different uuid
@@ -430,7 +488,6 @@ class TestFlow:
assert f.response.content == "abarb"
-
class TestState:
def test_backup(self):
c = flow.State()
@@ -519,7 +576,7 @@ class TestState:
assert c.intercept_txt == "~q"
assert "Invalid" in c.set_intercept("~")
assert not c.set_intercept(None)
- assert c.intercept_txt == None
+ assert c.intercept_txt is None
def _add_request(self, state):
f = tutils.tflow()
@@ -608,7 +665,13 @@ class TestSerialize:
def test_load_flows_reverse(self):
r = self._treader()
s = flow.State()
- conf = ProxyConfig(mode="reverse", upstream_server=[True,True,"use-this-domain",80])
+ conf = ProxyConfig(
+ mode="reverse",
+ upstream_server=[
+ True,
+ True,
+ "use-this-domain",
+ 80])
fm = flow.FlowMaster(DummyServer(conf), s)
fm.load_flows(r)
assert s.flows[0].request.host == "use-this-domain"
@@ -630,7 +693,6 @@ class TestSerialize:
r = flow.FlowReader(sio)
assert len(list(r.stream()))
-
def test_error(self):
sio = StringIO()
sio.write("bogus")
@@ -661,7 +723,8 @@ class TestFlowMaster:
assert not fm.load_script(tutils.test_data.path("scripts/a.py"))
assert not fm.unload_scripts()
assert fm.load_script("nonexistent")
- assert "ValueError" in fm.load_script(tutils.test_data.path("scripts/starterr.py"))
+ assert "ValueError" in fm.load_script(
+ tutils.test_data.path("scripts/starterr.py"))
assert len(fm.scripts) == 0
def test_getset_ignore(self):
@@ -707,14 +770,14 @@ class TestFlowMaster:
assert fm.scripts[0].ns["log"][-1] == "request"
fm.handle_response(f)
assert fm.scripts[0].ns["log"][-1] == "response"
- #load second script
+ # load second script
assert not fm.load_script(tutils.test_data.path("scripts/all.py"))
assert len(fm.scripts) == 2
fm.handle_clientdisconnect(f.server_conn)
assert fm.scripts[0].ns["log"][-1] == "clientdisconnect"
assert fm.scripts[1].ns["log"][-1] == "clientdisconnect"
- #unload first script
+ # unload first script
fm.unload_scripts()
assert len(fm.scripts) == 0
assert not fm.load_script(tutils.test_data.path("scripts/all.py"))
@@ -765,7 +828,16 @@ class TestFlowMaster:
f = tutils.tflow(resp=True)
pb = [tutils.tflow(resp=True), f]
fm = flow.FlowMaster(DummyServer(ProxyConfig()), s)
- assert not fm.start_server_playback(pb, False, [], False, False, None, False, None, False)
+ assert not fm.start_server_playback(
+ pb,
+ False,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
assert not fm.start_client_playback(pb, False)
fm.client_playback.testing = True
@@ -788,16 +860,43 @@ class TestFlowMaster:
fm.refresh_server_playback = True
assert not fm.do_server_playback(tutils.tflow())
- fm.start_server_playback(pb, False, [], False, False, None, False, None, False)
+ fm.start_server_playback(
+ pb,
+ False,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
assert fm.do_server_playback(tutils.tflow())
- fm.start_server_playback(pb, False, [], True, False, None, False, None, False)
+ fm.start_server_playback(
+ pb,
+ False,
+ [],
+ True,
+ False,
+ None,
+ False,
+ None,
+ False)
r = tutils.tflow()
r.request.content = "gibble"
assert not fm.do_server_playback(r)
assert fm.do_server_playback(tutils.tflow())
- fm.start_server_playback(pb, False, [], True, False, None, False, None, False)
+ fm.start_server_playback(
+ pb,
+ False,
+ [],
+ True,
+ False,
+ None,
+ False,
+ None,
+ False)
q = Queue.Queue()
fm.tick(q, 0)
assert fm.should_exit.is_set()
@@ -812,7 +911,16 @@ class TestFlowMaster:
pb = [f]
fm = flow.FlowMaster(None, s)
fm.refresh_server_playback = True
- fm.start_server_playback(pb, True, [], False, False, None, False, None, False)
+ fm.start_server_playback(
+ pb,
+ True,
+ [],
+ False,
+ False,
+ None,
+ False,
+ None,
+ False)
f = tutils.tflow()
f.request.host = "nonexistent"
@@ -862,8 +970,9 @@ class TestFlowMaster:
def test_stream(self):
with tutils.tmpdir() as tdir:
p = os.path.join(tdir, "foo")
+
def r():
- r = flow.FlowReader(open(p,"rb"))
+ r = flow.FlowReader(open(p, "rb"))
return list(r.stream())
s = flow.State()
@@ -884,6 +993,7 @@ class TestFlowMaster:
assert not r()[1].response
+
class TestRequest:
def test_simple(self):
f = tutils.tflow()
@@ -919,7 +1029,7 @@ class TestRequest:
r.host = "address"
r.port = 22
- assert r.url== "https://address:22/path"
+ assert r.url == "https://address:22/path"
assert r.pretty_url(True) == "https://address:22/path"
r.headers["Host"] = ["foo.com"]
@@ -1062,6 +1172,7 @@ class TestRequest:
resp.headers = h
assert resp.headers.get_first("content-type") == "text/plain"
+
class TestResponse:
def test_simple(self):
f = tutils.tflow(resp=True)
@@ -1077,7 +1188,9 @@ class TestResponse:
assert resp.size() == len(resp.assemble())
resp.content = CONTENT_MISSING
- tutils.raises("Cannot assemble flow with CONTENT_MISSING", resp.assemble)
+ tutils.raises(
+ "Cannot assemble flow with CONTENT_MISSING",
+ resp.assemble)
def test_refresh(self):
r = tutils.tresp()
@@ -1086,14 +1199,15 @@ class TestResponse:
pre = r.headers["date"]
r.refresh(n)
assert pre == r.headers["date"]
- r.refresh(n+60)
+ r.refresh(n + 60)
d = email.utils.parsedate_tz(r.headers["date"][0])
d = email.utils.mktime_tz(d)
# Weird that this is not exact...
- assert abs(60-(d-n)) <= 1
+ assert abs(60 - (d - n)) <= 1
- r.headers["set-cookie"] = ["MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure"]
+ r.headers[
+ "set-cookie"] = ["MOO=BAR; Expires=Tue, 08-Mar-2011 00:20:38 GMT; Path=foo.com; Secure"]
r.refresh()
def test_refresh_cookie(self):
@@ -1146,7 +1260,7 @@ class TestResponse:
def test_header_size(self):
r = tutils.tresp()
result = len(r._assemble_headers())
- assert result==44
+ assert result == 44
def test_get_content_type(self):
h = odict.ODictCaseless()
@@ -1178,7 +1292,7 @@ class TestClientConnection:
c = tutils.tclient_conn()
assert ClientConnection.from_state(c.get_state()).get_state() ==\
- c.get_state()
+ c.get_state()
c2 = tutils.tclient_conn()
c2.address.address = (c2.address.host, 4242)
@@ -1295,7 +1409,6 @@ def test_setheaders():
h.run(f)
assert f.request.content == "foo"
-
h.clear()
h.add("~s", "one", "two")
h.add("~s", "one", "three")
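
Note: the ServerPlaybackState tests above exercise a replay matcher that hashes requests while optionally ignoring listed query parameters, payload parameters, the body, or the host. A rough sketch of the query-parameter case (illustrative only, not the library's _hash implementation):

    import hashlib

    try:
        from urllib.parse import urlparse, parse_qsl  # Python 3
    except ImportError:
        from urlparse import urlparse, parse_qsl  # Python 2

    def request_key(path, ignore_params=()):
        # Build a stable key from the path, minus any ignored query parameters.
        parsed = urlparse(path)
        params = [(k, v) for k, v in parse_qsl(parsed.query) if k not in ignore_params]
        key = (parsed.path, tuple(sorted(params)))
        return hashlib.sha256(repr(key).encode("utf8")).hexdigest()

    # Requests that differ only in an ignored parameter hash identically.
    assert request_key("/test?param1=1", ["param1"]) == request_key("/test", ["param1"])
    # A non-ignored parameter changes the key.
    assert request_key("/test?param3=2", ["param1"]) != request_key("/test", ["param1"])
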
diff --git a/test/test_fuzzing.py b/test/test_fuzzing.py
index 3e5fc1001..5e5115c9d 100644
--- a/test/test_fuzzing.py
+++ b/test/test_fuzzing.py
@@ -5,16 +5,17 @@ import tservers
after being fixed to check for regressions.
"""
+
class TestFuzzy(tservers.HTTPProxTest):
def test_idna_err(self):
req = r'get:"http://localhost:%s":i10,"\xc6"'
p = self.pathoc()
- assert p.request(req%self.server.port).status_code == 400
+ assert p.request(req % self.server.port).status_code == 400
def test_nullbytes(self):
req = r'get:"http://localhost:%s":i19,"\x00"'
p = self.pathoc()
- assert p.request(req%self.server.port).status_code == 400
+ assert p.request(req % self.server.port).status_code == 400
def test_invalid_ports(self):
req = 'get:"http://localhost:999999"'
@@ -24,12 +25,12 @@ class TestFuzzy(tservers.HTTPProxTest):
def test_invalid_ipv6_url(self):
req = 'get:"http://localhost:%s":i13,"["'
p = self.pathoc()
- assert p.request(req%self.server.port).status_code == 400
+ assert p.request(req % self.server.port).status_code == 400
def test_invalid_upstream(self):
req = r"get:'http://localhost:%s/p/200:i10,\x27+\x27'"
p = self.pathoc()
- assert p.request(req%self.server.port).status_code == 502
+ assert p.request(req % self.server.port).status_code == 502
def test_upstream_disconnect(self):
req = r'200:d0'
diff --git a/test/test_platform_pf.py b/test/test_platform_pf.py
index 2c4870f9e..3250b035b 100644
--- a/test/test_platform_pf.py
+++ b/test/test_platform_pf.py
@@ -1,4 +1,5 @@
-import tutils, sys
+import tutils
+import sys
from libmproxy.platform import pf
@@ -6,10 +7,20 @@ class TestLookup:
def test_simple(self):
if sys.platform == "freebsd10":
p = tutils.test_data.path("data/pf02")
- d = open(p,"rb").read()
+ d = open(p, "rb").read()
else:
p = tutils.test_data.path("data/pf01")
- d = open(p,"rb").read()
+ d = open(p, "rb").read()
assert pf.lookup("192.168.1.111", 40000, d) == ("5.5.5.5", 80)
- tutils.raises("Could not resolve original destination", pf.lookup, "192.168.1.112", 40000, d)
- tutils.raises("Could not resolve original destination", pf.lookup, "192.168.1.111", 40001, d)
+ tutils.raises(
+ "Could not resolve original destination",
+ pf.lookup,
+ "192.168.1.112",
+ 40000,
+ d)
+ tutils.raises(
+ "Could not resolve original destination",
+ pf.lookup,
+ "192.168.1.111",
+ 40001,
+ d)
diff --git a/test/test_protocol_http.py b/test/test_protocol_http.py
index c39f9abbc..884a528e4 100644
--- a/test/test_protocol_http.py
+++ b/test/test_protocol_http.py
@@ -61,7 +61,8 @@ class TestHTTPRequest:
assert "Host" in r.headers
def test_expect_header(self):
- s = StringIO("GET / HTTP/1.1\r\nContent-Length: 3\r\nExpect: 100-continue\r\n\r\nfoobar")
+ s = StringIO(
+ "GET / HTTP/1.1\r\nContent-Length: 3\r\nExpect: 100-continue\r\n\r\nfoobar")
w = StringIO()
r = HTTPRequest.from_stream(s, wfile=w)
assert w.getvalue() == "HTTP/1.1 100 Continue\r\n\r\n"
@@ -84,7 +85,8 @@ class TestHTTPRequest:
tutils.raises("Bad HTTP request line", HTTPRequest.from_stream, s)
s = StringIO("GET http://address:22/ HTTP/1.1")
r = HTTPRequest.from_stream(s)
- assert r.assemble() == "GET http://address:22/ HTTP/1.1\r\nHost: address:22\r\nContent-Length: 0\r\n\r\n"
+ assert r.assemble(
+ ) == "GET http://address:22/ HTTP/1.1\r\nHost: address:22\r\nContent-Length: 0\r\n\r\n"
def test_http_options_relative_form_in(self):
"""
@@ -105,10 +107,10 @@ class TestHTTPRequest:
r.host = 'address'
r.port = 80
r.scheme = "http"
- assert r.assemble() == ("OPTIONS http://address:80/secret/resource HTTP/1.1\r\n"
- "Host: address\r\n"
- "Content-Length: 0\r\n\r\n")
-
+ assert r.assemble() == (
+ "OPTIONS http://address:80/secret/resource HTTP/1.1\r\n"
+ "Host: address\r\n"
+ "Content-Length: 0\r\n\r\n")
def test_assemble_unknown_form(self):
r = tutils.treq()
@@ -257,7 +259,8 @@ class TestHTTPResponse:
def test_get_cookies_with_parameters(self):
h = odict.ODictCaseless()
- h["Set-Cookie"] = ["cookiename=cookievalue;domain=example.com;expires=Wed Oct 21 16:29:41 2015;path=/; HttpOnly"]
+ h["Set-Cookie"] = [
+ "cookiename=cookievalue;domain=example.com;expires=Wed Oct 21 16:29:41 2015;path=/; HttpOnly"]
resp = tutils.tresp()
resp.headers = h
result = resp.get_cookies()
diff --git a/test/test_proxy.py b/test/test_proxy.py
index 641b4f471..a618ae6cc 100644
--- a/test/test_proxy.py
+++ b/test/test_proxy.py
@@ -78,7 +78,6 @@ class TestProcessProxyOptions:
def test_no_transparent(self):
self.assert_err("transparent mode not supported", "-T")
-
@mock.patch("libmproxy.platform.resolver")
def test_modes(self, _):
self.assert_noerr("-R", "http://localhost")
@@ -96,28 +95,42 @@ class TestProcessProxyOptions:
def test_client_certs(self):
with tutils.tmpdir() as cadir:
self.assert_noerr("--client-certs", cadir)
- self.assert_err("directory does not exist", "--client-certs", "nonexistent")
+ self.assert_err(
+ "directory does not exist",
+ "--client-certs",
+ "nonexistent")
def test_certs(self):
with tutils.tmpdir() as cadir:
- self.assert_noerr("--cert", tutils.test_data.path("data/testkey.pem"))
+ self.assert_noerr(
+ "--cert",
+ tutils.test_data.path("data/testkey.pem"))
self.assert_err("does not exist", "--cert", "nonexistent")
def test_auth(self):
p = self.assert_noerr("--nonanonymous")
assert p.authenticator
- p = self.assert_noerr("--htpasswd", tutils.test_data.path("data/htpasswd"))
+ p = self.assert_noerr(
+ "--htpasswd",
+ tutils.test_data.path("data/htpasswd"))
assert p.authenticator
- self.assert_err("malformed htpasswd file", "--htpasswd", tutils.test_data.path("data/htpasswd.invalid"))
+ self.assert_err(
+ "malformed htpasswd file",
+ "--htpasswd",
+ tutils.test_data.path("data/htpasswd.invalid"))
p = self.assert_noerr("--singleuser", "test:test")
assert p.authenticator
- self.assert_err("invalid single-user specification", "--singleuser", "test")
+ self.assert_err(
+ "invalid single-user specification",
+ "--singleuser",
+ "test")
class TestProxyServer:
- @tutils.SkipWindows # binding to 0.0.0.0:1 works without special permissions on Windows
+ # binding to 0.0.0.0:1 works without special permissions on Windows
+ @tutils.SkipWindows
def test_err(self):
conf = ProxyConfig(
port=1
@@ -142,6 +155,12 @@ class TestConnectionHandler:
def test_fatal_error(self):
config = mock.Mock()
config.mode.get_upstream_server.side_effect = RuntimeError
- c = ConnectionHandler(config, mock.MagicMock(), ("127.0.0.1", 8080), None, mock.MagicMock())
+ c = ConnectionHandler(
+ config,
+ mock.MagicMock(),
+ ("127.0.0.1",
+ 8080),
+ None,
+ mock.MagicMock())
with tutils.capture_stderr(c.handle) as output:
assert "mitmproxy has crashed" in output
diff --git a/test/test_script.py b/test/test_script.py
index aed7def1c..0a0637404 100644
--- a/test/test_script.py
+++ b/test/test_script.py
@@ -11,7 +11,7 @@ class TestScript:
s = flow.State()
fm = flow.FlowMaster(None, s)
sp = tutils.test_data.path("scripts/a.py")
- p = script.Script("%s --var 40"%sp, fm)
+ p = script.Script("%s --var 40" % sp, fm)
assert "here" in p.ns
assert p.run("here") == (True, 41)
@@ -79,7 +79,9 @@ class TestScript:
def test_concurrent2(self):
s = flow.State()
fm = flow.FlowMaster(None, s)
- s = script.Script(tutils.test_data.path("scripts/concurrent_decorator.py"), fm)
+ s = script.Script(
+ tutils.test_data.path("scripts/concurrent_decorator.py"),
+ fm)
s.load()
m = mock.Mock()
@@ -110,8 +112,9 @@ class TestScript:
fm = flow.FlowMaster(None, s)
tutils.raises(
"decorator not supported for this method",
- script.Script, tutils.test_data.path("scripts/concurrent_decorator_err.py"), fm
- )
+ script.Script,
+ tutils.test_data.path("scripts/concurrent_decorator_err.py"),
+ fm)
def test_command_parsing():
@@ -120,5 +123,3 @@ def test_command_parsing():
absfilepath = os.path.normcase(tutils.test_data.path("scripts/a.py"))
s = script.Script(absfilepath, fm)
assert os.path.isfile(s.argv[0])
-
-
diff --git a/test/test_server.py b/test/test_server.py
index 7f93c7294..2ab48422b 100644
--- a/test/test_server.py
+++ b/test/test_server.py
@@ -1,10 +1,12 @@
-import socket, time
+import socket
+import time
from libmproxy.proxy.config import HostMatcher
import libpathod
from netlib import tcp, http_auth, http
from libpathod import pathoc, pathod
from netlib.certutils import SSLCert
-import tutils, tservers
+import tutils
+import tservers
from libmproxy.protocol import KILL, Error
from libmproxy.protocol.http import CONTENT_MISSING
@@ -16,9 +18,10 @@ from libmproxy.protocol.http import CONTENT_MISSING
for a 200 response.
"""
+
class CommonMixin:
def test_large(self):
- assert len(self.pathod("200:b@50k").content) == 1024*50
+ assert len(self.pathod("200:b@50k").content) == 1024 * 50
@staticmethod
def wait_until_not_live(flow):
@@ -56,7 +59,8 @@ class CommonMixin:
# Port error
l.request.port = 1
# In upstream mode, we get a 502 response from the upstream proxy server.
- # In upstream mode with ssl, the replay will fail as we cannot establish SSL with the upstream proxy.
+ # In upstream mode with ssl, the replay will fail as we cannot establish
+ # SSL with the upstream proxy.
rt = self.master.replay_request(l, block=True)
assert not rt
if isinstance(self, tservers.HTTPUpstreamProxTest) and not self.ssl:
@@ -68,7 +72,9 @@ class CommonMixin:
f = self.pathod("304")
assert f.status_code == 304
- l = self.master.state.view[-1] # In Upstream mode with SSL, we may already have a previous CONNECT request.
+ # In Upstream mode with SSL, we may already have a previous CONNECT
+ # request.
+ l = self.master.state.view[-1]
assert l.client_conn.address
assert "host" in l.request.headers
assert l.response.code == 304
@@ -90,11 +96,13 @@ class CommonMixin:
log = self.server.last_log()
assert log["request"]["sni"] == "testserver.com"
+
class TcpMixin:
def _ignore_on(self):
assert not hasattr(self, "_ignore_backup")
self._ignore_backup = self.config.check_ignore
- self.config.check_ignore = HostMatcher([".+:%s" % self.server.port] + self.config.check_ignore.patterns)
+ self.config.check_ignore = HostMatcher(
+ [".+:%s" % self.server.port] + self.config.check_ignore.patterns)
def _ignore_off(self):
assert hasattr(self, "_ignore_backup")
@@ -125,22 +133,26 @@ class TcpMixin:
# Test Non-HTTP traffic
spec = "200:i0,@100:d0" # this results in just 100 random bytes
- assert self.pathod(spec).status_code == 502 # mitmproxy responds with bad gateway
+ # mitmproxy responds with bad gateway
+ assert self.pathod(spec).status_code == 502
self._ignore_on()
- tutils.raises("invalid server response", self.pathod, spec) # pathoc tries to parse answer as HTTP
+ tutils.raises(
+ "invalid server response",
+ self.pathod,
+ spec) # pathoc tries to parse answer as HTTP
self._ignore_off()
def _tcpproxy_on(self):
assert not hasattr(self, "_tcpproxy_backup")
self._tcpproxy_backup = self.config.check_tcp
- self.config.check_tcp = HostMatcher([".+:%s" % self.server.port] + self.config.check_tcp.patterns)
+ self.config.check_tcp = HostMatcher(
+ [".+:%s" % self.server.port] + self.config.check_tcp.patterns)
def _tcpproxy_off(self):
assert hasattr(self, "_tcpproxy_backup")
self.config.check_ignore = self._tcpproxy_backup
del self._tcpproxy_backup
-
def test_tcp(self):
spec = '304:h"Alternate-Protocol"="mitmproxy-will-remove-this"'
n = self.pathod(spec)
@@ -165,6 +177,7 @@ class TcpMixin:
# Make sure that TCP messages are in the event log.
assert any("mitmproxy-will-remove-this" in m for m in self.master.log)
+
class AppMixin:
def test_app(self):
ret = self.app("/")
@@ -188,30 +201,30 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
def test_upstream_ssl_error(self):
p = self.pathoc()
- ret = p.request("get:'https://localhost:%s/'"%self.server.port)
+ ret = p.request("get:'https://localhost:%s/'" % self.server.port)
assert ret.status_code == 400
def test_connection_close(self):
# Add a body, so we have a content-length header, which combined with
# HTTP1.1 means the connection is kept alive.
- response = '%s/p/200:b@1'%self.server.urlbase
+ response = '%s/p/200:b@1' % self.server.urlbase
# Lets sanity check that the connection does indeed stay open by
# issuing two requests over the same connection
p = self.pathoc()
- assert p.request("get:'%s'"%response)
- assert p.request("get:'%s'"%response)
+ assert p.request("get:'%s'" % response)
+ assert p.request("get:'%s'" % response)
# Now check that the connection is closed as the client specifies
p = self.pathoc()
- assert p.request("get:'%s':h'Connection'='close'"%response)
+ assert p.request("get:'%s':h'Connection'='close'" % response)
# There's a race here, which means we can get any of a number of errors.
# Rather than introduce yet another sleep into the test suite, we just
# relax the Exception specification.
- tutils.raises(Exception, p.request, "get:'%s'"%response)
+ tutils.raises(Exception, p.request, "get:'%s'" % response)
def test_reconnect(self):
- req = "get:'%s/p/200:b@1:da'"%self.server.urlbase
+ req = "get:'%s/p/200:b@1:da'" % self.server.urlbase
p = self.pathoc()
assert p.request(req)
# Server has disconnected. Mitmproxy should detect this, and reconnect.
@@ -225,8 +238,8 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
return True
req = "get:'%s/p/200:b@1'"
p = self.pathoc()
- assert p.request(req%self.server.urlbase)
- assert p.request(req%self.server2.urlbase)
+ assert p.request(req % self.server.urlbase)
+ assert p.request(req % self.server2.urlbase)
assert switched(self.proxy.log)
def test_get_connection_err(self):
@@ -237,7 +250,7 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
def test_blank_leading_line(self):
p = self.pathoc()
req = "get:'%s/p/201':i0,'\r\n'"
- assert p.request(req%self.server.urlbase).status_code == 201
+ assert p.request(req % self.server.urlbase).status_code == 201
def test_invalid_headers(self):
p = self.pathoc()
@@ -251,7 +264,9 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.connect(("127.0.0.1", self.proxy.port))
spec = '301:h"Transfer-Encoding"="chunked":r:b"0\\r\\n\\r\\n"'
- connection.send("GET http://localhost:%d/p/%s HTTP/1.1\r\n"%(self.server.port, spec))
+ connection.send(
+ "GET http://localhost:%d/p/%s HTTP/1.1\r\n" %
+ (self.server.port, spec))
connection.send("\r\n")
resp = connection.recv(50000)
connection.close()
@@ -270,13 +285,20 @@ class TestHTTP(tservers.HTTPProxTest, CommonMixin, AppMixin):
self.master.set_stream_large_bodies(None)
def test_stream_modify(self):
- self.master.load_script(tutils.test_data.path("scripts/stream_modify.py"))
+ self.master.load_script(
+ tutils.test_data.path("scripts/stream_modify.py"))
d = self.pathod('200:b"foo"')
assert d.content == "bar"
self.master.unload_scripts()
+
class TestHTTPAuth(tservers.HTTPProxTest):
- authenticator = http_auth.BasicProxyAuth(http_auth.PassManSingleUser("test", "test"), "realm")
+ authenticator = http_auth.BasicProxyAuth(
+ http_auth.PassManSingleUser(
+ "test",
+ "test"),
+ "realm")
+
def test_auth(self):
assert self.pathod("202").status_code == 407
p = self.pathoc()
@@ -284,7 +306,7 @@ class TestHTTPAuth(tservers.HTTPProxTest):
get
'http://localhost:%s/p/202'
h'%s'='%s'
- """%(
+ """ % (
self.server.port,
http_auth.BasicProxyAuth.AUTH_HEADER,
http.assemble_http_basic_auth("basic", "test", "test")
@@ -294,6 +316,7 @@ class TestHTTPAuth(tservers.HTTPProxTest):
class TestHTTPConnectSSLError(tservers.HTTPProxTest):
certfile = True
+
def test_go(self):
self.config.ssl_ports.append(self.proxy.port)
p = self.pathoc_raw()
@@ -306,6 +329,7 @@ class TestHTTPS(tservers.HTTPProxTest, CommonMixin, TcpMixin):
ssl = True
ssloptions = pathod.SSLOptions(request_client_cert=True)
clientcerts = True
+
def test_clientcert(self):
f = self.pathod("304")
assert f.status_code == 304
@@ -319,6 +343,7 @@ class TestHTTPS(tservers.HTTPProxTest, CommonMixin, TcpMixin):
class TestHTTPSCertfile(tservers.HTTPProxTest, CommonMixin):
ssl = True
certfile = True
+
def test_certfile(self):
assert self.pathod("304")
@@ -328,11 +353,12 @@ class TestHTTPSNoCommonName(tservers.HTTPProxTest):
Test what happens if we get a cert without common name back.
"""
ssl = True
- ssloptions=pathod.SSLOptions(
- certs = [
- ("*", tutils.test_data.path("data/no_common_name.pem"))
- ]
- )
+ ssloptions = pathod.SSLOptions(
+ certs = [
+ ("*", tutils.test_data.path("data/no_common_name.pem"))
+ ]
+ )
+
def test_http(self):
f = self.pathod("202")
assert f.sslinfo.certchain[0].get_subject().CN == "127.0.0.1"
@@ -373,7 +399,6 @@ class TestHttps2Http(tservers.ReverseProxTest):
assert p.request("get:'/p/200'").status_code == 400
-
class TestTransparent(tservers.TransparentProxTest, CommonMixin, TcpMixin):
ssl = False
@@ -413,15 +438,19 @@ class TestProxy(tservers.HTTPProxTest):
connection.connect(("127.0.0.1", self.proxy.port))
# call pathod server, wait a second to complete the request
- connection.send("GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n"%self.server.port)
+ connection.send(
+ "GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n" %
+ self.server.port)
time.sleep(1)
connection.send("\r\n")
connection.recv(50000)
connection.close()
- request, response = self.master.state.view[0].request, self.master.state.view[0].response
+ request, response = self.master.state.view[
+ 0].request, self.master.state.view[0].response
assert response.code == 304 # sanity test for our low level request
- assert 0.95 < (request.timestamp_end - request.timestamp_start) < 1.2 #time.sleep might be a little bit shorter than a second
+ # time.sleep might be a little bit shorter than a second
+ assert 0.95 < (request.timestamp_end - request.timestamp_start) < 1.2
def test_request_timestamps_not_affected_by_client_time(self):
# test that don't include user wait time in request's timestamps
@@ -441,10 +470,14 @@ class TestProxy(tservers.HTTPProxTest):
# tests that the client_conn a tcp connection has a tcp_setup_timestamp
connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connection.connect(("localhost", self.proxy.port))
- connection.send("GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n"%self.server.port)
+ connection.send(
+ "GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n" %
+ self.server.port)
connection.send("\r\n")
connection.recv(5000)
- connection.send("GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n"%self.server.port)
+ connection.send(
+ "GET http://localhost:%d/p/304:b@1k HTTP/1.1\r\n" %
+ self.server.port)
connection.send("\r\n")
connection.recv(5000)
connection.close()
@@ -462,8 +495,10 @@ class TestProxy(tservers.HTTPProxTest):
f = self.master.state.view[0]
assert f.server_conn.address == ("127.0.0.1", self.server.port)
+
class TestProxySSL(tservers.HTTPProxTest):
- ssl=True
+ ssl = True
+
def test_request_ssl_setup_timestamp_presence(self):
# tests that the ssl timestamp is present when ssl is used
f = self.pathod("304:b@10k")
@@ -479,16 +514,24 @@ class MasterRedirectRequest(tservers.TestMaster):
request = f.request
if request.path == "/p/201":
addr = f.live.c.server_conn.address
- assert f.live.change_server(("127.0.0.1", self.redirect_port), ssl=False)
- assert not f.live.change_server(("127.0.0.1", self.redirect_port), ssl=False)
- tutils.raises("SSL handshake error", f.live.change_server, ("127.0.0.1", self.redirect_port), ssl=True)
+ assert f.live.change_server(
+ ("127.0.0.1", self.redirect_port), ssl=False)
+ assert not f.live.change_server(
+ ("127.0.0.1", self.redirect_port), ssl=False)
+ tutils.raises(
+ "SSL handshake error",
+ f.live.change_server,
+ ("127.0.0.1",
+ self.redirect_port),
+ ssl=True)
assert f.live.change_server(addr, ssl=False)
request.url = "http://127.0.0.1:%s/p/201" % self.redirect_port
tservers.TestMaster.handle_request(self, f)
def handle_response(self, f):
f.response.content = str(f.client_conn.address.port)
- f.response.headers["server-conn-id"] = [str(f.server_conn.source_address.port)]
+ f.response.headers[
+ "server-conn-id"] = [str(f.server_conn.source_address.port)]
tservers.TestMaster.handle_response(self, f)
@@ -502,37 +545,41 @@ class TestRedirectRequest(tservers.HTTPProxTest):
self.server.clear_log()
self.server2.clear_log()
- r1 = p.request("get:'%s/p/200'"%self.server.urlbase)
+ r1 = p.request("get:'%s/p/200'" % self.server.urlbase)
assert r1.status_code == 200
assert self.server.last_log()
assert not self.server2.last_log()
self.server.clear_log()
self.server2.clear_log()
- r2 = p.request("get:'%s/p/201'"%self.server.urlbase)
+ r2 = p.request("get:'%s/p/201'" % self.server.urlbase)
assert r2.status_code == 201
assert not self.server.last_log()
assert self.server2.last_log()
self.server.clear_log()
self.server2.clear_log()
- r3 = p.request("get:'%s/p/202'"%self.server.urlbase)
+ r3 = p.request("get:'%s/p/202'" % self.server.urlbase)
assert r3.status_code == 202
assert self.server.last_log()
assert not self.server2.last_log()
assert r1.content == r2.content == r3.content
- assert r1.headers.get_first("server-conn-id") == r3.headers.get_first("server-conn-id")
+ assert r1.headers.get_first(
+ "server-conn-id") == r3.headers.get_first("server-conn-id")
# Make sure that we actually use the same connection in this test case
+
class MasterStreamRequest(tservers.TestMaster):
"""
Enables the stream flag on the flow for all requests
"""
+
def handle_responseheaders(self, f):
f.response.stream = True
f.reply()
+
class TestStreamRequest(tservers.HTTPProxTest):
masterclass = MasterStreamRequest
@@ -541,7 +588,7 @@ class TestStreamRequest(tservers.HTTPProxTest):
# a request with 100k of data but without content-length
self.server.clear_log()
- r1 = p.request("get:'%s/p/200:r:b@100k:d102400'"%self.server.urlbase)
+ r1 = p.request("get:'%s/p/200:r:b@100k:d102400'" % self.server.urlbase)
assert r1.status_code == 200
assert len(r1.content) > 100000
assert self.server.last_log()
@@ -551,13 +598,13 @@ class TestStreamRequest(tservers.HTTPProxTest):
# simple request with streaming turned on
self.server.clear_log()
- r1 = p.request("get:'%s/p/200'"%self.server.urlbase)
+ r1 = p.request("get:'%s/p/200'" % self.server.urlbase)
assert r1.status_code == 200
assert self.server.last_log()
# now send back 100k of data, streamed but not chunked
self.server.clear_log()
- r1 = p.request("get:'%s/p/200:b@100k'"%self.server.urlbase)
+ r1 = p.request("get:'%s/p/200:b@100k'" % self.server.urlbase)
assert r1.status_code == 200
assert self.server.last_log()
@@ -567,15 +614,27 @@ class TestStreamRequest(tservers.HTTPProxTest):
connection.connect(("127.0.0.1", self.proxy.port))
fconn = connection.makefile()
spec = '200:h"Transfer-Encoding"="chunked":r:b"4\\r\\nthis\\r\\n7\\r\\nisatest\\r\\n0\\r\\n\\r\\n"'
- connection.send("GET %s/p/%s HTTP/1.1\r\n"%(self.server.urlbase, spec))
+ connection.send(
+ "GET %s/p/%s HTTP/1.1\r\n" %
+ (self.server.urlbase, spec))
connection.send("\r\n")
- httpversion, code, msg, headers, content = http.read_response(fconn, "GET", None, include_body=False)
+ httpversion, code, msg, headers, content = http.read_response(
+ fconn, "GET", None, include_body=False)
assert headers["Transfer-Encoding"][0] == 'chunked'
assert code == 200
- chunks = list(content for _, content, _ in http.read_http_body_chunked(fconn, headers, None, "GET", 200, False))
+ chunks = list(
+ content for _,
+ content,
+ _ in http.read_http_body_chunked(
+ fconn,
+ headers,
+ None,
+ "GET",
+ 200,
+ False))
assert chunks == ["this", "isatest", ""]
connection.close()
@@ -589,6 +648,7 @@ class MasterFakeResponse(tservers.TestMaster):
class TestFakeResponse(tservers.HTTPProxTest):
masterclass = MasterFakeResponse
+
def test_fake(self):
f = self.pathod("200")
assert "header_response" in f.headers.keys()
@@ -601,6 +661,7 @@ class MasterKillRequest(tservers.TestMaster):
class TestKillRequest(tservers.HTTPProxTest):
masterclass = MasterKillRequest
+
def test_kill(self):
tutils.raises("server disconnect", self.pathod, "200")
# Nothing should have hit the server
@@ -614,6 +675,7 @@ class MasterKillResponse(tservers.TestMaster):
class TestKillResponse(tservers.HTTPProxTest):
masterclass = MasterKillResponse
+
def test_kill(self):
tutils.raises("server disconnect", self.pathod, "200")
# The server should have seen a request
@@ -627,6 +689,7 @@ class EResolver(tservers.TResolver):
class TestTransparentResolveError(tservers.TransparentProxTest):
resolver = EResolver
+
def test_resolve_error(self):
assert self.pathod("304").status_code == 502
@@ -640,6 +703,7 @@ class MasterIncomplete(tservers.TestMaster):
class TestIncompleteResponse(tservers.HTTPProxTest):
masterclass = MasterIncomplete
+
def test_incomplete(self):
assert self.pathod("200").status_code == 502
@@ -656,10 +720,16 @@ class TestUpstreamProxy(tservers.HTTPUpstreamProxTest, CommonMixin, AppMixin):
ssl = False
def test_order(self):
- self.proxy.tmaster.replacehooks.add("~q", "foo", "bar") # replace in request
+ self.proxy.tmaster.replacehooks.add(
+ "~q",
+ "foo",
+ "bar") # replace in request
self.chain[0].tmaster.replacehooks.add("~q", "bar", "baz")
self.chain[1].tmaster.replacehooks.add("~q", "foo", "oh noes!")
- self.chain[0].tmaster.replacehooks.add("~s", "baz", "ORLY") # replace in response
+ self.chain[0].tmaster.replacehooks.add(
+ "~s",
+ "baz",
+ "ORLY") # replace in response
p = self.pathoc()
req = p.request("get:'%s/p/418:b\"foo\"'" % self.server.urlbase)
@@ -667,7 +737,10 @@ class TestUpstreamProxy(tservers.HTTPUpstreamProxTest, CommonMixin, AppMixin):
assert req.status_code == 418
-class TestUpstreamProxySSL(tservers.HTTPUpstreamProxTest, CommonMixin, TcpMixin):
+class TestUpstreamProxySSL(
+ tservers.HTTPUpstreamProxTest,
+ CommonMixin,
+ TcpMixin):
ssl = True
def _host_pattern_on(self, attr):
@@ -677,7 +750,10 @@ class TestUpstreamProxySSL(tservers.HTTPUpstreamProxTest, CommonMixin, TcpMixin)
assert not hasattr(self, "_ignore_%s_backup" % attr)
backup = []
for proxy in self.chain:
- old_matcher = getattr(proxy.tmaster.server.config, "check_%s" % attr)
+ old_matcher = getattr(
+ proxy.tmaster.server.config,
+ "check_%s" %
+ attr)
backup.append(old_matcher)
setattr(
proxy.tmaster.server.config,
@@ -721,11 +797,14 @@ class TestUpstreamProxySSL(tservers.HTTPUpstreamProxTest, CommonMixin, TcpMixin)
assert req.content == "content"
assert req.status_code == 418
- assert self.proxy.tmaster.state.flow_count() == 2 # CONNECT from pathoc to chain[0],
- # request from pathoc to chain[0]
- assert self.chain[0].tmaster.state.flow_count() == 2 # CONNECT from proxy to chain[1],
- # request from proxy to chain[1]
- assert self.chain[1].tmaster.state.flow_count() == 1 # request from chain[0] (regular proxy doesn't store CONNECTs)
+ # CONNECT from pathoc to chain[0],
+ # request from pathoc to chain[0]
+ assert self.proxy.tmaster.state.flow_count() == 2
+ # CONNECT from proxy to chain[1],
+ # request from proxy to chain[1]
+ assert self.chain[0].tmaster.state.flow_count() == 2
+ # request from chain[0] (regular proxy doesn't store CONNECTs)
+ assert self.chain[1].tmaster.state.flow_count() == 1
def test_closing_connect_response(self):
"""
@@ -755,6 +834,7 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
def kill_requests(master, attr, exclude):
k = [0] # variable scope workaround: mutate a list (Python 2 has no nonlocal)
_func = getattr(master, attr)
+
def handler(f):
k[0] += 1
if not (k[0] in exclude):
@@ -766,9 +846,9 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
kill_requests(self.chain[1].tmaster, "handle_request",
exclude=[
- # fail first request
+ # fail first request
2, # allow second request
- ])
+ ])
kill_requests(self.chain[0].tmaster, "handle_request",
exclude=[
@@ -776,16 +856,18 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
# fail first request
3, # reCONNECT
4, # request
- ])
+ ])
p = self.pathoc()
req = p.request("get:'/p/418:b\"content\"'")
assert self.proxy.tmaster.state.flow_count() == 2 # CONNECT and request
- assert self.chain[0].tmaster.state.flow_count() == 4 # CONNECT, failing request,
- # reCONNECT, request
- assert self.chain[1].tmaster.state.flow_count() == 2 # failing request, request
- # (doesn't store (repeated) CONNECTs from chain[0]
- # as it is a regular proxy)
+ # CONNECT, failing request,
+ # reCONNECT, request
+ assert self.chain[0].tmaster.state.flow_count() == 4
+ # failing request, request
+ # (doesn't store (repeated) CONNECTs from chain[0]
+ # as it is a regular proxy)
+ assert self.chain[1].tmaster.state.flow_count() == 2
assert req.content == "content"
assert req.status_code == 418
@@ -795,18 +877,26 @@ class TestProxyChainingSSLReconnect(tservers.HTTPUpstreamProxTest):
assert self.proxy.tmaster.state.flows[0].request.form_in == "authority"
assert self.proxy.tmaster.state.flows[1].request.form_in == "relative"
- assert self.chain[0].tmaster.state.flows[0].request.form_in == "authority"
- assert self.chain[0].tmaster.state.flows[1].request.form_in == "relative"
- assert self.chain[0].tmaster.state.flows[2].request.form_in == "authority"
- assert self.chain[0].tmaster.state.flows[3].request.form_in == "relative"
+ assert self.chain[0].tmaster.state.flows[
+ 0].request.form_in == "authority"
+ assert self.chain[0].tmaster.state.flows[
+ 1].request.form_in == "relative"
+ assert self.chain[0].tmaster.state.flows[
+ 2].request.form_in == "authority"
+ assert self.chain[0].tmaster.state.flows[
+ 3].request.form_in == "relative"
- assert self.chain[1].tmaster.state.flows[0].request.form_in == "relative"
- assert self.chain[1].tmaster.state.flows[1].request.form_in == "relative"
+ assert self.chain[1].tmaster.state.flows[
+ 0].request.form_in == "relative"
+ assert self.chain[1].tmaster.state.flows[
+ 1].request.form_in == "relative"
req = p.request("get:'/p/418:b\"content2\"'")
assert req.status_code == 502
assert self.proxy.tmaster.state.flow_count() == 3 # + new request
- assert self.chain[0].tmaster.state.flow_count() == 6 # + new request, repeated CONNECT from chain[1]
- # (both terminated)
- assert self.chain[1].tmaster.state.flow_count() == 2 # nothing happened here
+ # + new request, repeated CONNECT from chain[1]
+ # (both terminated)
+ assert self.chain[0].tmaster.state.flow_count() == 6
+ # nothing happened here
+ assert self.chain[1].tmaster.state.flow_count() == 2
diff --git a/test/test_utils.py b/test/test_utils.py
index 6b9262a04..0c514f5da 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -48,9 +48,11 @@ def test_urldecode():
s = "one=two&three=four"
assert len(utils.urldecode(s)) == 2
+
def test_multipartdecode():
boundary = 'somefancyboundary'
- headers = odict.ODict([('content-type', ('multipart/form-data; boundary=%s' % boundary))])
+ headers = odict.ODict(
+ [('content-type', ('multipart/form-data; boundary=%s' % boundary))])
content = "--{0}\n" \
"Content-Disposition: form-data; name=\"field1\"\n\n" \
"value1\n" \
@@ -65,6 +67,7 @@ def test_multipartdecode():
assert form[0] == ('field1', 'value1')
assert form[1] == ('field2', 'value2')
+
def test_pretty_duration():
assert utils.pretty_duration(0.00001) == "0ms"
assert utils.pretty_duration(0.0001) == "0ms"
@@ -79,10 +82,13 @@ def test_pretty_duration():
assert utils.pretty_duration(1.123) == "1.12s"
assert utils.pretty_duration(0.123) == "123ms"
+
def test_LRUCache():
cache = utils.LRUCache(2)
+
class Foo:
ran = False
+
def gen(self, x):
self.ran = True
return x
diff --git a/test/tools/bench.py b/test/tools/bench.py
index 1028f61d4..8127d083a 100644
--- a/test/tools/bench.py
+++ b/test/tools/bench.py
@@ -1,5 +1,6 @@
from __future__ import print_function
-import requests, time
+import requests
+import time
n = 100
url = "http://192.168.1.1/"
@@ -7,18 +8,17 @@ proxy = "http://192.168.1.115:8080/"
start = time.time()
for _ in range(n):
- requests.get(url, allow_redirects=False, proxies=dict(http=proxy))
- print(".", end="")
-t_mitmproxy = time.time()-start
+ requests.get(url, allow_redirects=False, proxies=dict(http=proxy))
+ print(".", end="")
+t_mitmproxy = time.time() - start
print("\r\nTotal time with mitmproxy: {}".format(t_mitmproxy))
-
start = time.time()
for _ in range(n):
- requests.get(url, allow_redirects=False)
- print(".", end="")
-t_without = time.time()-start
+ requests.get(url, allow_redirects=False)
+ print(".", end="")
+t_without = time.time() - start
-print("\r\nTotal time without mitmproxy: {}".format(t_without))
\ No newline at end of file
+print("\r\nTotal time without mitmproxy: {}".format(t_without))
diff --git a/test/tools/getcert b/test/tools/getcert
index 8fabefb75..3bd2bec88 100755
--- a/test/tools/getcert
+++ b/test/tools/getcert
@@ -1,7 +1,10 @@
#!/usr/bin/env python
import sys
sys.path.insert(0, "../..")
-import socket, tempfile, ssl, subprocess
+import socket
+import tempfile
+import ssl
+import subprocess
addr = socket.gethostbyname(sys.argv[1])
print ssl.get_server_certificate((addr, 443))
diff --git a/test/tools/passive_close.py b/test/tools/passive_close.py
index d0b36e7f7..7199ea70d 100644
--- a/test/tools/passive_close.py
+++ b/test/tools/passive_close.py
@@ -2,12 +2,14 @@ import SocketServer
from threading import Thread
from time import sleep
+
class service(SocketServer.BaseRequestHandler):
def handle(self):
data = 'dummy'
print "Client connected with ", self.client_address
while True:
- self.request.send("HTTP/1.1 200 OK\r\nConnection: close\r\nContent-Length: 7\r\n\r\ncontent")
+ self.request.send(
+ "HTTP/1.1 200 OK\r\nConnection: close\r\nContent-Length: 7\r\n\r\ncontent")
data = self.request.recv(1024)
if not len(data):
print "Connection closed by remote: ", self.client_address
@@ -17,5 +19,5 @@ class service(SocketServer.BaseRequestHandler):
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
pass
-server = ThreadedTCPServer(('',1520), service)
+server = ThreadedTCPServer(('', 1520), service)
server.serve_forever()
diff --git a/test/tools/yappibench.py b/test/tools/yappibench.py
index b9e4e41db..bae8da448 100644
--- a/test/tools/yappibench.py
+++ b/test/tools/yappibench.py
@@ -2,7 +2,7 @@
# yappi (https://code.google.com/p/yappi/)
#
# Requirements:
-# - Apache Bench "ab" binary
+# - Apache Bench "ab" binary
# - pip install click yappi
from libmproxy.main import mitmdump
@@ -13,14 +13,17 @@ import time
import yappi
import click
-class ApacheBenchThread(Thread):
- def __init__(self, concurrency):
- self.concurrency = concurrency
- super(ApacheBenchThread, self).__init__()
- def run(self):
- time.sleep(2)
- system("ab -n 1024 -c {} -X 127.0.0.1:8080 http://example.com/".format(self.concurrency))
+class ApacheBenchThread(Thread):
+ def __init__(self, concurrency):
+ self.concurrency = concurrency
+ super(ApacheBenchThread, self).__init__()
+
+ def run(self):
+ time.sleep(2)
+ system(
+ "ab -n 1024 -c {} -X 127.0.0.1:8080 http://example.com/".format(self.concurrency))
+
@click.command()
@click.option('--profiler', default="yappi", type=click.Choice(['yappi']))
@@ -28,24 +31,24 @@ class ApacheBenchThread(Thread):
@click.option('--concurrency', default=1, type=click.INT)
def main(profiler, clock_type, concurrency):
- outfile = "callgrind.mitmdump-{}-c{}".format(clock_type, concurrency)
- a = ApacheBenchThread(concurrency)
- a.start()
+ outfile = "callgrind.mitmdump-{}-c{}".format(clock_type, concurrency)
+ a = ApacheBenchThread(concurrency)
+ a.start()
- if profiler == "yappi":
- yappi.set_clock_type(clock_type)
- yappi.start(builtins=True)
+ if profiler == "yappi":
+ yappi.set_clock_type(clock_type)
+ yappi.start(builtins=True)
- print("Start mitmdump...")
- mitmdump(["-k","-q","-S", "1024example"])
- print("mitmdump stopped.")
-
- print("Save profile information...")
- if profiler == "yappi":
- yappi.stop()
- stats = yappi.get_func_stats()
- stats.save(outfile, type='callgrind')
- print("Done.")
+ print("Start mitmdump...")
+ mitmdump(["-k", "-q", "-S", "1024example"])
+ print("mitmdump stopped.")
+
+ print("Save profile information...")
+ if profiler == "yappi":
+ yappi.stop()
+ stats = yappi.get_func_stats()
+ stats.save(outfile, type='callgrind')
+ print("Done.")
if __name__ == '__main__':
- main()
\ No newline at end of file
+ main()
diff --git a/test/tservers.py b/test/tservers.py
index bcda02955..dc14fb37c 100644
--- a/test/tservers.py
+++ b/test/tservers.py
@@ -1,23 +1,28 @@
import os.path
-import threading, Queue
-import shutil, tempfile
+import threading
+import Queue
+import shutil
+import tempfile
import flask
import mock
from libmproxy.proxy.config import ProxyConfig
from libmproxy.proxy.server import ProxyServer
from libmproxy.proxy.primitives import TransparentProxyMode
-import libpathod.test, libpathod.pathoc
+import libpathod.test
+import libpathod.pathoc
from libmproxy import flow, controller
from libmproxy.cmdline import APP_HOST, APP_PORT
import tutils
testapp = flask.Flask(__name__)
+
@testapp.route("/")
def hello():
return "testapp"
+
@testapp.route("/error")
def error():
raise ValueError("An exception...")
@@ -57,7 +62,8 @@ class ProxyThread(threading.Thread):
def __init__(self, tmaster):
threading.Thread.__init__(self)
self.tmaster = tmaster
- self.name = "ProxyThread (%s:%s)" % (tmaster.server.address.host, tmaster.server.address.port)
+ self.name = "ProxyThread (%s:%s)" % (
+ tmaster.server.address.host, tmaster.server.address.port)
controller.should_exit = False
@property
@@ -87,8 +93,12 @@ class ProxTestBase(object):
@classmethod
def setupAll(cls):
- cls.server = libpathod.test.Daemon(ssl=cls.ssl, ssloptions=cls.ssloptions)
- cls.server2 = libpathod.test.Daemon(ssl=cls.ssl, ssloptions=cls.ssloptions)
+ cls.server = libpathod.test.Daemon(
+ ssl=cls.ssl,
+ ssloptions=cls.ssloptions)
+ cls.server2 = libpathod.test.Daemon(
+ ssl=cls.ssl,
+ ssloptions=cls.ssloptions)
cls.config = ProxyConfig(**cls.get_proxy_config())
@@ -151,9 +161,9 @@ class HTTPProxTest(ProxTestBase):
p = self.pathoc(sni=sni)
spec = spec.encode("string_escape")
if self.ssl:
- q = "get:'/p/%s'"%spec
+ q = "get:'/p/%s'" % spec
else:
- q = "get:'%s/p/%s'"%(self.server.urlbase, spec)
+ q = "get:'%s/p/%s'" % (self.server.urlbase, spec)
return p.request(q)
def app(self, page):
@@ -162,10 +172,10 @@ class HTTPProxTest(ProxTestBase):
("127.0.0.1", self.proxy.port), True, fp=None
)
p.connect((APP_HOST, APP_PORT))
- return p.request("get:'%s'"%page)
+ return p.request("get:'%s'" % page)
else:
p = self.pathoc()
- return p.request("get:'http://%s%s'"%(APP_HOST, page))
+ return p.request("get:'http://%s%s'" % (APP_HOST, page))
class TResolver:
@@ -188,7 +198,10 @@ class TransparentProxTest(ProxTestBase):
ports = [cls.server.port, cls.server2.port]
else:
ports = []
- cls.config.mode = TransparentProxyMode(cls.resolver(cls.server.port), ports)
+ cls.config.mode = TransparentProxyMode(
+ cls.resolver(
+ cls.server.port),
+ ports)
@classmethod
def get_proxy_config(cls):
@@ -202,10 +215,10 @@ class TransparentProxTest(ProxTestBase):
"""
if self.ssl:
p = self.pathoc(sni=sni)
- q = "get:'/p/%s'"%spec
+ q = "get:'/p/%s'" % spec
else:
p = self.pathoc()
- q = "get:'/p/%s'"%spec
+ q = "get:'/p/%s'" % spec
return p.request(q)
def pathoc(self, sni=None):
@@ -221,6 +234,7 @@ class TransparentProxTest(ProxTestBase):
class ReverseProxTest(ProxTestBase):
ssl = None
+
@classmethod
def get_proxy_config(cls):
d = ProxTestBase.get_proxy_config()
@@ -249,10 +263,10 @@ class ReverseProxTest(ProxTestBase):
"""
if self.ssl:
p = self.pathoc(sni=sni)
- q = "get:'/p/%s'"%spec
+ q = "get:'/p/%s'" % spec
else:
p = self.pathoc()
- q = "get:'/p/%s'"%spec
+ q = "get:'/p/%s'" % spec
return p.request(q)
@@ -278,8 +292,8 @@ class ChainProxTest(ProxTestBase):
cls.chain.insert(0, proxy)
# Patch the original proxy to upstream mode
- cls.config = cls.proxy.tmaster.config = cls.proxy.tmaster.server.config = ProxyConfig(**cls.get_proxy_config())
-
+ cls.config = cls.proxy.tmaster.config = cls.proxy.tmaster.server.config = ProxyConfig(
+ **cls.get_proxy_config())
@classmethod
def teardownAll(cls):
@@ -303,5 +317,6 @@ class ChainProxTest(ProxTestBase):
)
return d
+
class HTTPUpstreamProxTest(ChainProxTest, HTTPProxTest):
pass
diff --git a/test/tutils.py b/test/tutils.py
index 499efc6ed..aeaeb0de8 100644
--- a/test/tutils.py
+++ b/test/tutils.py
@@ -1,5 +1,8 @@
from cStringIO import StringIO
-import os, shutil, tempfile, argparse
+import os
+import shutil
+import tempfile
+import argparse
from contextlib import contextmanager
import sys
from libmproxy import flow, utils, controller
@@ -14,8 +17,11 @@ from nose.plugins.skip import SkipTest
from mock import Mock
from time import time
+
def _SkipWindows():
raise SkipTest("Skipped on Windows.")
+
+
def SkipWindows(fn):
if os.name == "nt":
return _SkipWindows
@@ -83,10 +89,23 @@ def treq(content="content", scheme="http", host="address", port=22):
"""
headers = odict.ODictCaseless()
headers["header"] = ["qvalue"]
- req = http.HTTPRequest("relative", "GET", scheme, host, port, "/path", (1, 1), headers, content,
- None, None, None)
+ req = http.HTTPRequest(
+ "relative",
+ "GET",
+ scheme,
+ host,
+ port,
+ "/path",
+ (1,
+ 1),
+ headers,
+ content,
+ None,
+ None,
+ None)
return req
+
def treq_absolute(content="content"):
"""
@return: libmproxy.protocol.http.HTTPRequest
@@ -107,7 +126,15 @@ def tresp(content="message"):
headers = odict.ODictCaseless()
headers["header_response"] = ["svalue"]
- resp = http.HTTPResponse((1, 1), 200, "OK", headers, content, time(), time())
+ resp = http.HTTPResponse(
+ (1,
+ 1),
+ 200,
+ "OK",
+ headers,
+ content,
+ time(),
+ time())
return resp
@@ -118,10 +145,11 @@ def terr(content="error"):
err = Error(content)
return err
+
def tflowview(request_contents=None):
m = Mock()
cs = ConsoleState()
- if request_contents == None:
+ if request_contents is None:
flow = tflow()
else:
flow = tflow(req=treq(request_contents))
@@ -129,9 +157,11 @@ def tflowview(request_contents=None):
fv = FlowView(m, cs, flow)
return fv
+
def get_body_line(last_displayed_body, line_nb):
return last_displayed_body.contents()[line_nb + 2]
+
@contextmanager
def tmpdir(*args, **kwargs):
orig_workdir = os.getcwd()
@@ -149,6 +179,7 @@ class MockParser(argparse.ArgumentParser):
argparse.ArgumentParser sys.exits() by default.
Make it more testable by throwing an exception instead.
"""
+
def error(self, message):
raise Exception(message)
@@ -169,14 +200,14 @@ def raises(exc, obj, *args, **kwargs):
:kwargs Arguments to be passed to the callable.
"""
try:
- apply(obj, args, kwargs)
- except Exception, v:
+ obj(*args, **kwargs)
+ except Exception as v:
if isinstance(exc, basestring):
if exc.lower() in str(v).lower():
return
else:
raise AssertionError(
- "Expected %s, but caught %s"%(
+ "Expected %s, but caught %s" % (
repr(str(exc)), v
)
)
@@ -185,7 +216,7 @@ def raises(exc, obj, *args, **kwargs):
return
else:
raise AssertionError(
- "Expected %s, but caught %s %s"%(
+ "Expected %s, but caught %s %s" % (
exc.__name__, v.__class__.__name__, str(v)
)
)