[autofix.ci] apply automated fixes

Author: autofix-ci[bot], 2022-11-29 13:28:41 +00:00 (committed by GitHub)
parent 8361c81cdf
commit 8c2428c9d3
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
351 changed files with 4686 additions and 2808 deletions


@ -10,20 +10,21 @@ from mitmproxy import log
from mitmproxy.proxy import layer
from mitmproxy.proxy import server_hooks
from mitmproxy.proxy.layers import dns
from mitmproxy.proxy.layers.http import _hooks as http
from mitmproxy.proxy.layers import modes
from mitmproxy.proxy.layers import quic
from mitmproxy.proxy.layers import tcp
from mitmproxy.proxy.layers import tls
from mitmproxy.proxy.layers import udp
from mitmproxy.proxy.layers import websocket
from mitmproxy.proxy.layers.http import _hooks as http
known = set()
def category(name: str, desc: str, hooks: list[type[hooks.Hook]]) -> None:
all_params = [
list(inspect.signature(hook.__init__, eval_str=True).parameters.values())[1:] for hook in hooks
list(inspect.signature(hook.__init__, eval_str=True).parameters.values())[1:]
for hook in hooks
]
# slightly overengineered, but this was fun to write. ¯\_(ツ)_/¯
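For readers new to this introspection idiom: inspect.signature(...).parameters lists the constructor's parameters in declaration order, and the [1:] slice drops the implicit self. A minimal sketch with a hypothetical hook class (not part of this commit):

import inspect

class ExampleHook:
    def __init__(self, flow, status: int = 200):
        self.flow, self.status = flow, status

# Same idiom as above: every constructor parameter except `self`.
params = list(inspect.signature(ExampleHook.__init__).parameters.values())[1:]
print([p.name for p in params])  # ['flow', 'status']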


@ -1,11 +1,12 @@
import json
from typing import NamedTuple, Optional
import libtmux
import random
import subprocess
import threading
import time
from typing import NamedTuple
from typing import Optional
import libtmux
class InstructionSpec(NamedTuple):


@ -1,7 +1,6 @@
#!/usr/bin/env python3
from clidirector import CliDirector
import screenplays
from clidirector import CliDirector
if __name__ == "__main__":


@ -1,5 +1,4 @@
#!/usr/bin/env python3
from clidirector import CliDirector


@ -1,5 +1,4 @@
#!/usr/bin/env python3
import re
from pathlib import Path


@ -1,5 +1,4 @@
#!/usr/bin/env python3
from mitmproxy import flowfilter


@ -1,8 +1,11 @@
#!/usr/bin/env python3
import asyncio
from mitmproxy import options, optmanager
from mitmproxy.tools import dump, console, web
from mitmproxy import options
from mitmproxy import optmanager
from mitmproxy.tools import console
from mitmproxy.tools import dump
from mitmproxy.tools import web
masters = {
"mitmproxy": console.master.ConsoleMaster,


@ -4,7 +4,9 @@ protobuf messages based on a user defined rule set.
"""
from mitmproxy import contentviews
from mitmproxy.contentviews.grpc import ViewGrpcProtobuf, ViewConfig, ProtoParser
from mitmproxy.contentviews.grpc import ProtoParser
from mitmproxy.contentviews.grpc import ViewConfig
from mitmproxy.contentviews.grpc import ViewGrpcProtobuf
config: ViewConfig = ViewConfig()
config.parser_rules = [
@ -68,13 +70,13 @@ config.parser_rules = [
tag_prefixes=["1.5.1", "1.5.3", "1.5.4", "1.5.5", "1.5.6"],
name="latitude",
intended_decoding=ProtoParser.DecodedTypes.double,
), # noqa: E501
),
ProtoParser.ParserFieldDefinition(
tag=".2",
tag_prefixes=["1.5.1", "1.5.3", "1.5.4", "1.5.5", "1.5.6"],
name="longitude",
intended_decoding=ProtoParser.DecodedTypes.double,
), # noqa: E501
),
ProtoParser.ParserFieldDefinition(tag="7", name="app"),
],
),


@ -7,7 +7,8 @@ The content view API is explained in the mitmproxy.contentviews module.
"""
from typing import Optional
from mitmproxy import contentviews, flow
from mitmproxy import contentviews
from mitmproxy import flow
from mitmproxy import http
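For context, the content view API this example implements comes down to subclassing contentviews.View and registering an instance. A hedged sketch, assuming the add/remove helpers documented in the mitmproxy.contentviews module:

from mitmproxy import contentviews

class ViewSwapCase(contentviews.View):
    name = "swapcase"

    def __call__(self, data: bytes, **metadata):
        # Return a short description plus formatted output lines.
        text = data.decode(errors="replace").swapcase()
        return "case-swapped text", contentviews.format_text(text)

view = ViewSwapCase()

def load(loader):
    contentviews.add(view)

def done():
    contentviews.remove(view)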


@ -2,10 +2,11 @@
Use mitmproxy's filter pattern in scripts.
"""
from __future__ import annotations
import logging
from mitmproxy import http
from mitmproxy import flowfilter
from mitmproxy import http
class Filter:
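The filter API wrapped here is small: compile an expression once with flowfilter.parse, then test flows with flowfilter.match. A hedged sketch of direct usage (the filter expression is illustrative):

from mitmproxy import flowfilter
from mitmproxy import http

flt = flowfilter.parse("~d example.com & ~m POST")

def request(flow: http.HTTPFlow) -> None:
    if flowfilter.match(flt, flow):
        print("matched:", flow.request.pretty_url)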


@ -7,7 +7,8 @@ Modifying streamed responses is tricky and brittle:
- If you want to replace all occurrences of "foobar", make sure to catch the cases
where one chunk ends with [...]foo" and the next starts with "bar[...].
"""
from typing import Iterable, Union
from collections.abc import Iterable
from typing import Union
def modify(data: bytes) -> Union[bytes, Iterable[bytes]]:
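One boundary-safe implementation of the caveat above is to hold back the longest suffix of the processed buffer that could still begin a split match. A hedged sketch, assuming the replacement never ends with a prefix of the needle and that the stream callable sees a final empty chunk at end of stream; a real addon would keep this state per flow rather than in module globals:

NEEDLE, REPLACEMENT = b"foobar", b"FOOBAR"
_tail = b""

def modify(data: bytes) -> bytes:
    global _tail
    buf = (_tail + data).replace(NEEDLE, REPLACEMENT)
    if not data:  # end of stream: flush whatever is buffered
        _tail = b""
        return buf
    # Hold back the longest suffix of buf that is a proper prefix of NEEDLE;
    # the next chunk may complete it into a full match.
    keep = 0
    for i in range(min(len(NEEDLE) - 1, len(buf)), 0, -1):
        if buf.endswith(NEEDLE[:i]):
            keep = i
            break
    _tail = buf[-keep:] if keep else b""
    return buf[:-keep] if keep else buf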


@ -6,7 +6,6 @@ the body is fully transmitted. Such trailers need to be announced in the initial
headers by name, so the receiving endpoint can wait and read them after the
body.
"""
from mitmproxy import http
from mitmproxy.http import Headers
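A hedged sketch of the announce-then-attach pattern described above (the trailer name and value are illustrative, and trailers need a transport that carries them, such as chunked HTTP/1.1 or HTTP/2):

def response(flow: http.HTTPFlow) -> None:
    assert flow.response
    # Announce the trailer by name in the initial headers...
    flow.response.headers["trailer"] = "x-my-checksum"
    # ...then attach the trailer field itself; it is sent after the body.
    flow.response.trailers = Headers([(b"x-my-checksum", b"42")])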


@ -2,11 +2,13 @@
"""
Read a mitmproxy dump file.
"""
from mitmproxy import io, http
from mitmproxy.exceptions import FlowReadException
import pprint
import sys
from mitmproxy import http
from mitmproxy import io
from mitmproxy.exceptions import FlowReadException
with open(sys.argv[1], "rb") as logfile:
freader = io.FlowReader(logfile)
pp = pprint.PrettyPrinter(indent=4)


@ -11,7 +11,8 @@ import random
import sys
from typing import BinaryIO
from mitmproxy import io, http
from mitmproxy import http
from mitmproxy import io
class Writer:
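For the writing side of the same io API, a hedged sketch of what such a Writer might look like (path handling and the sampling rule are illustrative):

import random
from typing import BinaryIO

from mitmproxy import http
from mitmproxy import io

class Writer:
    def __init__(self, path: str) -> None:
        self.f: BinaryIO = open(path, "wb")
        self.w = io.FlowWriter(self.f)

    def response(self, flow: http.HTTPFlow) -> None:
        # Save roughly half of all completed flows.
        if random.choice([True, False]):
            self.w.add(flow)

    def done(self):
        self.f.close()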


@ -8,4 +8,7 @@ def load(l):
logging.info("This is some informative text.")
logging.warning("This is a warning.")
logging.error("This is an error.")
logging.log(ALERT, "This is an alert. It has the same urgency as info, but will also pop up in the status bar.")
logging.log(
ALERT,
"This is an alert. It has the same urgency as info, but will also pop up in the status bar.",
)


@ -3,7 +3,6 @@ Make events hooks non-blocking using async or @concurrent
"""
import asyncio
import logging
import time
from mitmproxy.script import concurrent


@ -10,7 +10,8 @@ Usage:
"""
import logging
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import http
def request(flow: http.HTTPFlow) -> None:
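The hook body itself falls outside this hunk; it typically rewrites the request's destination, roughly:

def request(flow: http.HTTPFlow) -> None:
    # pretty_host takes the Host header into account, plain .host does not.
    if flow.request.pretty_host == "example.org":
        flow.request.host = "mitmproxy.org"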


@ -12,8 +12,8 @@ Example Invocation:
"""
import logging
from mitmproxy.utils import strutils
from mitmproxy import tcp
from mitmproxy.utils import strutils
def tcp_message(flow: tcp.TCPFlow):
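The hook body falls outside this hunk; it usually logs the most recent message, roughly (using the imports shown above):

def tcp_message(flow: tcp.TCPFlow):
    message = flow.messages[-1]
    logging.info(
        "tcp %s: %s",
        "client -> server" if message.from_client else "server -> client",
        strutils.bytes_to_escaped_str(message.content),
    )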


@ -5,7 +5,8 @@ This example shows how to inject a WebSocket message into a running connection.
"""
import asyncio
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import http
# Simple example: Inject a message as a response to an event
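A hedged sketch of that pattern: watch incoming messages and inject a reply through the inject.websocket command (the trigger text and payload are illustrative):

def websocket_message(flow: http.HTTPFlow):
    assert flow.websocket is not None
    last = flow.websocket.messages[-1]
    if last.is_text and "ping" in last.text:
        # Inject a reply on the same connection, toward the original sender.
        ctx.master.commands.call("inject.websocket", flow, last.from_client, b"pong")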


@ -6,6 +6,7 @@ instance, we're using the Flask framework (http://flask.pocoo.org/) to expose
a single simplest-possible page.
"""
from flask import Flask
from mitmproxy.addons import asgiapp
app = Flask("proxapp")
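Between this hunk and the next, the example defines the page itself, roughly:

@app.route("/")
def hello_world() -> str:
    return "Hello World!"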
@ -24,5 +25,4 @@ addons = [
# mitmproxy will connect to said domain and use its certificate but won't send any data.
# By using `--set upstream_cert=false` and `--set connection_strategy=lazy` the local certificate is used instead.
# asgiapp.WSGIApp(app, "example.com", 443),
]


@ -1,10 +1,13 @@
from mitmproxy import ctx, command
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy.utils import emoji
@command.command('all.markers')
@command.command("all.markers")
def all_markers():
'Create a new flow showing all marker values'
"Create a new flow showing all marker values"
for marker in emoji.emoji:
ctx.master.commands.call('view.flows.create', 'get', f'https://example.com/{marker}')
ctx.master.commands.call('flow.mark', [ctx.master.view.focus.flow], marker)
ctx.master.commands.call(
"view.flows.create", "get", f"https://example.com/{marker}"
)
ctx.master.commands.call("flow.mark", [ctx.master.view.focus.flow], marker)


@ -9,79 +9,250 @@ import logging
# known DoH providers' hostnames and IP addresses to block
default_blocklist: dict = {
"hostnames": [
"dns.adguard.com", "dns-family.adguard.com", "dns.google", "cloudflare-dns.com",
"mozilla.cloudflare-dns.com", "security.cloudflare-dns.com", "family.cloudflare-dns.com",
"dns.quad9.net", "dns9.quad9.net", "dns10.quad9.net", "dns11.quad9.net", "doh.opendns.com",
"doh.familyshield.opendns.com", "doh.cleanbrowsing.org", "doh.xfinity.com", "dohdot.coxlab.net",
"odvr.nic.cz", "doh.dnslify.com", "dns.nextdns.io", "dns.dnsoverhttps.net", "doh.crypto.sx",
"doh.powerdns.org", "doh-fi.blahdns.com", "doh-jp.blahdns.com", "doh-de.blahdns.com",
"doh.ffmuc.net", "dns.dns-over-https.com", "doh.securedns.eu", "dns.rubyfish.cn",
"dns.containerpi.com", "dns.containerpi.com", "dns.containerpi.com", "doh-2.seby.io",
"doh.seby.io", "commons.host", "doh.dnswarden.com", "doh.dnswarden.com", "doh.dnswarden.com",
"dns-nyc.aaflalo.me", "dns.aaflalo.me", "doh.applied-privacy.net", "doh.captnemo.in",
"doh.tiar.app", "doh.tiarap.org", "doh.dns.sb", "rdns.faelix.net", "doh.li", "doh.armadillodns.net",
"jp.tiar.app", "jp.tiarap.org", "doh.42l.fr", "dns.hostux.net", "dns.hostux.net", "dns.aa.net.uk",
"adblock.mydns.network", "ibksturm.synology.me", "jcdns.fun", "ibuki.cgnat.net", "dns.twnic.tw",
"example.doh.blockerdns.com", "dns.digitale-gesellschaft.ch", "doh.libredns.gr",
"doh.centraleu.pi-dns.com", "doh.northeu.pi-dns.com", "doh.westus.pi-dns.com",
"doh.eastus.pi-dns.com", "dns.flatuslifir.is", "private.canadianshield.cira.ca",
"protected.canadianshield.cira.ca", "family.canadianshield.cira.ca", "dns.google.com",
"dns.google.com"
"dns.adguard.com",
"dns-family.adguard.com",
"dns.google",
"cloudflare-dns.com",
"mozilla.cloudflare-dns.com",
"security.cloudflare-dns.com",
"family.cloudflare-dns.com",
"dns.quad9.net",
"dns9.quad9.net",
"dns10.quad9.net",
"dns11.quad9.net",
"doh.opendns.com",
"doh.familyshield.opendns.com",
"doh.cleanbrowsing.org",
"doh.xfinity.com",
"dohdot.coxlab.net",
"odvr.nic.cz",
"doh.dnslify.com",
"dns.nextdns.io",
"dns.dnsoverhttps.net",
"doh.crypto.sx",
"doh.powerdns.org",
"doh-fi.blahdns.com",
"doh-jp.blahdns.com",
"doh-de.blahdns.com",
"doh.ffmuc.net",
"dns.dns-over-https.com",
"doh.securedns.eu",
"dns.rubyfish.cn",
"dns.containerpi.com",
"dns.containerpi.com",
"dns.containerpi.com",
"doh-2.seby.io",
"doh.seby.io",
"commons.host",
"doh.dnswarden.com",
"doh.dnswarden.com",
"doh.dnswarden.com",
"dns-nyc.aaflalo.me",
"dns.aaflalo.me",
"doh.applied-privacy.net",
"doh.captnemo.in",
"doh.tiar.app",
"doh.tiarap.org",
"doh.dns.sb",
"rdns.faelix.net",
"doh.li",
"doh.armadillodns.net",
"jp.tiar.app",
"jp.tiarap.org",
"doh.42l.fr",
"dns.hostux.net",
"dns.hostux.net",
"dns.aa.net.uk",
"adblock.mydns.network",
"ibksturm.synology.me",
"jcdns.fun",
"ibuki.cgnat.net",
"dns.twnic.tw",
"example.doh.blockerdns.com",
"dns.digitale-gesellschaft.ch",
"doh.libredns.gr",
"doh.centraleu.pi-dns.com",
"doh.northeu.pi-dns.com",
"doh.westus.pi-dns.com",
"doh.eastus.pi-dns.com",
"dns.flatuslifir.is",
"private.canadianshield.cira.ca",
"protected.canadianshield.cira.ca",
"family.canadianshield.cira.ca",
"dns.google.com",
"dns.google.com",
],
"ips": [
"104.16.248.249", "104.16.248.249", "104.16.249.249", "104.16.249.249", "104.18.2.55",
"104.18.26.128", "104.18.27.128", "104.18.3.55", "104.18.44.204", "104.18.44.204",
"104.18.45.204", "104.18.45.204", "104.182.57.196", "104.236.178.232", "104.24.122.53",
"104.24.123.53", "104.28.0.106", "104.28.1.106", "104.31.90.138", "104.31.91.138",
"115.159.131.230", "116.202.176.26", "116.203.115.192", "136.144.215.158", "139.59.48.222",
"139.99.222.72", "146.112.41.2", "146.112.41.3", "146.185.167.43", "149.112.112.10",
"149.112.112.11", "149.112.112.112", "149.112.112.9", "149.112.121.10", "149.112.121.20",
"149.112.121.30", "149.112.122.10", "149.112.122.20", "149.112.122.30", "159.69.198.101",
"168.235.81.167", "172.104.93.80", "172.65.3.223", "174.138.29.175", "174.68.248.77",
"176.103.130.130", "176.103.130.131", "176.103.130.132", "176.103.130.134", "176.56.236.175",
"178.62.214.105", "185.134.196.54", "185.134.197.54", "185.213.26.187", "185.216.27.142",
"185.228.168.10", "185.228.168.168", "185.235.81.1", "185.26.126.37", "185.26.126.37",
"185.43.135.1", "185.95.218.42", "185.95.218.43", "195.30.94.28", "2001:148f:fffe::1",
"2001:19f0:7001:3259:5400:2ff:fe71:bc9", "2001:19f0:7001:5554:5400:2ff:fe57:3077",
"2001:19f0:7001:5554:5400:2ff:fe57:3077", "2001:19f0:7001:5554:5400:2ff:fe57:3077",
"2001:4860:4860::8844", "2001:4860:4860::8888",
"2001:4b98:dc2:43:216:3eff:fe86:1d28", "2001:558:fe21:6b:96:113:151:149",
"2001:608:a01::3", "2001:678:888:69:c45d:2738:c3f2:1878", "2001:8b0::2022", "2001:8b0::2023",
"2001:c50:ffff:1:101:101:101:101", "210.17.9.228", "217.169.20.22", "217.169.20.23",
"2400:6180:0:d0::5f73:4001", "2400:8902::f03c:91ff:feda:c514", "2604:180:f3::42",
"2604:a880:1:20::51:f001", "2606:4700::6810:f8f9", "2606:4700::6810:f9f9", "2606:4700::6812:1a80",
"2606:4700::6812:1b80", "2606:4700::6812:237", "2606:4700::6812:337", "2606:4700:3033::6812:2ccc",
"2606:4700:3033::6812:2dcc", "2606:4700:3033::6818:7b35", "2606:4700:3034::681c:16a",
"2606:4700:3035::6818:7a35", "2606:4700:3035::681f:5a8a", "2606:4700:3036::681c:6a",
"2606:4700:3036::681f:5b8a", "2606:4700:60:0:a71e:6467:cef8:2a56", "2620:10a:80bb::10",
"2620:10a:80bb::20", "2620:10a:80bb::30" "2620:10a:80bc::10", "2620:10a:80bc::20",
"2620:10a:80bc::30", "2620:119:fc::2", "2620:119:fc::3", "2620:fe::10", "2620:fe::11",
"2620:fe::9", "2620:fe::fe:10", "2620:fe::fe:11", "2620:fe::fe:9", "2620:fe::fe",
"2a00:5a60::ad1:ff", "2a00:5a60::ad2:ff", "2a00:5a60::bad1:ff", "2a00:5a60::bad2:ff",
"2a00:d880:5:bf0::7c93", "2a01:4f8:1c0c:8233::1", "2a01:4f8:1c1c:6b4b::1", "2a01:4f8:c2c:52bf::1",
"2a01:4f9:c010:43ce::1", "2a01:4f9:c01f:4::abcd", "2a01:7c8:d002:1ef:5054:ff:fe40:3703",
"2a01:9e00::54", "2a01:9e00::55", "2a01:9e01::54", "2a01:9e01::55",
"2a02:1205:34d5:5070:b26e:bfff:fe1d:e19b", "2a03:4000:38:53c::2",
"2a03:b0c0:0:1010::e9a:3001", "2a04:bdc7:100:70::abcd", "2a05:fc84::42", "2a05:fc84::43",
"2a07:a8c0::", "2a0d:4d00:81::1", "2a0d:5600:33:3::abcd", "35.198.2.76", "35.231.247.227",
"45.32.55.94", "45.67.219.208", "45.76.113.31", "45.77.180.10", "45.90.28.0",
"46.101.66.244", "46.227.200.54", "46.227.200.55", "46.239.223.80", "8.8.4.4",
"8.8.8.8", "83.77.85.7", "88.198.91.187", "9.9.9.10", "9.9.9.11", "9.9.9.9",
"94.130.106.88", "95.216.181.228", "95.216.212.177", "96.113.151.148",
]
"104.16.248.249",
"104.16.248.249",
"104.16.249.249",
"104.16.249.249",
"104.18.2.55",
"104.18.26.128",
"104.18.27.128",
"104.18.3.55",
"104.18.44.204",
"104.18.44.204",
"104.18.45.204",
"104.18.45.204",
"104.182.57.196",
"104.236.178.232",
"104.24.122.53",
"104.24.123.53",
"104.28.0.106",
"104.28.1.106",
"104.31.90.138",
"104.31.91.138",
"115.159.131.230",
"116.202.176.26",
"116.203.115.192",
"136.144.215.158",
"139.59.48.222",
"139.99.222.72",
"146.112.41.2",
"146.112.41.3",
"146.185.167.43",
"149.112.112.10",
"149.112.112.11",
"149.112.112.112",
"149.112.112.9",
"149.112.121.10",
"149.112.121.20",
"149.112.121.30",
"149.112.122.10",
"149.112.122.20",
"149.112.122.30",
"159.69.198.101",
"168.235.81.167",
"172.104.93.80",
"172.65.3.223",
"174.138.29.175",
"174.68.248.77",
"176.103.130.130",
"176.103.130.131",
"176.103.130.132",
"176.103.130.134",
"176.56.236.175",
"178.62.214.105",
"185.134.196.54",
"185.134.197.54",
"185.213.26.187",
"185.216.27.142",
"185.228.168.10",
"185.228.168.168",
"185.235.81.1",
"185.26.126.37",
"185.26.126.37",
"185.43.135.1",
"185.95.218.42",
"185.95.218.43",
"195.30.94.28",
"2001:148f:fffe::1",
"2001:19f0:7001:3259:5400:2ff:fe71:bc9",
"2001:19f0:7001:5554:5400:2ff:fe57:3077",
"2001:19f0:7001:5554:5400:2ff:fe57:3077",
"2001:19f0:7001:5554:5400:2ff:fe57:3077",
"2001:4860:4860::8844",
"2001:4860:4860::8888",
"2001:4b98:dc2:43:216:3eff:fe86:1d28",
"2001:558:fe21:6b:96:113:151:149",
"2001:608:a01::3",
"2001:678:888:69:c45d:2738:c3f2:1878",
"2001:8b0::2022",
"2001:8b0::2023",
"2001:c50:ffff:1:101:101:101:101",
"210.17.9.228",
"217.169.20.22",
"217.169.20.23",
"2400:6180:0:d0::5f73:4001",
"2400:8902::f03c:91ff:feda:c514",
"2604:180:f3::42",
"2604:a880:1:20::51:f001",
"2606:4700::6810:f8f9",
"2606:4700::6810:f9f9",
"2606:4700::6812:1a80",
"2606:4700::6812:1b80",
"2606:4700::6812:237",
"2606:4700::6812:337",
"2606:4700:3033::6812:2ccc",
"2606:4700:3033::6812:2dcc",
"2606:4700:3033::6818:7b35",
"2606:4700:3034::681c:16a",
"2606:4700:3035::6818:7a35",
"2606:4700:3035::681f:5a8a",
"2606:4700:3036::681c:6a",
"2606:4700:3036::681f:5b8a",
"2606:4700:60:0:a71e:6467:cef8:2a56",
"2620:10a:80bb::10",
"2620:10a:80bb::20",
"2620:10a:80bb::30" "2620:10a:80bc::10",
"2620:10a:80bc::20",
"2620:10a:80bc::30",
"2620:119:fc::2",
"2620:119:fc::3",
"2620:fe::10",
"2620:fe::11",
"2620:fe::9",
"2620:fe::fe:10",
"2620:fe::fe:11",
"2620:fe::fe:9",
"2620:fe::fe",
"2a00:5a60::ad1:ff",
"2a00:5a60::ad2:ff",
"2a00:5a60::bad1:ff",
"2a00:5a60::bad2:ff",
"2a00:d880:5:bf0::7c93",
"2a01:4f8:1c0c:8233::1",
"2a01:4f8:1c1c:6b4b::1",
"2a01:4f8:c2c:52bf::1",
"2a01:4f9:c010:43ce::1",
"2a01:4f9:c01f:4::abcd",
"2a01:7c8:d002:1ef:5054:ff:fe40:3703",
"2a01:9e00::54",
"2a01:9e00::55",
"2a01:9e01::54",
"2a01:9e01::55",
"2a02:1205:34d5:5070:b26e:bfff:fe1d:e19b",
"2a03:4000:38:53c::2",
"2a03:b0c0:0:1010::e9a:3001",
"2a04:bdc7:100:70::abcd",
"2a05:fc84::42",
"2a05:fc84::43",
"2a07:a8c0::",
"2a0d:4d00:81::1",
"2a0d:5600:33:3::abcd",
"35.198.2.76",
"35.231.247.227",
"45.32.55.94",
"45.67.219.208",
"45.76.113.31",
"45.77.180.10",
"45.90.28.0",
"46.101.66.244",
"46.227.200.54",
"46.227.200.55",
"46.239.223.80",
"8.8.4.4",
"8.8.8.8",
"83.77.85.7",
"88.198.91.187",
"9.9.9.10",
"9.9.9.11",
"9.9.9.9",
"94.130.106.88",
"95.216.181.228",
"95.216.212.177",
"96.113.151.148",
],
}
# additional hostnames to block
additional_doh_names: list[str] = [
'dns.google.com'
]
additional_doh_names: list[str] = ["dns.google.com"]
# additional IPs to block
additional_doh_ips: list[str] = [
additional_doh_ips: list[str] = []
]
doh_hostnames, doh_ips = default_blocklist['hostnames'], default_blocklist['ips']
doh_hostnames, doh_ips = default_blocklist["hostnames"], default_blocklist["ips"]
# convert to sets for faster lookups
doh_hostnames = set(doh_hostnames)
@ -95,9 +266,9 @@ def _has_dns_message_content_type(flow):
:param flow: mitmproxy flow
:return: True if 'Content-Type' header is DNS-looking, False otherwise
"""
doh_content_types = ['application/dns-message']
if 'Content-Type' in flow.request.headers:
if flow.request.headers['Content-Type'] in doh_content_types:
doh_content_types = ["application/dns-message"]
if "Content-Type" in flow.request.headers:
if flow.request.headers["Content-Type"] in doh_content_types:
return True
return False
@ -109,7 +280,7 @@ def _request_has_dns_query_string(flow):
:param flow: mitmproxy flow
:return: True if 'dns' is a parameter in the query string, False otherwise
"""
return 'dns' in flow.request.query
return "dns" in flow.request.query
def _request_is_dns_json(flow):
@ -127,12 +298,12 @@ def _request_is_dns_json(flow):
"""
# Header 'Accept: application/dns-json' is required in Cloudflare's DoH JSON API
# or they return a 400 HTTP response code
if 'Accept' in flow.request.headers:
if flow.request.headers['Accept'] == 'application/dns-json':
if "Accept" in flow.request.headers:
if flow.request.headers["Accept"] == "application/dns-json":
return True
# Google's DoH JSON API is https://dns.google/resolve
path = flow.request.path.split('?')[0]
if flow.request.host == 'dns.google' and path == '/resolve':
path = flow.request.path.split("?")[0]
if flow.request.host == "dns.google" and path == "/resolve":
return True
return False
@ -146,9 +317,9 @@ def _request_has_doh_looking_path(flow):
:return: True if path looks like it's DoH, otherwise False
"""
doh_paths = [
'/dns-query', # used in example in RFC 8484 (see https://tools.ietf.org/html/rfc8484#section-4.1.1)
"/dns-query", # used in example in RFC 8484 (see https://tools.ietf.org/html/rfc8484#section-4.1.1)
]
path = flow.request.path.split('?')[0]
path = flow.request.path.split("?")[0]
return path in doh_paths
@ -171,7 +342,7 @@ doh_request_detection_checks = [
_request_has_dns_query_string,
_request_is_dns_json,
_requested_hostname_is_in_doh_blocklist,
_request_has_doh_looking_path
_request_has_doh_looking_path,
]
@ -179,6 +350,9 @@ def request(flow):
for check in doh_request_detection_checks:
is_doh = check(flow)
if is_doh:
logging.warning("[DoH Detection] DNS over HTTPS request detected via method \"%s\"" % check.__name__)
logging.warning(
'[DoH Detection] DNS over HTTPS request detected via method "%s"'
% check.__name__
)
flow.kill()
break


@ -1,4 +1,3 @@
from mitmproxy import http
from mitmproxy.connection import Server
from mitmproxy.net.server_spec import ServerSpec


@ -1,14 +1,17 @@
import ipaddress
import time
import OpenSSL
import mitmproxy
from mitmproxy import ctx
from mitmproxy.certs import Cert
import ipaddress
import OpenSSL
import time
# The certificate for the client connection is generated in dummy_cert() in certs.py. We monkeypatch
# that function to generate test cases for SSL pinning.
def monkey_dummy_cert(privkey, cacert, commonname, sans):
ss = []
for i in sans:
@ -42,7 +45,7 @@ def monkey_dummy_cert(privkey, cacert, commonname, sans):
if ctx.options.certwrongCN:
# append an extra char to make the cert's common name different from the original one.
# Appending a char at the end of the domain name.
new_cn = commonname + b'm'
new_cn = commonname + b"m"
cert.get_subject().CN = new_cn
else:
@ -52,7 +55,8 @@ def monkey_dummy_cert(privkey, cacert, commonname, sans):
if ss:
cert.set_version(2)
cert.add_extensions(
[OpenSSL.crypto.X509Extension(b"subjectAltName", False, ss)])
[OpenSSL.crypto.X509Extension(b"subjectAltName", False, ss)]
)
cert.set_pubkey(cacert.get_pubkey())
cert.sign(privkey, "sha256")
return Cert(cert)
@ -61,23 +65,29 @@ def monkey_dummy_cert(privkey, cacert, commonname, sans):
class CheckSSLPinning:
def load(self, loader):
loader.add_option(
"certbeginon", bool, False,
"certbeginon",
bool,
False,
"""
Sets SSL Certificate's 'Begins On' time in future.
"""
""",
)
loader.add_option(
"certexpire", bool, False,
"certexpire",
bool,
False,
"""
Sets SSL Certificate's 'Expires On' time in the past.
"""
""",
)
loader.add_option(
"certwrongCN", bool, False,
"certwrongCN",
bool,
False,
"""
Sets SSL Certificate's CommonName(CN) different from the domain name.
"""
""",
)
def clientconnect(self, layer):


@ -11,7 +11,8 @@ Example usage:
import logging
from mitmproxy import ctx
from mitmproxy.proxy import layer, layers
from mitmproxy.proxy import layer
from mitmproxy.proxy import layers
def running():


@ -1,6 +1,8 @@
from typing import Optional, Union
import json
from dataclasses import dataclass
from typing import Optional
from typing import Union
from mitmproxy import ctx
from mitmproxy.addonmanager import Loader
from mitmproxy.http import HTTPFlow
@ -79,7 +81,7 @@ class HttpsDomainFronting:
index = host.find(".", index)
if index == -1:
break
super_domain = host[(index + 1):]
super_domain = host[(index + 1) :]
mapping = self.star_mappings.get(super_domain)
if mapping is not None:
return mapping


@ -7,16 +7,14 @@ mitmdump -s ./har_dump.py --set hardump=./dump.har
filenames ending with '.zhar' will be compressed:
mitmdump -s ./har_dump.py --set hardump=./dump.zhar
"""
import base64
import json
import logging
import os
import zlib
from datetime import datetime
from datetime import timezone
import zlib
import mitmproxy
from mitmproxy import connection
from mitmproxy import ctx
@ -33,27 +31,28 @@ SERVERS_SEEN: set[connection.Server] = set()
def load(l):
l.add_option(
"hardump", str, "", "HAR dump path.",
"hardump",
str,
"",
"HAR dump path.",
)
def configure(updated):
HAR.update({
"log": {
"version": "1.2",
"creator": {
"name": "mitmproxy har_dump",
"version": "0.1",
"comment": "mitmproxy version %s" % version.MITMPROXY
},
"pages": [
{
"pageTimings": {}
}
],
"entries": []
HAR.update(
{
"log": {
"version": "1.2",
"creator": {
"name": "mitmproxy har_dump",
"version": "0.1",
"comment": "mitmproxy version %s" % version.MITMPROXY,
},
"pages": [{"pageTimings": {}}],
"entries": [],
}
}
})
)
# The `pages` attribute is needed for Firefox Dev Tools to load the HAR file.
# An empty value works fine.
@ -65,12 +64,15 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
connect_time = -1
if flow.server_conn and flow.server_conn not in SERVERS_SEEN:
connect_time = (flow.server_conn.timestamp_tcp_setup -
flow.server_conn.timestamp_start)
connect_time = (
flow.server_conn.timestamp_tcp_setup - flow.server_conn.timestamp_start
)
if flow.server_conn.timestamp_tls_setup is not None:
ssl_time = (flow.server_conn.timestamp_tls_setup -
flow.server_conn.timestamp_tcp_setup)
ssl_time = (
flow.server_conn.timestamp_tls_setup
- flow.server_conn.timestamp_tcp_setup
)
SERVERS_SEEN.add(flow.server_conn)
@ -81,28 +83,31 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
# spent waiting between request.timestamp_end and response.timestamp_start
# thus it correlates to HAR wait instead.
timings_raw = {
'send': flow.request.timestamp_end - flow.request.timestamp_start,
'receive': flow.response.timestamp_end - flow.response.timestamp_start,
'wait': flow.response.timestamp_start - flow.request.timestamp_end,
'connect': connect_time,
'ssl': ssl_time,
"send": flow.request.timestamp_end - flow.request.timestamp_start,
"receive": flow.response.timestamp_end - flow.response.timestamp_start,
"wait": flow.response.timestamp_start - flow.request.timestamp_end,
"connect": connect_time,
"ssl": ssl_time,
}
# HAR timings are integers in ms, so we re-encode the raw timings to that format.
timings = {
k: int(1000 * v) if v != -1 else -1
for k, v in timings_raw.items()
}
timings = {k: int(1000 * v) if v != -1 else -1 for k, v in timings_raw.items()}
# full_time is the sum of all timings.
# Timings set to -1 will be ignored as per spec.
full_time = sum(v for v in timings.values() if v > -1)
started_date_time = datetime.fromtimestamp(flow.request.timestamp_start, timezone.utc).isoformat()
started_date_time = datetime.fromtimestamp(
flow.request.timestamp_start, timezone.utc
).isoformat()
# Response body size and encoding
response_body_size = len(flow.response.raw_content) if flow.response.raw_content else 0
response_body_decoded_size = len(flow.response.content) if flow.response.content else 0
response_body_size = (
len(flow.response.raw_content) if flow.response.raw_content else 0
)
response_body_decoded_size = (
len(flow.response.content) if flow.response.content else 0
)
response_body_compression = response_body_decoded_size - response_body_size
entry = {
@ -127,9 +132,9 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
"content": {
"size": response_body_size,
"compression": response_body_compression,
"mimeType": flow.response.headers.get('Content-Type', '')
"mimeType": flow.response.headers.get("Content-Type", ""),
},
"redirectURL": flow.response.headers.get('Location', ''),
"redirectURL": flow.response.headers.get("Location", ""),
"headersSize": len(str(flow.response.headers)),
"bodySize": response_body_size,
},
@ -139,7 +144,9 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
# Store binary data as base64
if strutils.is_mostly_bin(flow.response.content):
entry["response"]["content"]["text"] = base64.b64encode(flow.response.content).decode()
entry["response"]["content"]["text"] = base64.b64encode(
flow.response.content
).decode()
entry["response"]["content"]["encoding"] = "base64"
else:
entry["response"]["content"]["text"] = flow.response.get_text(strict=False)
@ -152,7 +159,7 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
entry["request"]["postData"] = {
"mimeType": flow.request.headers.get("Content-Type", ""),
"text": flow.request.get_text(strict=False),
"params": params
"params": params,
}
if flow.server_conn.connected:
@ -165,7 +172,7 @@ def flow_entry(flow: mitmproxy.http.HTTPFlow) -> dict:
def response(flow: mitmproxy.http.HTTPFlow):
"""
Called when a server response has been received.
Called when a server response has been received.
"""
if flow.websocket is None:
flow_entry(flow)
@ -182,29 +189,29 @@ def websocket_end(flow: mitmproxy.http.HTTPFlow):
else:
data = base64.b64encode(message.content).decode()
websocket_message = {
'type': 'send' if message.from_client else 'receive',
'time': message.timestamp,
'opcode': message.type.value,
'data': data
"type": "send" if message.from_client else "receive",
"time": message.timestamp,
"opcode": message.type.value,
"data": data,
}
websocket_messages.append(websocket_message)
entry['_resourceType'] = 'websocket'
entry['_webSocketMessages'] = websocket_messages
entry["_resourceType"] = "websocket"
entry["_webSocketMessages"] = websocket_messages
def done():
"""
Called once on script shutdown, after any other events.
Called once on script shutdown, after any other events.
"""
if ctx.options.hardump:
json_dump: str = json.dumps(HAR, indent=2)
if ctx.options.hardump == '-':
if ctx.options.hardump == "-":
print(json_dump)
else:
raw: bytes = json_dump.encode()
if ctx.options.hardump.endswith('.zhar'):
if ctx.options.hardump.endswith(".zhar"):
raw = zlib.compress(raw, 9)
with open(os.path.expanduser(ctx.options.hardump), "wb") as f:
@ -234,7 +241,9 @@ def format_cookies(cookie_list):
# Expiration time needs to be formatted
expire_ts = cookies.get_expiration_ts(attrs)
if expire_ts is not None:
cookie_har["expires"] = datetime.fromtimestamp(expire_ts, timezone.utc).isoformat()
cookie_har["expires"] = datetime.fromtimestamp(
expire_ts, timezone.utc
).isoformat()
rv.append(cookie_har)
@ -251,6 +260,6 @@ def format_response_cookies(fields):
def name_value(obj):
"""
Convert (key, value) pairs to HAR format.
Convert (key, value) pairs to HAR format.
"""
return [{"name": k, "value": v} for k, v in obj.items()]


@ -15,9 +15,10 @@ Note:
"""
import json
from mitmproxy import http
from typing import Union
from mitmproxy import http
PATH_TO_COOKIES = "./cookies.json" # insert your path to the cookie file here
FILTER_COOKIES = {
@ -43,7 +44,14 @@ def stringify_cookies(cookies: list[dict[str, Union[str, None]]]) -> str:
"""
Creates a cookie string from a list of cookie dicts.
"""
return "; ".join([f"{c['name']}={c['value']}" if c.get("value", None) is not None else f"{c['name']}" for c in cookies])
return "; ".join(
[
f"{c['name']}={c['value']}"
if c.get("value", None) is not None
else f"{c['name']}"
for c in cookies
]
)
def parse_cookies(cookie_string: str) -> list[dict[str, Union[str, None]]]:
@ -52,7 +60,9 @@ def parse_cookies(cookie_string: str) -> list[dict[str, Union[str, None]]]:
"""
return [
{"name": g[0], "value": g[1]} if len(g) == 2 else {"name": g[0], "value": None}
for g in [k.split("=", 1) for k in [c.strip() for c in cookie_string.split(";")] if k]
for g in [
k.split("=", 1) for k in [c.strip() for c in cookie_string.split(";")] if k
]
]
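A quick round-trip shows how the two helpers compose (values are hypothetical):

cookies = parse_cookies("session=abc123; secure")
# -> [{"name": "session", "value": "abc123"}, {"name": "secure", "value": None}]
assert stringify_cookies(cookies) == "session=abc123; secure"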


@ -14,8 +14,9 @@ import mimetypes
import os
from pathlib import Path
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import flowfilter
from mitmproxy import http
class HTTPDump:
@ -32,7 +33,7 @@ class HTTPDump:
name="open_browser",
typespec=bool,
default=True,
help="open integrated browser at start"
help="open integrated browser at start",
)
def running(self):


@ -34,7 +34,8 @@ import base64
import json
import logging
from queue import Queue
from threading import Lock, Thread
from threading import Lock
from threading import Thread
import requests
@ -66,76 +67,77 @@ class JSONDumper:
self.outfile.close()
fields = {
'timestamp': (
('error', 'timestamp'),
('request', 'timestamp_start'),
('request', 'timestamp_end'),
('response', 'timestamp_start'),
('response', 'timestamp_end'),
('client_conn', 'timestamp_start'),
('client_conn', 'timestamp_end'),
('client_conn', 'timestamp_tls_setup'),
('server_conn', 'timestamp_start'),
('server_conn', 'timestamp_end'),
('server_conn', 'timestamp_tls_setup'),
('server_conn', 'timestamp_tcp_setup'),
"timestamp": (
("error", "timestamp"),
("request", "timestamp_start"),
("request", "timestamp_end"),
("response", "timestamp_start"),
("response", "timestamp_end"),
("client_conn", "timestamp_start"),
("client_conn", "timestamp_end"),
("client_conn", "timestamp_tls_setup"),
("server_conn", "timestamp_start"),
("server_conn", "timestamp_end"),
("server_conn", "timestamp_tls_setup"),
("server_conn", "timestamp_tcp_setup"),
),
'ip': (
('server_conn', 'source_address'),
('server_conn', 'ip_address'),
('server_conn', 'address'),
('client_conn', 'address'),
"ip": (
("server_conn", "source_address"),
("server_conn", "ip_address"),
("server_conn", "address"),
("client_conn", "address"),
),
'ws_messages': (
('messages',),
"ws_messages": (("messages",),),
"headers": (
("request", "headers"),
("response", "headers"),
),
'headers': (
('request', 'headers'),
('response', 'headers'),
),
'content': (
('request', 'content'),
('response', 'content'),
"content": (
("request", "content"),
("response", "content"),
),
}
def _init_transformations(self):
self.transformations = [
{
'fields': self.fields['headers'],
'func': dict,
"fields": self.fields["headers"],
"func": dict,
},
{
'fields': self.fields['timestamp'],
'func': lambda t: int(t * 1000),
"fields": self.fields["timestamp"],
"func": lambda t: int(t * 1000),
},
{
'fields': self.fields['ip'],
'func': lambda addr: {
'host': addr[0].replace('::ffff:', ''),
'port': addr[1],
"fields": self.fields["ip"],
"func": lambda addr: {
"host": addr[0].replace("::ffff:", ""),
"port": addr[1],
},
},
{
'fields': self.fields['ws_messages'],
'func': lambda ms: [{
'type': m[0],
'from_client': m[1],
'content': base64.b64encode(bytes(m[2], 'utf-8')) if self.encode else m[2],
'timestamp': int(m[3] * 1000),
} for m in ms],
}
"fields": self.fields["ws_messages"],
"func": lambda ms: [
{
"type": m[0],
"from_client": m[1],
"content": base64.b64encode(bytes(m[2], "utf-8"))
if self.encode
else m[2],
"timestamp": int(m[3] * 1000),
}
for m in ms
],
},
]
if self.encode:
self.transformations.append({
'fields': self.fields['content'],
'func': base64.b64encode,
})
self.transformations.append(
{
"fields": self.fields["content"],
"func": base64.b64encode,
}
)
@staticmethod
def transform_field(obj, path, func):
@ -156,8 +158,10 @@ class JSONDumper:
Recursively convert all list/dict elements of type `bytes` into strings.
"""
if isinstance(obj, dict):
return {cls.convert_to_strings(key): cls.convert_to_strings(value)
for key, value in obj.items()}
return {
cls.convert_to_strings(key): cls.convert_to_strings(value)
for key, value in obj.items()
}
elif isinstance(obj, list) or isinstance(obj, tuple):
return [cls.convert_to_strings(element) for element in obj]
elif isinstance(obj, bytes):
@ -175,8 +179,8 @@ class JSONDumper:
Transform and dump (write / send) a data frame.
"""
for tfm in self.transformations:
for field in tfm['fields']:
self.transform_field(frame, field, tfm['func'])
for field in tfm["fields"]:
self.transform_field(frame, field, tfm["func"])
frame = self.convert_to_strings(frame)
if self.outfile:
@ -191,14 +195,21 @@ class JSONDumper:
"""
Extra options to be specified in `~/.mitmproxy/config.yaml`.
"""
loader.add_option('dump_encodecontent', bool, False,
'Encode content as base64.')
loader.add_option('dump_destination', str, 'jsondump.out',
'Output destination: path to a file or URL.')
loader.add_option('dump_username', str, '',
'Basic auth username for URL destinations.')
loader.add_option('dump_password', str, '',
'Basic auth password for URL destinations.')
loader.add_option(
"dump_encodecontent", bool, False, "Encode content as base64."
)
loader.add_option(
"dump_destination",
str,
"jsondump.out",
"Output destination: path to a file or URL.",
)
loader.add_option(
"dump_username", str, "", "Basic auth username for URL destinations."
)
loader.add_option(
"dump_password", str, "", "Basic auth password for URL destinations."
)
def configure(self, _):
"""
@ -207,18 +218,18 @@ class JSONDumper:
"""
self.encode = ctx.options.dump_encodecontent
if ctx.options.dump_destination.startswith('http'):
if ctx.options.dump_destination.startswith("http"):
self.outfile = None
self.url = ctx.options.dump_destination
logging.info('Sending all data frames to %s' % self.url)
logging.info("Sending all data frames to %s" % self.url)
if ctx.options.dump_username and ctx.options.dump_password:
self.auth = (ctx.options.dump_username, ctx.options.dump_password)
logging.info('HTTP Basic auth enabled.')
logging.info("HTTP Basic auth enabled.")
else:
self.outfile = open(ctx.options.dump_destination, 'a')
self.outfile = open(ctx.options.dump_destination, "a")
self.url = None
self.lock = Lock()
logging.info('Writing all data frames to %s' % ctx.options.dump_destination)
logging.info("Writing all data frames to %s" % ctx.options.dump_destination)
self._init_transformations()


@ -2,27 +2,33 @@
# relative links (<a href="./about.html">) and expands them to absolute links
# In practice this can be used to front an indexing spider that may not have the capability to expand relative page links.
# Usage: mitmdump -s link_expander.py or mitmproxy -s link_expander.py
import re
from urllib.parse import urljoin
def response(flow):
if "Content-Type" in flow.response.headers and flow.response.headers["Content-Type"].find("text/html") != -1:
if (
"Content-Type" in flow.response.headers
and flow.response.headers["Content-Type"].find("text/html") != -1
):
pageUrl = flow.request.url
pageText = flow.response.text
pattern = (r"<a\s+(?:[^>]*?\s+)?href=(?P<delimiter>[\"'])"
r"(?P<link>(?!https?:\/\/|ftps?:\/\/|\/\/|#|javascript:|mailto:).*?)(?P=delimiter)")
pattern = (
r"<a\s+(?:[^>]*?\s+)?href=(?P<delimiter>[\"'])"
r"(?P<link>(?!https?:\/\/|ftps?:\/\/|\/\/|#|javascript:|mailto:).*?)(?P=delimiter)"
)
rel_matcher = re.compile(pattern, flags=re.IGNORECASE)
rel_matches = rel_matcher.finditer(pageText)
map_dict = {}
for match_num, match in enumerate(rel_matches):
(delimiter, rel_link) = match.group("delimiter", "link")
abs_link = urljoin(pageUrl, rel_link)
map_dict["{0}{1}{0}".format(delimiter, rel_link)] = "{0}{1}{0}".format(delimiter, abs_link)
map_dict["{0}{1}{0}".format(delimiter, rel_link)] = "{0}{1}{0}".format(
delimiter, abs_link
)
for map in map_dict.items():
pageText = pageText.replace(*map)
# Uncomment the following to print the expansion mapping
# print("{0} -> {1}".format(*map))
flow.response.text = pageText
flow.response.text = pageText


@ -6,12 +6,11 @@
#
# mitmproxywrapper.py -h
#
import subprocess
import re
import argparse
import contextlib
import os
import re
import subprocess
import sys
@ -21,59 +20,50 @@ class Wrapper:
self.extra_arguments = extra_arguments
def run_networksetup_command(self, *arguments):
return subprocess.check_output(
['sudo', 'networksetup'] + list(arguments))
return subprocess.check_output(["sudo", "networksetup"] + list(arguments))
def proxy_state_for_service(self, service):
state = self.run_networksetup_command(
'-getwebproxy',
service).splitlines()
return dict([re.findall(r'([^:]+): (.*)', line)[0] for line in state])
state = self.run_networksetup_command("-getwebproxy", service).splitlines()
return dict([re.findall(r"([^:]+): (.*)", line)[0] for line in state])
def enable_proxy_for_service(self, service):
print(f'Enabling proxy on {service}...')
for subcommand in ['-setwebproxy', '-setsecurewebproxy']:
print(f"Enabling proxy on {service}...")
for subcommand in ["-setwebproxy", "-setsecurewebproxy"]:
self.run_networksetup_command(
subcommand, service, '127.0.0.1', str(
self.port))
subcommand, service, "127.0.0.1", str(self.port)
)
def disable_proxy_for_service(self, service):
print(f'Disabling proxy on {service}...')
for subcommand in ['-setwebproxystate', '-setsecurewebproxystate']:
self.run_networksetup_command(subcommand, service, 'Off')
print(f"Disabling proxy on {service}...")
for subcommand in ["-setwebproxystate", "-setsecurewebproxystate"]:
self.run_networksetup_command(subcommand, service, "Off")
def interface_name_to_service_name_map(self):
order = self.run_networksetup_command('-listnetworkserviceorder')
order = self.run_networksetup_command("-listnetworkserviceorder")
mapping = re.findall(
r'\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$',
order,
re.MULTILINE)
r"\(\d+\)\s(.*)$\n\(.*Device: (.+)\)$", order, re.MULTILINE
)
return {b: a for (a, b) in mapping}
def run_command_with_input(self, command, input):
popen = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
popen = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout, stderr) = popen.communicate(input)
return stdout
def primary_interace_name(self):
scutil_script = 'get State:/Network/Global/IPv4\nd.show\n'
stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
interface, = re.findall(r'PrimaryInterface\s*:\s*(.+)', stdout)
scutil_script = "get State:/Network/Global/IPv4\nd.show\n"
stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
(interface,) = re.findall(r"PrimaryInterface\s*:\s*(.+)", stdout)
return interface
def primary_service_name(self):
return self.interface_name_to_service_name_map()[
self.primary_interace_name()]
return self.interface_name_to_service_name_map()[self.primary_interace_name()]
def proxy_enabled_for_service(self, service):
return self.proxy_state_for_service(service)['Enabled'] == 'Yes'
return self.proxy_state_for_service(service)["Enabled"] == "Yes"
def toggle_proxy(self):
new_state = not self.proxy_enabled_for_service(
self.primary_service_name())
new_state = not self.proxy_enabled_for_service(self.primary_service_name())
for service_name in self.connected_service_names():
if self.proxy_enabled_for_service(service_name) and not new_state:
self.disable_proxy_for_service(service_name)
@ -81,31 +71,29 @@ class Wrapper:
self.enable_proxy_for_service(service_name)
def connected_service_names(self):
scutil_script = 'list\n'
stdout = self.run_command_with_input('/usr/sbin/scutil', scutil_script)
service_ids = re.findall(r'State:/Network/Service/(.+)/IPv4', stdout)
scutil_script = "list\n"
stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
service_ids = re.findall(r"State:/Network/Service/(.+)/IPv4", stdout)
service_names = []
for service_id in service_ids:
scutil_script = f"show Setup:/Network/Service/{service_id}\n"
stdout = self.run_command_with_input(
'/usr/sbin/scutil',
scutil_script)
service_name, = re.findall(r'UserDefinedName\s*:\s*(.+)', stdout)
stdout = self.run_command_with_input("/usr/sbin/scutil", scutil_script)
(service_name,) = re.findall(r"UserDefinedName\s*:\s*(.+)", stdout)
service_names.append(service_name)
return service_names
def wrap_mitmproxy(self):
with self.wrap_proxy():
cmd = ['mitmproxy', '-p', str(self.port)]
cmd = ["mitmproxy", "-p", str(self.port)]
if self.extra_arguments:
cmd.extend(self.extra_arguments)
subprocess.check_call(cmd)
def wrap_honeyproxy(self):
with self.wrap_proxy():
popen = subprocess.Popen('honeyproxy.sh')
popen = subprocess.Popen("honeyproxy.sh")
try:
popen.wait()
except KeyboardInterrupt:
@ -127,26 +115,29 @@ class Wrapper:
@classmethod
def ensure_superuser(cls):
if os.getuid() != 0:
print('Relaunching with sudo...')
os.execv('/usr/bin/sudo', ['/usr/bin/sudo'] + sys.argv)
print("Relaunching with sudo...")
os.execv("/usr/bin/sudo", ["/usr/bin/sudo"] + sys.argv)
@classmethod
def main(cls):
parser = argparse.ArgumentParser(
description='Helper tool for OS X proxy configuration and mitmproxy.',
epilog='Any additional arguments will be passed on unchanged to mitmproxy.')
description="Helper tool for OS X proxy configuration and mitmproxy.",
epilog="Any additional arguments will be passed on unchanged to mitmproxy.",
)
parser.add_argument(
'-t',
'--toggle',
action='store_true',
help='just toggle the proxy configuration')
"-t",
"--toggle",
action="store_true",
help="just toggle the proxy configuration",
)
# parser.add_argument('--honeyproxy', action='store_true', help='run honeyproxy instead of mitmproxy')
parser.add_argument(
'-p',
'--port',
"-p",
"--port",
type=int,
help='override the default port of 8080',
default=8080)
help="override the default port of 8080",
default=8080,
)
args, extra_arguments = parser.parse_known_args()
wrapper = cls(port=args.port, extra_arguments=extra_arguments)
@ -159,6 +150,6 @@ class Wrapper:
wrapper.wrap_mitmproxy()
if __name__ == '__main__':
if __name__ == "__main__":
Wrapper.ensure_superuser()
Wrapper.main()

View File

@ -1,24 +1,21 @@
# (this script works best with --anticache)
from bs4 import BeautifulSoup
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import http
class Injector:
def load(self, loader):
loader.add_option(
"iframe", str, "", "IFrame to inject"
)
loader.add_option("iframe", str, "", "IFrame to inject")
def response(self, flow: http.HTTPFlow) -> None:
if ctx.options.iframe:
html = BeautifulSoup(flow.response.content, "html.parser")
if html.body:
iframe = html.new_tag(
"iframe",
src=ctx.options.iframe,
frameborder=0,
height=0,
width=0)
"iframe", src=ctx.options.iframe, frameborder=0, height=0, width=0
)
html.body.insert(0, iframe)
flow.response.content = str(html).encode("utf8")


@ -1,28 +1,34 @@
import base64
import binascii
import logging
import socket
from typing import Any, Optional
from typing import Any
from typing import Optional
import binascii
from ntlm_auth import gss_channel_bindings, ntlm
from ntlm_auth import gss_channel_bindings
from ntlm_auth import ntlm
from mitmproxy import addonmanager, http
from mitmproxy import addonmanager
from mitmproxy import ctx
from mitmproxy import http
from mitmproxy.net.http import http1
from mitmproxy.proxy import commands, layer
from mitmproxy.proxy import commands
from mitmproxy.proxy import layer
from mitmproxy.proxy.context import Context
from mitmproxy.proxy.layers.http import HttpConnectUpstreamHook, HttpLayer, HttpStream
from mitmproxy.proxy.layers.http import HttpConnectUpstreamHook
from mitmproxy.proxy.layers.http import HttpLayer
from mitmproxy.proxy.layers.http import HttpStream
from mitmproxy.proxy.layers.http._upstream_proxy import HttpUpstreamProxy
class NTLMUpstreamAuth:
"""
This addon handles authentication to systems upstream from us for the
upstream proxy and reverse proxy mode. There are 3 cases:
- Upstream proxy CONNECT requests should have authentication added, and
subsequent already connected requests should not.
- Upstream proxy regular requests
- Reverse proxy regular requests (CONNECT is invalid in this mode)
This addon handles authentication to systems upstream from us for the
upstream proxy and reverse proxy mode. There are 3 cases:
- Upstream proxy CONNECT requests should have authentication added, and
subsequent already connected requests should not.
- Upstream proxy regular requests
- Reverse proxy regular requests (CONNECT is invalid in this mode)
"""
def load(self, loader: addonmanager.Loader) -> None:
@ -34,7 +40,7 @@ class NTLMUpstreamAuth:
help="""
Add HTTP NTLM authentication to upstream proxy requests.
Format: username:password.
"""
""",
)
loader.add_option(
name="upstream_ntlm_domain",
@ -42,7 +48,7 @@ class NTLMUpstreamAuth:
default=None,
help="""
Add HTTP NTLM domain for authentication to upstream proxy requests.
"""
""",
)
loader.add_option(
name="upstream_proxy_address",
@ -50,7 +56,7 @@ class NTLMUpstreamAuth:
default=None,
help="""
upstream proxy address.
"""
""",
)
loader.add_option(
name="upstream_ntlm_compatibility",
@ -59,7 +65,7 @@ class NTLMUpstreamAuth:
help="""
Add HTTP NTLM compatibility for authentication to upstream proxy requests.
Valid values are 0-5 (Default: 3)
"""
""",
)
logging.debug("AddOn: NTLM Upstream Authentication - Loaded")
@ -69,9 +75,13 @@ class NTLMUpstreamAuth:
for l in context.layers:
if isinstance(l, HttpLayer):
for _, stream in l.streams.items():
return stream.flow if isinstance(stream, HttpStream) else None
return (
stream.flow if isinstance(stream, HttpStream) else None
)
def build_connect_flow(context: Context, connect_header: tuple) -> http.HTTPFlow:
def build_connect_flow(
context: Context, connect_header: tuple
) -> http.HTTPFlow:
flow = extract_flow_from_context(context)
if not flow:
logging.error("failed to build connect flow")
@ -85,23 +95,27 @@ class NTLMUpstreamAuth:
assert self.conn.address
self.ntlm_context = CustomNTLMContext(ctx)
proxy_authorization = self.ntlm_context.get_ntlm_start_negotiate_message()
self.flow = build_connect_flow(self.context, ("Proxy-Authorization", proxy_authorization))
self.flow = build_connect_flow(
self.context, ("Proxy-Authorization", proxy_authorization)
)
yield HttpConnectUpstreamHook(self.flow)
raw = http1.assemble_request(self.flow.request)
yield commands.SendData(self.tunnel_connection, raw)
def extract_proxy_authenticate_msg(response_head: list) -> str:
for header in response_head:
if b'Proxy-Authenticate' in header:
challenge_message = str(bytes(header).decode('utf-8'))
if b"Proxy-Authenticate" in header:
challenge_message = str(bytes(header).decode("utf-8"))
try:
token = challenge_message.split(': ')[1]
token = challenge_message.split(": ")[1]
except IndexError:
logging.error("Failed to extract challenge_message")
raise
return token
def patched_receive_handshake_data(self, data) -> layer.CommandGenerator[tuple[bool, Optional[str]]]:
def patched_receive_handshake_data(
self, data
) -> layer.CommandGenerator[tuple[bool, Optional[str]]]:
self.buf += data
response_head = self.buf.maybe_extract_lines()
if response_head:
@ -119,8 +133,14 @@ class NTLMUpstreamAuth:
else:
if not challenge_message:
return True, None
proxy_authorization = self.ntlm_context.get_ntlm_challenge_response_message(challenge_message)
self.flow = build_connect_flow(self.context, ("Proxy-Authorization", proxy_authorization))
proxy_authorization = (
self.ntlm_context.get_ntlm_challenge_response_message(
challenge_message
)
)
self.flow = build_connect_flow(
self.context, ("Proxy-Authorization", proxy_authorization)
)
raw = http1.assemble_request(self.flow.request)
yield commands.SendData(self.tunnel_connection, raw)
return False, None
@ -131,19 +151,19 @@ class NTLMUpstreamAuth:
HttpUpstreamProxy.receive_handshake_data = patched_receive_handshake_data
def done(self):
logging.info('close ntlm session')
logging.info("close ntlm session")
addons = [
NTLMUpstreamAuth()
]
addons = [NTLMUpstreamAuth()]
class CustomNTLMContext:
def __init__(self,
ctx,
preferred_type: str = 'NTLM',
cbt_data: gss_channel_bindings.GssChannelBindingsStruct = None):
def __init__(
self,
ctx,
preferred_type: str = "NTLM",
cbt_data: gss_channel_bindings.GssChannelBindingsStruct = None,
):
# TODO:// take care the cbt_data
auth: str = ctx.options.upstream_ntlm_auth
domain: str = str(ctx.options.upstream_ntlm_domain).upper()
@ -158,29 +178,39 @@ class CustomNTLMContext:
domain=domain,
workstation=workstation,
ntlm_compatibility=ntlm_compatibility,
cbt_data=cbt_data)
cbt_data=cbt_data,
)
def get_ntlm_start_negotiate_message(self) -> str:
negotiate_message = self.ntlm_context.step()
negotiate_message_base_64_in_bytes = base64.b64encode(negotiate_message)
negotiate_message_base_64_ascii = negotiate_message_base_64_in_bytes.decode("ascii")
negotiate_message_base_64_final = f'{self.preferred_type} {negotiate_message_base_64_ascii}'
negotiate_message_base_64_ascii = negotiate_message_base_64_in_bytes.decode(
"ascii"
)
negotiate_message_base_64_final = (
f"{self.preferred_type} {negotiate_message_base_64_ascii}"
)
logging.debug(
f'{self.preferred_type} Authentication, negotiate message: {negotiate_message_base_64_final}'
f"{self.preferred_type} Authentication, negotiate message: {negotiate_message_base_64_final}"
)
return negotiate_message_base_64_final
def get_ntlm_challenge_response_message(self, challenge_message: str) -> Any:
challenge_message = challenge_message.replace(self.preferred_type + " ", "", 1)
try:
challenge_message_ascii_bytes = base64.b64decode(challenge_message, validate=True)
challenge_message_ascii_bytes = base64.b64decode(
challenge_message, validate=True
)
except binascii.Error as err:
logging.debug(f'{self.preferred_type} Authentication fail with error {err.__str__()}')
logging.debug(
f"{self.preferred_type} Authentication fail with error {err.__str__()}"
)
return False
authenticate_message = self.ntlm_context.step(challenge_message_ascii_bytes)
negotiate_message_base_64 = '{} {}'.format(self.preferred_type,
base64.b64encode(authenticate_message).decode('ascii'))
negotiate_message_base_64 = "{} {}".format(
self.preferred_type, base64.b64encode(authenticate_message).decode("ascii")
)
logging.debug(
f'{self.preferred_type} Authentication, response to challenge message: {negotiate_message_base_64}'
f"{self.preferred_type} Authentication, response to challenge message: {negotiate_message_base_64}"
)
return negotiate_message_base_64


@ -18,4 +18,7 @@ Usage:
def load(l):
import pydevd_pycharm
pydevd_pycharm.settrace("localhost", port=5678, stdoutToServer=True, stderrToServer=True, suspend=False)
pydevd_pycharm.settrace(
"localhost", port=5678, stdoutToServer=True, stderrToServer=True, suspend=False
)


@ -58,12 +58,13 @@ class StreamSaver:
return data
if not self.fh:
self.path = datetime.fromtimestamp(self.flow.request.timestamp_start).strftime(
ctx.options.save_streamed_data)
self.path = self.path.replace('%+T', str(self.flow.request.timestamp_start))
self.path = self.path.replace('%+I', str(self.flow.client_conn.id))
self.path = self.path.replace('%+D', self.direction)
self.path = self.path.replace('%+C', self.flow.client_conn.address[0])
self.path = datetime.fromtimestamp(
self.flow.request.timestamp_start
).strftime(ctx.options.save_streamed_data)
self.path = self.path.replace("%+T", str(self.flow.request.timestamp_start))
self.path = self.path.replace("%+I", str(self.flow.client_conn.id))
self.path = self.path.replace("%+D", self.direction)
self.path = self.path.replace("%+C", self.flow.client_conn.address[0])
self.path = os.path.expanduser(self.path)
parent = Path(self.path).parent
@ -89,25 +90,27 @@ class StreamSaver:
def load(loader):
loader.add_option(
"save_streamed_data", Optional[str], None,
"save_streamed_data",
Optional[str],
None,
"Format string for saving streamed data to files. If set each streamed request or response is written "
"to a file with a name derived from the string. In addition to formating supported by python "
"strftime() (using the request start time) the code '%+T' is replaced with the time stamp of the request, "
"'%+D' by 'req' or 'rsp' depending on the direction of the data, '%+C' by the client IP addresses and "
"'%+I' by the client connection ID."
"'%+I' by the client connection ID.",
)
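A hypothetical invocation combining strftime() codes with the custom placeholders (the script filename is assumed):

mitmdump -s save_streamed_data.py --set save_streamed_data='~/streams/%Y%m%d-%+D-%+C-%+T'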
def requestheaders(flow):
if ctx.options.save_streamed_data and flow.request.stream:
flow.request.stream = StreamSaver(flow, 'req')
flow.request.stream = StreamSaver(flow, "req")
def responseheaders(flow):
if isinstance(flow.request.stream, StreamSaver):
flow.request.stream.done()
if ctx.options.save_streamed_data and flow.response.stream:
flow.response.stream = StreamSaver(flow, 'rsp')
flow.response.stream = StreamSaver(flow, "rsp")
def response(flow):


@ -3,20 +3,19 @@ import re
from collections.abc import Sequence
from json import dumps
from mitmproxy import command, flow
from mitmproxy import command
from mitmproxy import flow
MARKER = ':mag:'
RESULTS_STR = 'Search Results: '
MARKER = ":mag:"
RESULTS_STR = "Search Results: "
class Search:
def __init__(self):
self.exp = None
@command.command('search')
def _search(self,
flows: Sequence[flow.Flow],
regex: str) -> None:
@command.command("search")
def _search(self, flows: Sequence[flow.Flow], regex: str) -> None:
"""
Defines a command named "search" that matches
the given regular expression against most parts
@ -49,11 +48,11 @@ class Search:
for _flow in flows:
# Erase previous results while preserving other comments:
comments = list()
for c in _flow.comment.split('\n'):
for c in _flow.comment.split("\n"):
if c.startswith(RESULTS_STR):
break
comments.append(c)
_flow.comment = '\n'.join(comments)
_flow.comment = "\n".join(comments)
if _flow.marked == MARKER:
_flow.marked = False
@ -62,7 +61,7 @@ class Search:
if results:
comments.append(RESULTS_STR)
comments.append(dumps(results, indent=2))
_flow.comment = '\n'.join(comments)
_flow.comment = "\n".join(comments)
_flow.marked = MARKER
def header_results(self, message):
@ -71,22 +70,16 @@ class Search:
def flow_results(self, _flow):
results = dict()
results.update(
{'flow_comment': self.exp.findall(_flow.comment)})
results.update({"flow_comment": self.exp.findall(_flow.comment)})
if _flow.request is not None:
results.update(
{'request_path': self.exp.findall(_flow.request.path)})
results.update(
{'request_headers': self.header_results(_flow.request)})
results.update({"request_path": self.exp.findall(_flow.request.path)})
results.update({"request_headers": self.header_results(_flow.request)})
if _flow.request.text:
results.update(
{'request_body': self.exp.findall(_flow.request.text)})
results.update({"request_body": self.exp.findall(_flow.request.text)})
if _flow.response is not None:
results.update(
{'response_headers': self.header_results(_flow.response)})
results.update({"response_headers": self.header_results(_flow.response)})
if _flow.response.text:
results.update(
{'response_body': self.exp.findall(_flow.response.text)})
results.update({"response_body": self.exp.findall(_flow.response.text)})
return results


@ -12,15 +12,15 @@ secure_hosts: set[str] = set()
def request(flow: http.HTTPFlow) -> None:
flow.request.headers.pop('If-Modified-Since', None)
flow.request.headers.pop('Cache-Control', None)
flow.request.headers.pop("If-Modified-Since", None)
flow.request.headers.pop("Cache-Control", None)
# do not force https redirection
flow.request.headers.pop('Upgrade-Insecure-Requests', None)
flow.request.headers.pop("Upgrade-Insecure-Requests", None)
# proxy connections to SSL-enabled hosts
if flow.request.pretty_host in secure_hosts:
flow.request.scheme = 'https'
flow.request.scheme = "https"
flow.request.port = 443
# We need to update the request destination to whatever is specified in the host header:
@@ -31,32 +31,36 @@ def request(flow: http.HTTPFlow) -> None:
def response(flow: http.HTTPFlow) -> None:
assert flow.response
flow.response.headers.pop('Strict-Transport-Security', None)
flow.response.headers.pop('Public-Key-Pins', None)
flow.response.headers.pop("Strict-Transport-Security", None)
flow.response.headers.pop("Public-Key-Pins", None)
# strip links in response body
flow.response.content = flow.response.content.replace(b'https://', b'http://')
flow.response.content = flow.response.content.replace(b"https://", b"http://")
# strip meta tag upgrade-insecure-requests in response body
csp_meta_tag_pattern = br'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
flow.response.content = re.sub(csp_meta_tag_pattern, b'', flow.response.content, flags=re.IGNORECASE)
csp_meta_tag_pattern = rb'<meta.*http-equiv=["\']Content-Security-Policy[\'"].*upgrade-insecure-requests.*?>'
flow.response.content = re.sub(
csp_meta_tag_pattern, b"", flow.response.content, flags=re.IGNORECASE
)
# strip links in 'Location' header
if flow.response.headers.get('Location', '').startswith('https://'):
location = flow.response.headers['Location']
if flow.response.headers.get("Location", "").startswith("https://"):
location = flow.response.headers["Location"]
hostname = urllib.parse.urlparse(location).hostname
if hostname:
secure_hosts.add(hostname)
flow.response.headers['Location'] = location.replace('https://', 'http://', 1)
flow.response.headers["Location"] = location.replace("https://", "http://", 1)
# strip upgrade-insecure-requests in Content-Security-Policy header
csp_header = flow.response.headers.get('Content-Security-Policy', '')
if re.search('upgrade-insecure-requests', csp_header, flags=re.IGNORECASE):
csp = flow.response.headers['Content-Security-Policy']
new_header = re.sub(r'upgrade-insecure-requests[;\s]*', '', csp, flags=re.IGNORECASE)
flow.response.headers['Content-Security-Policy'] = new_header
csp_header = flow.response.headers.get("Content-Security-Policy", "")
if re.search("upgrade-insecure-requests", csp_header, flags=re.IGNORECASE):
csp = flow.response.headers["Content-Security-Policy"]
new_header = re.sub(
r"upgrade-insecure-requests[;\s]*", "", csp, flags=re.IGNORECASE
)
flow.response.headers["Content-Security-Policy"] = new_header
# strip secure flag from 'Set-Cookie' headers
cookies = flow.response.headers.get_all('Set-Cookie')
cookies = [re.sub(r';\s*secure\s*', '', s) for s in cookies]
flow.response.headers.set_all('Set-Cookie', cookies)
cookies = flow.response.headers.get_all("Set-Cookie")
cookies = [re.sub(r";\s*secure\s*", "", s) for s in cookies]
flow.response.headers.set_all("Set-Cookie", cookies)

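A hedged sketch of the redirect handling above, built with the test helpers used throughout this commit (the hostname is illustrative):

from mitmproxy.test import tflow
from mitmproxy.test import tutils

f = tflow.tflow(resp=tutils.tresp())
f.response.headers["Location"] = "https://secure.example.com/account"
response(f)
# The host is remembered for re-upgrading later requests, while the
# redirect handed to the client is downgraded to plain http.
assert "secure.example.com" in secure_hosts
assert f.response.headers["Location"].startswith("http://")
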
View File

@@ -10,7 +10,7 @@ from mitmproxy.exceptions import HttpSyntaxException
def error(self, flow: http.HTTPFlow):
"""Kills the flow if it has an error different to HTTPSyntaxException.
Sometimes, web scanners generate malformed HTTP syntax on purpose and we do not want to kill these requests.
Sometimes, web scanners generate malformed HTTP syntax on purpose and we do not want to kill these requests.
"""
if flow.error is not None and not isinstance(flow.error, HttpSyntaxException):
flow.kill()

View File

@@ -1,13 +1,13 @@
import json
from mitmproxy.net.http import cookies
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from mitmproxy.test import taddons
from mitmproxy.net.http import cookies
class TestHARDump:
def flow(self, resp_content=b'message'):
def flow(self, resp_content=b"message"):
times = dict(
timestamp_start=746203272,
timestamp_end=746203272,
@@ -15,8 +15,8 @@ class TestHARDump:
# Create a dummy flow for testing
return tflow.tflow(
req=tutils.treq(method=b'GET', **times),
resp=tutils.tresp(content=resp_content, **times)
req=tutils.treq(method=b"GET", **times),
resp=tutils.tresp(content=resp_content, **times),
)
def test_simple(self, tmpdir, tdata):
@@ -26,7 +26,7 @@ class TestHARDump:
a = tctx.script(tdata.path("../examples/contrib/har_dump.py"))
# check script is read without errors
assert tctx.master.logs == []
assert a.name_value # last function in har_dump.py
assert a.name_value # last function in har_dump.py
path = str(tmpdir.join("somefile"))
tctx.configure(a, hardump=path)
@@ -46,7 +46,9 @@ class TestHARDump:
a.done()
with open(path) as inp:
har = json.load(inp)
assert har["log"]["entries"][0]["response"]["content"]["encoding"] == "base64"
assert (
har["log"]["entries"][0]["response"]["content"]["encoding"] == "base64"
)
def test_format_cookies(self, tdata):
with taddons.context() as tctx:
@@ -55,17 +57,21 @@ class TestHARDump:
CA = cookies.CookieAttrs
f = a.format_cookies([("n", "v", CA([("k", "v")]))])[0]
assert f['name'] == "n"
assert f['value'] == "v"
assert not f['httpOnly']
assert not f['secure']
assert f["name"] == "n"
assert f["value"] == "v"
assert not f["httpOnly"]
assert not f["secure"]
f = a.format_cookies([("n", "v", CA([("httponly", None), ("secure", None)]))])[0]
assert f['httpOnly']
assert f['secure']
f = a.format_cookies(
[("n", "v", CA([("httponly", None), ("secure", None)]))]
)[0]
assert f["httpOnly"]
assert f["secure"]
f = a.format_cookies([("n", "v", CA([("expires", "Mon, 24-Aug-2037 00:00:00 GMT")]))])[0]
assert f['expires']
f = a.format_cookies(
[("n", "v", CA([("expires", "Mon, 24-Aug-2037 00:00:00 GMT")]))]
)[0]
assert f["expires"]
def test_binary(self, tmpdir, tdata):
with taddons.context() as tctx:

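The test above asserts that binary bodies are stored base64-encoded; a hedged sketch of consuming such an entry (the file path is assumed, field names follow the HAR 1.2 format):

import base64
import json

with open("dump.har") as inp:  # assumed path
    entry = json.load(inp)["log"]["entries"][0]
content = entry["response"]["content"]
if content.get("encoding") == "base64":
    body = base64.b64decode(content["text"])  # "text" per HAR 1.2
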
View File

@@ -1,21 +1,21 @@
import json
import base64
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from mitmproxy.test import taddons
import json
import requests_mock
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy.test import tutils
example_dir = tutils.test_data.push("../examples")
class TestJSONDump:
def echo_response(self, request, context):
self.request = {'json': request.json(), 'headers': request.headers}
return ''
self.request = {"json": request.json(), "headers": request.headers}
return ""
def flow(self, resp_content=b'message'):
def flow(self, resp_content=b"message"):
times = dict(
timestamp_start=746203272,
timestamp_end=746203272,
@@ -23,8 +23,8 @@ class TestJSONDump:
# Create a dummy flow for testing
return tflow.tflow(
req=tutils.treq(method=b'GET', **times),
resp=tutils.tresp(content=resp_content, **times)
req=tutils.treq(method=b"GET", **times),
resp=tutils.tresp(content=resp_content, **times),
)
def test_simple(self, tmpdir):
@@ -36,7 +36,7 @@ class TestJSONDump:
tctx.invoke(a, "done")
with open(path) as inp:
entry = json.loads(inp.readline())
assert entry['response']['content'] == 'message'
assert entry["response"]["content"] == "message"
def test_contentencode(self, tmpdir):
with taddons.context() as tctx:
@@ -45,24 +45,28 @@ class TestJSONDump:
content = b"foo" + b"\xFF" * 10
tctx.configure(a, dump_destination=path, dump_encodecontent=True)
tctx.invoke(
a, "response", self.flow(resp_content=content)
)
tctx.invoke(a, "response", self.flow(resp_content=content))
tctx.invoke(a, "done")
with open(path) as inp:
entry = json.loads(inp.readline())
assert entry['response']['content'] == base64.b64encode(content).decode('utf-8')
assert entry["response"]["content"] == base64.b64encode(content).decode(
"utf-8"
)
def test_http(self, tmpdir):
with requests_mock.Mocker() as mock:
mock.post('http://my-server', text=self.echo_response)
mock.post("http://my-server", text=self.echo_response)
with taddons.context() as tctx:
a = tctx.script(example_dir.path("complex/jsondump.py"))
tctx.configure(a, dump_destination='http://my-server',
dump_username='user', dump_password='pass')
tctx.configure(
a,
dump_destination="http://my-server",
dump_username="user",
dump_password="pass",
)
tctx.invoke(a, "response", self.flow())
tctx.invoke(a, "done")
assert self.request['json']['response']['content'] == 'message'
assert self.request['headers']['Authorization'] == 'Basic dXNlcjpwYXNz'
assert self.request["json"]["response"]["content"] == "message"
assert self.request["headers"]["Authorization"] == "Basic dXNlcjpwYXNz"

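As the tests above show by calling inp.readline(), the dump destination holds one JSON object per line; a hedged consumer sketch (the path is assumed):

import json

with open("dump.json") as inp:  # assumed path
    for line in inp:
        entry = json.loads(line)
        print(entry["response"]["content"])
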
View File

@@ -1,229 +1,331 @@
import pytest
import requests
from examples.complex import xss_scanner as xss
from mitmproxy.test import tflow, tutils
from mitmproxy.test import tflow
from mitmproxy.test import tutils
class TestXSSScanner():
class TestXSSScanner:
def test_get_XSS_info(self):
# First type of exploit: <script>PAYLOAD</script>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>%s</script><html>" %
xss.FULL_PAYLOAD,
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData('https://example.com',
"End of URL",
'</script><script>alert(0)</script><script>',
xss.FULL_PAYLOAD.decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><script>%s</script><html>" % xss.FULL_PAYLOAD,
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"</script><script>alert(0)</script><script>",
xss.FULL_PAYLOAD.decode("utf-8"),
)
assert xss_info == expected_xss_info
xss_info = xss.get_XSS_data(b"<html><script>%s</script><html>" %
xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b'"', b"%22"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
'</script><script>alert(0)</script><script>',
xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b'"', b"%22").decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><script>%s</script><html>"
% xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b'"', b"%22"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"</script><script>alert(0)</script><script>",
xss.FULL_PAYLOAD.replace(b"'", b"%27")
.replace(b'"', b"%22")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>%s</script><html>" %
xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b'"', b"%22").replace(b"/", b"%2F"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><script>%s</script><html>"
% xss.FULL_PAYLOAD.replace(b"'", b"%27")
.replace(b'"', b"%22")
.replace(b"/", b"%2F"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Second type of exploit: <script>t='PAYLOAD'</script>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>t='%s';</script></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").replace(b"\"", b"%22"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"';alert(0);g='",
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"\"", b"%22").decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><script>t='%s';</script></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b'"', b"%22"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"';alert(0);g='",
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b'"', b"%22")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>t='%s';</script></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b"\"", b"%22").replace(b"'", b"%22"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><script>t='%s';</script></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b'"', b"%22")
.replace(b"'", b"%22"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Third type of exploit: <script>t="PAYLOAD"</script>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>t=\"%s\";</script></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").replace(b"'", b"%27"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
'";alert(0);g="',
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"'", b"%27").decode('utf-8'))
xss_info = xss.get_XSS_data(
b'<html><script>t="%s";</script></html>'
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"'", b"%27"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
'";alert(0);g="',
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"'", b"%27")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><script>t=\"%s\";</script></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b"'", b"%27").replace(b"\"", b"%22"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b'<html><script>t="%s";</script></html>'
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b"'", b"%27")
.replace(b'"', b"%22"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Fourth type of exploit: <a href='PAYLOAD'>Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href='%s'>Test</a></html>" %
xss.FULL_PAYLOAD,
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"'><script>alert(0)</script>",
xss.FULL_PAYLOAD.decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><a href='%s'>Test</a></html>" % xss.FULL_PAYLOAD,
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"'><script>alert(0)</script>",
xss.FULL_PAYLOAD.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href='OtherStuff%s'>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"'", b"%27"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><a href='OtherStuff%s'>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"'", b"%27"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Fifth type of exploit: <a href="PAYLOAD">Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=\"%s\">Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"'", b"%27"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"\"><script>alert(0)</script>",
xss.FULL_PAYLOAD.replace(b"'", b"%27").decode('utf-8'))
xss_info = xss.get_XSS_data(
b'<html><a href="%s">Test</a></html>'
% xss.FULL_PAYLOAD.replace(b"'", b"%27"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
'"><script>alert(0)</script>',
xss.FULL_PAYLOAD.replace(b"'", b"%27").decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=\"OtherStuff%s\">Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b"\"", b"%22"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b'<html><a href="OtherStuff%s">Test</a></html>'
% xss.FULL_PAYLOAD.replace(b"'", b"%27").replace(b'"', b"%22"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Sixth type of exploit: <a href=PAYLOAD>Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=%s>Test</a></html>" %
xss.FULL_PAYLOAD,
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"><script>alert(0)</script>",
xss.FULL_PAYLOAD.decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><a href=%s>Test</a></html>" % xss.FULL_PAYLOAD,
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"><script>alert(0)</script>",
xss.FULL_PAYLOAD.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable
xss_info = xss.get_XSS_data(b"<html><a href=OtherStuff%s>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><a href=OtherStuff%s>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Seventh type of exploit: <html>PAYLOAD</html>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><b>%s</b></html>" %
xss.FULL_PAYLOAD,
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"<script>alert(0)</script>",
xss.FULL_PAYLOAD.decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><b>%s</b></html>" % xss.FULL_PAYLOAD,
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"<script>alert(0)</script>",
xss.FULL_PAYLOAD.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable
xss_info = xss.get_XSS_data(b"<html><b>%s</b></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").replace(b"/", b"%2F"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><b>%s</b></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"/", b"%2F"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Eighth type of exploit: <a href=PAYLOAD>Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=%s>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"Javascript:alert(0)",
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><a href=%s>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"Javascript:alert(0)",
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=OtherStuff%s>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><a href=OtherStuff%s>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Ninth type of exploit: <a href="STUFF PAYLOAD">Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=\"STUFF %s\">Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
'" onmouseover="alert(0)" t="',
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").decode('utf-8'))
xss_info = xss.get_XSS_data(
b'<html><a href="STUFF %s">Test</a></html>'
% xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
'" onmouseover="alert(0)" t="',
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=\"STUFF %s\">Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b'"', b"%22"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b'<html><a href="STUFF %s">Test</a></html>'
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b'"', b"%22"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Tenth type of exploit: <a href='STUFF PAYLOAD'>Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href='STUFF %s'>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
"' onmouseover='alert(0)' t='",
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><a href='STUFF %s'>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
"' onmouseover='alert(0)' t='",
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href='STUFF %s'>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"'", b"%22"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><a href='STUFF %s'>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"'", b"%22"),
"https://example.com",
"End of URL",
)
assert xss_info is None
# Eleventh type of exploit: <a href=STUFF_PAYLOAD>Test</a>
# Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=STUFF%s>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL")
expected_xss_info = xss.XSSData("https://example.com",
"End of URL",
" onmouseover=alert(0) t=",
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E").decode('utf-8'))
xss_info = xss.get_XSS_data(
b"<html><a href=STUFF%s>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E"),
"https://example.com",
"End of URL",
)
expected_xss_info = xss.XSSData(
"https://example.com",
"End of URL",
" onmouseover=alert(0) t=",
xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.decode("utf-8"),
)
assert xss_info == expected_xss_info
# Non-Exploitable:
xss_info = xss.get_XSS_data(b"<html><a href=STUFF_%s>Test</a></html>" %
xss.FULL_PAYLOAD.replace(b"<", b"%3C").replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL")
xss_info = xss.get_XSS_data(
b"<html><a href=STUFF_%s>Test</a></html>"
% xss.FULL_PAYLOAD.replace(b"<", b"%3C")
.replace(b">", b"%3E")
.replace(b"=", b"%3D"),
"https://example.com",
"End of URL",
)
assert xss_info is None
def test_get_SQLi_data(self):
sqli_data = xss.get_SQLi_data("<html>SQL syntax MySQL</html>",
"<html></html>",
"https://example.com",
"End of URL")
expected_sqli_data = xss.SQLiData("https://example.com",
"End of URL",
"SQL syntax.*MySQL",
"MySQL")
sqli_data = xss.get_SQLi_data(
"<html>SQL syntax MySQL</html>",
"<html></html>",
"https://example.com",
"End of URL",
)
expected_sqli_data = xss.SQLiData(
"https://example.com", "End of URL", "SQL syntax.*MySQL", "MySQL"
)
assert sqli_data == expected_sqli_data
sqli_data = xss.get_SQLi_data("<html>SQL syntax MySQL</html>",
"<html>SQL syntax MySQL</html>",
"https://example.com",
"End of URL")
sqli_data = xss.get_SQLi_data(
"<html>SQL syntax MySQL</html>",
"<html>SQL syntax MySQL</html>",
"https://example.com",
"End of URL",
)
assert sqli_data is None
def test_inside_quote(self):
@@ -233,9 +335,12 @@ class TestXSSScanner():
assert not xss.inside_quote("'", b"longStringNotInIt", 1, b"short")
def test_paths_to_text(self):
text = xss.paths_to_text("""<html><head><h1>STRING</h1></head>
text = xss.paths_to_text(
"""<html><head><h1>STRING</h1></head>
<script>STRING</script>
<a href=STRING></a></html>""", "STRING")
<a href=STRING></a></html>""",
"STRING",
)
expected_text = ["/html/head/h1", "/html/script"]
assert text == expected_text
assert xss.paths_to_text("""<html></html>""", "STRING") == []
@@ -244,114 +349,156 @@ class TestXSSScanner():
class MockResponse:
def __init__(self, html, headers=None, cookies=None):
self.text = html
return MockResponse("<html>%s</html>" % xss.FULL_PAYLOAD)
def mocked_requests_invuln(*args, headers=None, cookies=None):
class MockResponse:
def __init__(self, html, headers=None, cookies=None):
self.text = html
return MockResponse("<html></html>")
def test_test_end_of_url_injection(self, get_request_vuln):
xss_info = xss.test_end_of_URL_injection("<html></html>", "https://example.com/index.html", {})[0]
expected_xss_info = xss.XSSData('https://example.com/index.html/1029zxcs\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd',
'End of URL',
'<script>alert(0)</script>',
'1029zxcs\\\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd')
sqli_info = xss.test_end_of_URL_injection("<html></html>", "https://example.com/", {})[1]
xss_info = xss.test_end_of_URL_injection(
"<html></html>", "https://example.com/index.html", {}
)[0]
expected_xss_info = xss.XSSData(
"https://example.com/index.html/1029zxcs'd\"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd",
"End of URL",
"<script>alert(0)</script>",
"1029zxcs\\'d\"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd",
)
sqli_info = xss.test_end_of_URL_injection(
"<html></html>", "https://example.com/", {}
)[1]
assert xss_info == expected_xss_info
assert sqli_info is None
def test_test_referer_injection(self, get_request_vuln):
xss_info = xss.test_referer_injection("<html></html>", "https://example.com/", {})[0]
expected_xss_info = xss.XSSData('https://example.com/',
'Referer',
'<script>alert(0)</script>',
'1029zxcs\\\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd')
sqli_info = xss.test_referer_injection("<html></html>", "https://example.com/", {})[1]
xss_info = xss.test_referer_injection(
"<html></html>", "https://example.com/", {}
)[0]
expected_xss_info = xss.XSSData(
"https://example.com/",
"Referer",
"<script>alert(0)</script>",
"1029zxcs\\'d\"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd",
)
sqli_info = xss.test_referer_injection(
"<html></html>", "https://example.com/", {}
)[1]
assert xss_info == expected_xss_info
assert sqli_info is None
def test_test_user_agent_injection(self, get_request_vuln):
xss_info = xss.test_user_agent_injection("<html></html>", "https://example.com/", {})[0]
expected_xss_info = xss.XSSData('https://example.com/',
'User Agent',
'<script>alert(0)</script>',
'1029zxcs\\\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd')
sqli_info = xss.test_user_agent_injection("<html></html>", "https://example.com/", {})[1]
xss_info = xss.test_user_agent_injection(
"<html></html>", "https://example.com/", {}
)[0]
expected_xss_info = xss.XSSData(
"https://example.com/",
"User Agent",
"<script>alert(0)</script>",
"1029zxcs\\'d\"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd",
)
sqli_info = xss.test_user_agent_injection(
"<html></html>", "https://example.com/", {}
)[1]
assert xss_info == expected_xss_info
assert sqli_info is None
def test_test_query_injection(self, get_request_vuln):
xss_info = xss.test_query_injection("<html></html>", "https://example.com/vuln.php?cmd=ls", {})[0]
expected_xss_info = xss.XSSData('https://example.com/vuln.php?cmd=1029zxcs\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd',
'Query',
'<script>alert(0)</script>',
'1029zxcs\\\'d"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd')
sqli_info = xss.test_query_injection("<html></html>", "https://example.com/vuln.php?cmd=ls", {})[1]
xss_info = xss.test_query_injection(
"<html></html>", "https://example.com/vuln.php?cmd=ls", {}
)[0]
expected_xss_info = xss.XSSData(
"https://example.com/vuln.php?cmd=1029zxcs'd\"ao<ac>so[sb]po(pc)se;sl/bsl\\eq=3847asd",
"Query",
"<script>alert(0)</script>",
"1029zxcs\\'d\"ao<ac>so[sb]po(pc)se;sl/bsl\\\\eq=3847asd",
)
sqli_info = xss.test_query_injection(
"<html></html>", "https://example.com/vuln.php?cmd=ls", {}
)[1]
assert xss_info == expected_xss_info
assert sqli_info is None
@pytest.fixture(scope='function')
@pytest.fixture(scope="function")
def get_request_vuln(self, monkeypatch):
monkeypatch.setattr(requests, 'get', self.mocked_requests_vuln)
monkeypatch.setattr(requests, "get", self.mocked_requests_vuln)
@pytest.fixture(scope='function')
@pytest.fixture(scope="function")
def get_request_invuln(self, monkeypatch):
monkeypatch.setattr(requests, 'get', self.mocked_requests_invuln)
monkeypatch.setattr(requests, "get", self.mocked_requests_invuln)
@pytest.fixture(scope='function')
@pytest.fixture(scope="function")
def mock_gethostbyname(self, monkeypatch):
def gethostbyname(domain):
claimed_domains = ["google.com"]
if domain not in claimed_domains:
from socket import gaierror
raise gaierror("[Errno -2] Name or service not known")
else:
return '216.58.221.46'
return "216.58.221.46"
monkeypatch.setattr("socket.gethostbyname", gethostbyname)
def test_find_unclaimed_URLs(self, logger, mock_gethostbyname):
xss.find_unclaimed_URLs("<html><script src=\"http://google.com\"></script></html>",
"https://example.com")
xss.find_unclaimed_URLs(
'<html><script src="http://google.com"></script></html>',
"https://example.com",
)
assert logger.args == []
xss.find_unclaimed_URLs("<html><script src=\"http://unclaimedDomainName.com\"></script></html>",
"https://example.com")
assert logger.args[0] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
xss.find_unclaimed_URLs("<html><iframe src=\"http://unclaimedDomainName.com\"></iframe></html>",
"https://example.com")
assert logger.args[1] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
xss.find_unclaimed_URLs("<html><link rel=\"stylesheet\" href=\"http://unclaimedDomainName.com\"></html>",
"https://example.com")
assert logger.args[2] == 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
xss.find_unclaimed_URLs(
'<html><script src="http://unclaimedDomainName.com"></script></html>',
"https://example.com",
)
assert (
logger.args[0]
== 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
)
xss.find_unclaimed_URLs(
'<html><iframe src="http://unclaimedDomainName.com"></iframe></html>',
"https://example.com",
)
assert (
logger.args[1]
== 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
)
xss.find_unclaimed_URLs(
'<html><link rel="stylesheet" href="http://unclaimedDomainName.com"></html>',
"https://example.com",
)
assert (
logger.args[2]
== 'XSS found in https://example.com due to unclaimed URL "http://unclaimedDomainName.com".'
)
def test_log_XSS_data(self, logger):
xss.log_XSS_data(None)
assert logger.args == []
# self, url: str, injection_point: str, exploit: str, line: str
xss.log_XSS_data(xss.XSSData('https://example.com',
'Location',
'String',
'Line of HTML'))
assert logger.args[0] == '===== XSS Found ===='
assert logger.args[1] == 'XSS URL: https://example.com'
assert logger.args[2] == 'Injection Point: Location'
assert logger.args[3] == 'Suggested Exploit: String'
assert logger.args[4] == 'Line: Line of HTML'
xss.log_XSS_data(
xss.XSSData("https://example.com", "Location", "String", "Line of HTML")
)
assert logger.args[0] == "===== XSS Found ===="
assert logger.args[1] == "XSS URL: https://example.com"
assert logger.args[2] == "Injection Point: Location"
assert logger.args[3] == "Suggested Exploit: String"
assert logger.args[4] == "Line: Line of HTML"
def test_log_SQLi_data(self, logger):
xss.log_SQLi_data(None)
assert logger.args == []
xss.log_SQLi_data(xss.SQLiData('https://example.com',
'Location',
'Oracle.*Driver',
'Oracle'))
assert logger.args[0] == '===== SQLi Found ====='
assert logger.args[1] == 'SQLi URL: https://example.com'
assert logger.args[2] == 'Injection Point: Location'
assert logger.args[3] == 'Regex used: Oracle.*Driver'
xss.log_SQLi_data(
xss.SQLiData("https://example.com", "Location", "Oracle.*Driver", "Oracle")
)
assert logger.args[0] == "===== SQLi Found ====="
assert logger.args[1] == "SQLi URL: https://example.com"
assert logger.args[2] == "Injection Point: Location"
assert logger.args[3] == "Regex used: Oracle.*Driver"
def test_get_cookies(self):
mocked_req = tutils.treq()
@ -363,7 +510,7 @@ class TestXSSScanner():
def test_response(self, get_request_invuln, logger):
mocked_flow = tflow.tflow(
req=tutils.treq(path=b"index.html?q=1"),
resp=tutils.tresp(content=b'<html></html>')
resp=tutils.tresp(content=b"<html></html>"),
)
xss.response(mocked_flow)
assert logger.args == []

View File

@@ -17,10 +17,13 @@ Example:
import collections
import logging
import random
from abc import ABC, abstractmethod
from abc import ABC
from abc import abstractmethod
from enum import Enum
from mitmproxy import connection, ctx, tls
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import tls
from mitmproxy.utils import human
@@ -54,6 +57,7 @@ class ConservativeStrategy(TlsStrategy):
Conservative Interception Strategy - only intercept if there haven't been any failed attempts
in the history.
"""
def should_intercept(self, server_address: connection.Address) -> bool:
return InterceptionResult.FAILURE not in self.history[server_address]
@@ -62,6 +66,7 @@ class ProbabilisticStrategy(TlsStrategy):
"""
Fixed probability that we intercept a given connection.
"""
def __init__(self, p: float):
self.p = p
super().__init__()
@@ -75,7 +80,9 @@ class MaybeTls:
def load(self, l):
l.add_option(
"tls_strategy", int, 0,
"tls_strategy",
int,
0,
"TLS passthrough strategy. If set to 0, connections will be passed through after the first unsuccessful "
"handshake. If set to 0 < p <= 100, connections with be passed through with probability p.",
)
@@ -97,7 +104,9 @@ class MaybeTls:
def tls_established_client(self, data: tls.TlsData):
server_address = data.context.server.peername
logging.info(f"TLS handshake successful: {human.format_address(server_address)}")
logging.info(
f"TLS handshake successful: {human.format_address(server_address)}"
)
self.strategy.record_success(server_address)
def tls_failed_client(self, data: tls.TlsData):

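A hedged sketch of wiring up the tls_strategy option above; the script path is an assumption, and per the description 0 selects the conservative strategy while 0 < p <= 100 selects the probabilistic one:

from mitmproxy.test import taddons

with taddons.context() as tctx:
    addon = tctx.script("examples/contrib/tls_strategy.py")  # assumed path
    tctx.configure(addon, tls_strategy=50)  # pass through roughly half of all connections
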
View File

@@ -3,8 +3,8 @@ import logging
from bs4 import BeautifulSoup
from mitmproxy.http import HTTPFlow
from examples.contrib.webscanner_helper.urldict import URLDict
from mitmproxy.http import HTTPFlow
NO_CONTENT = object()
@@ -14,7 +14,7 @@ class MappingAddonConfig:
class MappingAddon:
""" The mapping add-on can be used in combination with web application scanners to reduce their false positives.
"""The mapping add-on can be used in combination with web application scanners to reduce their false positives.
Many web application scanners produce false positives caused by dynamically changing content of web applications
such as the current time or current measurements. When testing for injection vulnerabilities, web application
@@ -45,7 +45,7 @@ class MappingAddon:
"""Whether to store all new content in the configuration file."""
def __init__(self, filename: str, persistent: bool = False) -> None:
""" Initializes the mapping add-on
"""Initializes the mapping add-on
Args:
filename: str that provides the name of the file in which the urls and css selectors to mapped content is
@@ -71,12 +71,16 @@ class MappingAddon:
def load(self, loader):
loader.add_option(
self.OPT_MAPPING_FILE, str, "",
"File where replacement configuration is stored."
self.OPT_MAPPING_FILE,
str,
"",
"File where replacement configuration is stored.",
)
loader.add_option(
self.OPT_MAP_PERSISTENT, bool, False,
"Whether to store all new content in the configuration file."
self.OPT_MAP_PERSISTENT,
bool,
False,
"Whether to store all new content in the configuration file.",
)
def configure(self, updated):
@@ -88,23 +92,33 @@ class MappingAddon:
if self.OPT_MAP_PERSISTENT in updated:
self.persistent = updated[self.OPT_MAP_PERSISTENT]
def replace(self, soup: BeautifulSoup, css_sel: str, replace: BeautifulSoup) -> None:
def replace(
self, soup: BeautifulSoup, css_sel: str, replace: BeautifulSoup
) -> None:
"""Replaces the content of soup that matches the css selector with the given replace content."""
for content in soup.select(css_sel):
self.logger.debug(f"replace \"{content}\" with \"{replace}\"")
self.logger.debug(f'replace "{content}" with "{replace}"')
content.replace_with(copy.copy(replace))
def apply_template(self, soup: BeautifulSoup, template: dict[str, BeautifulSoup]) -> None:
def apply_template(
self, soup: BeautifulSoup, template: dict[str, BeautifulSoup]
) -> None:
"""Applies the given mapping template to the given soup."""
for css_sel, replace in template.items():
mapped = soup.select(css_sel)
if not mapped:
self.logger.warning(f"Could not find \"{css_sel}\", can not freeze anything.")
self.logger.warning(
f'Could not find "{css_sel}", can not freeze anything.'
)
else:
self.replace(soup, css_sel, BeautifulSoup(replace, features=MappingAddonConfig.HTML_PARSER))
self.replace(
soup,
css_sel,
BeautifulSoup(replace, features=MappingAddonConfig.HTML_PARSER),
)
def response(self, flow: HTTPFlow) -> None:
"""If a response is received, check if we should replace some content. """
"""If a response is received, check if we should replace some content."""
try:
templates = self.mapping_templates[flow]
res = flow.response
@@ -118,7 +132,9 @@ class MappingAddon:
self.apply_template(content, template)
res.content = content.encode(encoding)
else:
self.logger.warning(f"Unsupported content type '{content_type}' or content encoding '{encoding}'")
self.logger.warning(
f"Unsupported content type '{content_type}' or content encoding '{encoding}'"
)
except KeyError:
pass

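A hedged sketch of a mapping file for the add-on above; the {url: {css selector: replacement}} shape follows the mapping_content literal used in this commit's tests, with illustrative values:

import json

mapping = {
    "http://example.com/dashboard": {
        # Freeze a dynamically changing element to fixed content.
        "body p.current-time": '<p class="current-time">frozen</p>',
    }
}
with open("mapping.json", "w") as f:  # assumed filename
    json.dump(mapping, f)

addon = MappingAddon("mapping.json", persistent=False)
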
View File

@@ -3,13 +3,15 @@ import logging
import random
import string
import time
from typing import Any, cast
from typing import Any
from typing import cast
from selenium import webdriver
import mitmproxy.http
from mitmproxy import flowfilter
from mitmproxy import master
from mitmproxy.script import concurrent
from selenium import webdriver
logger = logging.getLogger(__name__)
@@ -18,14 +20,14 @@ cookie_key_name = {
"expires": "Expires",
"domain": "Domain",
"is_http_only": "HttpOnly",
"is_secure": "Secure"
"is_secure": "Secure",
}
def randomString(string_length=10):
"""Generate a random string of fixed length """
"""Generate a random string of fixed length"""
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(string_length))
return "".join(random.choice(letters) for i in range(string_length))
class AuthorizationOracle(abc.ABC):
@@ -41,7 +43,7 @@ class AuthorizationOracle(abc.ABC):
class SeleniumAddon:
""" This Addon can be used in combination with web application scanners in order to help them to authenticate
"""This Addon can be used in combination with web application scanners in order to help them to authenticate
against a web application.
Since the authentication is highly dependent on the web application, this add-on includes the abstract method
@@ -50,8 +52,7 @@ class SeleniumAddon:
application. In addition, an authentication oracle which inherits from AuthorizationOracle should be created.
"""
def __init__(self, fltr: str, domain: str,
auth_oracle: AuthorizationOracle):
def __init__(self, fltr: str, domain: str, auth_oracle: AuthorizationOracle):
self.filter = flowfilter.parse(fltr)
self.auth_oracle = auth_oracle
self.domain = domain
@@ -62,9 +63,8 @@ class SeleniumAddon:
options.headless = True
profile = webdriver.FirefoxProfile()
profile.set_preference('network.proxy.type', 0)
self.browser = webdriver.Firefox(firefox_profile=profile,
options=options)
profile.set_preference("network.proxy.type", 0)
self.browser = webdriver.Firefox(firefox_profile=profile, options=options)
self.cookies: list[dict[str, str]] = []
def _login(self, flow):
@@ -76,7 +76,9 @@
def request(self, flow: mitmproxy.http.HTTPFlow):
if flow.request.is_replay:
logger.warning("Caught replayed request: " + str(flow))
if (not self.filter or self.filter(flow)) and self.auth_oracle.is_unauthorized_request(flow):
if (
not self.filter or self.filter(flow)
) and self.auth_oracle.is_unauthorized_request(flow):
logger.debug("unauthorized request detected, perform login")
self._login(flow)
@@ -88,7 +90,7 @@
if self.auth_oracle.is_unauthorized_response(flow):
self._login(flow)
new_flow = flow.copy()
if master and hasattr(master, 'commands'):
if master and hasattr(master, "commands"):
# cast necessary for mypy
cast(Any, master).commands.call("replay.client", [new_flow])
count = 0
@@ -99,7 +101,9 @@
if new_flow.response:
flow.response = new_flow.response
else:
logger.warning("Could not call 'replay.client' command since master was not initialized yet.")
logger.warning(
"Could not call 'replay.client' command since master was not initialized yet."
)
if self.set_cookies and flow.response:
logger.debug("set set-cookie header for response")
@@ -124,7 +128,8 @@
def _set_request_cookies(self, flow: mitmproxy.http.HTTPFlow):
if self.cookies:
cookies = "; ".join(
map(lambda c: f"{c['name']}={c['value']}", self.cookies))
map(lambda c: f"{c['name']}={c['value']}", self.cookies)
)
flow.request.headers["cookie"] = cookies
@abc.abstractmethod

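A hedged sketch of the user-supplied pieces described in the docstring above: an oracle whose method names match the calls in request/response, and a subclass implementing login. The heuristics and URLs are illustrative, and constructing the add-on starts a headless Firefox, as in __init__ above:

class StatusCodeOracle(AuthorizationOracle):
    def is_unauthorized_request(self, flow) -> bool:
        # Illustrative heuristic: no session cookie means not logged in.
        return "cookie" not in flow.request.headers

    def is_unauthorized_response(self, flow) -> bool:
        return flow.response is not None and flow.response.status_code in (401, 403)


class MyLoginAddon(SeleniumAddon):
    def login(self):
        # Drive the headless browser through the login form, then hand the
        # collected session cookies back to the add-on.
        self.browser.get("http://example.com/login.php")
        return self.browser.get_cookies()


addon = MyLoginAddon(
    fltr=r"~u http://example\.com/login\.php",
    domain=r"~d http://example\.com",
    auth_oracle=StatusCodeOracle(),
)
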
View File

@@ -1,15 +1,15 @@
from typing import TextIO, Callable
from typing import Callable
from typing import TextIO
from unittest import mock
from unittest.mock import MagicMock
from examples.contrib.webscanner_helper.mapping import MappingAddon
from examples.contrib.webscanner_helper.mapping import MappingAddonConfig
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from examples.contrib.webscanner_helper.mapping import MappingAddon, MappingAddonConfig
class TestConfig:
def test_config(self):
assert MappingAddonConfig.HTML_PARSER == "html.parser"
@@ -20,7 +20,6 @@ mapping_content = f'{{"{url}": {{"body": "{new_content}"}}}}'
class TestMappingAddon:
def test_init(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
@@ -36,8 +35,8 @@ class TestMappingAddon:
loader = MagicMock()
mapping.load(loader)
assert 'mapping_file' in str(loader.add_option.call_args_list)
assert 'map_persistent' in str(loader.add_option.call_args_list)
assert "mapping_file" in str(loader.add_option.call_args_list)
assert "map_persistent" in str(loader.add_option.call_args_list)
def test_configure(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
@@ -45,7 +44,10 @@ class TestMappingAddon:
tfile.write(mapping_content)
mapping = MappingAddon(tmpfile)
new_filename = "My new filename"
updated = {str(mapping.OPT_MAPPING_FILE): new_filename, str(mapping.OPT_MAP_PERSISTENT): True}
updated = {
str(mapping.OPT_MAPPING_FILE): new_filename,
str(mapping.OPT_MAP_PERSISTENT): True,
}
open_mock = mock.mock_open(read_data="{}")
with mock.patch("builtins.open", open_mock):
@@ -161,5 +163,8 @@ class TestMappingAddon:
with open(tmpfile, "w") as tfile:
tfile.write("{}")
mapping = MappingAddon(tmpfile, persistent=True)
with mock.patch('examples.complex.webscanner_helper.urldict.URLDict.dump', selfself.mock_dump):
with mock.patch(
"examples.complex.webscanner_helper.urldict.URLDict.dump",
selfself.mock_dump,
):
mapping.done()

View File

@@ -3,16 +3,16 @@ from unittest.mock import MagicMock
import pytest
from examples.contrib.webscanner_helper.proxyauth_selenium import AuthorizationOracle
from examples.contrib.webscanner_helper.proxyauth_selenium import logger
from examples.contrib.webscanner_helper.proxyauth_selenium import randomString
from examples.contrib.webscanner_helper.proxyauth_selenium import SeleniumAddon
from mitmproxy.http import HTTPFlow
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from mitmproxy.http import HTTPFlow
from examples.contrib.webscanner_helper.proxyauth_selenium import logger, randomString, AuthorizationOracle, \
SeleniumAddon
class TestRandomString:
def test_random_string(self):
res = randomString()
assert isinstance(res, str)
@@ -36,8 +36,11 @@ oracle = AuthenticationOracleTest()
@pytest.fixture(scope="module", autouse=True)
def selenium_addon(request):
addon = SeleniumAddon(fltr=r"~u http://example\.com/login\.php", domain=r"~d http://example\.com",
auth_oracle=oracle)
addon = SeleniumAddon(
fltr=r"~u http://example\.com/login\.php",
domain=r"~d http://example\.com",
auth_oracle=oracle,
)
browser = MagicMock()
addon.browser = browser
yield addon
@@ -49,11 +52,10 @@ def selenium_addon(request):
class TestSeleniumAddon:
def test_request_replay(self, selenium_addon):
f = tflow.tflow(resp=tutils.tresp())
f.request.is_replay = True
with mock.patch.object(logger, 'warning') as mock_warning:
with mock.patch.object(logger, "warning") as mock_warning:
selenium_addon.request(f)
mock_warning.assert_called()
@@ -62,7 +64,7 @@ class TestSeleniumAddon:
f.request.url = "http://example.com/login.php"
selenium_addon.set_cookies = False
assert not selenium_addon.set_cookies
with mock.patch.object(logger, 'debug') as mock_debug:
with mock.patch.object(logger, "debug") as mock_debug:
selenium_addon.request(f)
mock_debug.assert_called()
assert selenium_addon.set_cookies
@@ -79,9 +81,11 @@ class TestSeleniumAddon:
f.request.url = "http://example.com/login.php"
selenium_addon.set_cookies = False
assert not selenium_addon.set_cookies
with mock.patch.object(logger, 'debug') as mock_debug:
with mock.patch('examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login',
return_value=[{"name": "cookie", "value": "test"}]) as mock_login:
with mock.patch.object(logger, "debug") as mock_debug:
with mock.patch(
"examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
return_value=[{"name": "cookie", "value": "test"}],
) as mock_login:
selenium_addon.request(f)
mock_debug.assert_called()
assert selenium_addon.set_cookies
@@ -95,7 +99,7 @@ class TestSeleniumAddon:
selenium_addon.set_cookies = False
assert not selenium_addon.set_cookies
with mock.patch.object(logger, 'debug') as mock_debug:
with mock.patch.object(logger, "debug") as mock_debug:
selenium_addon.request(f)
mock_debug.assert_called()
selenium_addon.filter = fltr
@@ -105,8 +109,10 @@ class TestSeleniumAddon:
f = tflow.tflow(resp=tutils.tresp())
f.request.url = "http://example.com/login.php"
selenium_addon.set_cookies = False
with mock.patch('examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login',
return_value=[]) as mock_login:
with mock.patch(
"examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
return_value=[],
) as mock_login:
selenium_addon.response(f)
mock_login.assert_called()
@@ -114,7 +120,9 @@ class TestSeleniumAddon:
f = tflow.tflow(resp=tutils.tresp())
f.request.url = "http://example.com/login.php"
selenium_addon.set_cookies = False
with mock.patch('examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login',
return_value=[{"name": "cookie", "value": "test"}]) as mock_login:
with mock.patch(
"examples.complex.webscanner_helper.proxyauth_selenium.SeleniumAddon.login",
return_value=[{"name": "cookie", "value": "test"}],
) as mock_login:
selenium_addon.response(f)
mock_login.assert_called()

View File

@@ -1,5 +1,6 @@
from mitmproxy.test import tflow, tutils
from examples.contrib.webscanner_helper.urldict import URLDict
from mitmproxy.test import tflow
from mitmproxy.test import tutils
url = "http://10.10.10.10"
new_content_body = "New Body"
@@ -11,11 +12,10 @@ input_file_content_error = f'{{"{url_error}": {content}}}'
class TestUrlDict:
def test_urldict_empty(self):
urldict = URLDict()
dump = urldict.dumps()
assert dump == '{}'
assert dump == "{}"
def test_urldict_loads(self):
urldict = URLDict.loads(input_file_content)

View File

@@ -4,17 +4,18 @@ from pathlib import Path
from unittest import mock
from unittest.mock import patch
from examples.contrib.webscanner_helper.urlindex import filter_404
from examples.contrib.webscanner_helper.urlindex import JSONUrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import SetEncoder
from examples.contrib.webscanner_helper.urlindex import TextUrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import UrlIndexAddon
from examples.contrib.webscanner_helper.urlindex import UrlIndexWriter
from examples.contrib.webscanner_helper.urlindex import WRITER
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from examples.contrib.webscanner_helper.urlindex import UrlIndexWriter, SetEncoder, JSONUrlIndexWriter, \
TextUrlIndexWriter, WRITER, \
filter_404, \
UrlIndexAddon
class TestBaseClass:
@patch.multiple(UrlIndexWriter, __abstractmethods__=set())
def test_base_class(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
@@ -25,14 +26,13 @@ class TestBaseClass:
class TestSetEncoder:
def test_set_encoder_set(self):
test_set = {"foo", "bar", "42"}
result = SetEncoder.default(SetEncoder(), test_set)
assert isinstance(result, list)
assert 'foo' in result
assert 'bar' in result
assert '42' in result
assert "foo" in result
assert "bar" in result
assert "42" in result
def test_set_encoder_str(self):
test_str = "test"
@@ -45,18 +45,18 @@ class TestSetEncoder:
class TestJSONUrlIndexWriter:
def test_load(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write(
"{\"http://example.com:80\": {\"/\": {\"GET\": [301]}}, \"http://www.example.com:80\": {\"/\": {\"GET\": [302]}}}")
'{"http://example.com:80": {"/": {"GET": [301]}}, "http://www.example.com:80": {"/": {"GET": [302]}}}'
)
writer = JSONUrlIndexWriter(filename=tmpfile)
writer.load()
assert 'http://example.com:80' in writer.host_urls
assert '/' in writer.host_urls['http://example.com:80']
assert 'GET' in writer.host_urls['http://example.com:80']['/']
assert 301 in writer.host_urls['http://example.com:80']['/']['GET']
assert "http://example.com:80" in writer.host_urls
assert "/" in writer.host_urls["http://example.com:80"]
assert "GET" in writer.host_urls["http://example.com:80"]["/"]
assert 301 in writer.host_urls["http://example.com:80"]["/"]["GET"]
def test_load_empty(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
@@ -102,7 +102,8 @@ class TestTestUrlIndexWriter:
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
tfile.write(
"2020-04-22T05:41:08.679231 STATUS: 200 METHOD: GET URL:http://example.com")
"2020-04-22T05:41:08.679231 STATUS: 200 METHOD: GET URL:http://example.com"
)
writer = TextUrlIndexWriter(filename=tmpfile)
writer.load()
assert True
@@ -173,7 +174,6 @@ class TestFilter:
class TestUrlIndexAddon:
def test_init(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
UrlIndexAddon(tmpfile)
@@ -202,7 +202,9 @@ class TestUrlIndexAddon:
tfile.write("")
url_index = UrlIndexAddon(tmpfile, append=False)
f = tflow.tflow(resp=tutils.tresp())
with mock.patch('examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url'):
with mock.patch(
"examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url"
):
url_index.response(f)
assert not Path(tmpfile).exists()
@@ -210,7 +212,9 @@ class TestUrlIndexAddon:
tmpfile = tmpdir.join("tmpfile")
url_index = UrlIndexAddon(tmpfile)
f = tflow.tflow(resp=tutils.tresp())
with mock.patch('examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url') as mock_add_url:
with mock.patch(
"examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.add_url"
) as mock_add_url:
url_index.response(f)
mock_add_url.assert_called()
@@ -229,6 +233,8 @@ class TestUrlIndexAddon:
def test_done(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
url_index = UrlIndexAddon(tmpfile)
with mock.patch('examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.save') as mock_save:
with mock.patch(
"examples.complex.webscanner_helper.urlindex.JSONUrlIndexWriter.save"
) as mock_save:
url_index.done()
mock_save.assert_called()

View File

@@ -1,20 +1,22 @@
import json
from unittest import mock
from examples.contrib.webscanner_helper.urlinjection import HTMLInjection
from examples.contrib.webscanner_helper.urlinjection import InjectionGenerator
from examples.contrib.webscanner_helper.urlinjection import logger
from examples.contrib.webscanner_helper.urlinjection import RobotsInjection
from examples.contrib.webscanner_helper.urlinjection import SitemapInjection
from examples.contrib.webscanner_helper.urlinjection import UrlInjectionAddon
from mitmproxy import flowfilter
from mitmproxy.test import tflow
from mitmproxy.test import tutils
from examples.contrib.webscanner_helper.urlinjection import InjectionGenerator, HTMLInjection, RobotsInjection, \
SitemapInjection, \
UrlInjectionAddon, logger
index = json.loads(
"{\"http://example.com:80\": {\"/\": {\"GET\": [301]}}, \"http://www.example.com:80\": {\"/test\": {\"POST\": [302]}}}")
'{"http://example.com:80": {"/": {"GET": [301]}}, "http://www.example.com:80": {"/test": {"POST": [302]}}}'
)
class TestInjectionGenerator:
def test_inject(self):
f = tflow.tflow(resp=tutils.tresp())
injection_generator = InjectionGenerator()
@@ -23,12 +25,11 @@ class TestInjectionGenerator:
class TestHTMLInjection:
def test_inject_not404(self):
html_injection = HTMLInjection()
f = tflow.tflow(resp=tutils.tresp())
with mock.patch.object(logger, 'warning') as mock_warning:
with mock.patch.object(logger, "warning") as mock_warning:
html_injection.inject(index, f)
assert mock_warning.called
@@ -57,12 +58,11 @@ class TestHTMLInjection:
class TestRobotsInjection:
def test_inject_not404(self):
robots_injection = RobotsInjection()
f = tflow.tflow(resp=tutils.tresp())
with mock.patch.object(logger, 'warning') as mock_warning:
with mock.patch.object(logger, "warning") as mock_warning:
robots_injection.inject(index, f)
assert mock_warning.called
@@ -76,12 +76,11 @@ class TestRobotsInjection:
class TestSitemapInjection:
def test_inject_not404(self):
sitemap_injection = SitemapInjection()
f = tflow.tflow(resp=tutils.tresp())
with mock.patch.object(logger, 'warning') as mock_warning:
with mock.patch.object(logger, "warning") as mock_warning:
sitemap_injection.inject(index, f)
assert mock_warning.called
@@ -89,19 +88,22 @@ class TestSitemapInjection:
sitemap_injection = SitemapInjection()
f = tflow.tflow(resp=tutils.tresp())
f.response.status_code = 404
assert "<url><loc>http://example.com:80/</loc></url>" not in str(f.response.content)
assert "<url><loc>http://example.com:80/</loc></url>" not in str(
f.response.content
)
sitemap_injection.inject(index, f)
assert "<url><loc>http://example.com:80/</loc></url>" in str(f.response.content)
class TestUrlInjectionAddon:
def test_init(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
json.dump(index, tfile)
flt = f"~u .*/site.html$"
url_injection = UrlInjectionAddon(f"~u .*/site.html$", tmpfile, HTMLInjection(insert=True))
url_injection = UrlInjectionAddon(
f"~u .*/site.html$", tmpfile, HTMLInjection(insert=True)
)
assert "http://example.com:80" in url_injection.url_store
fltr = flowfilter.parse(flt)
f = tflow.tflow(resp=tutils.tresp())

View File

@@ -1,18 +1,17 @@
import multiprocessing
import time
from pathlib import Path
from unittest import mock
from examples.contrib.webscanner_helper.watchdog import logger
from examples.contrib.webscanner_helper.watchdog import WatchdogAddon
from mitmproxy.connections import ServerConnection
from mitmproxy.exceptions import HttpSyntaxException
from mitmproxy.test import tflow
from mitmproxy.test import tutils
import multiprocessing
from examples.contrib.webscanner_helper.watchdog import WatchdogAddon, logger
class TestWatchdog:
def test_init_file(self, tmpdir):
tmpfile = tmpdir.join("tmpfile")
with open(tmpfile, "w") as tfile:
@@ -35,14 +34,18 @@ class TestWatchdog:
def test_serverconnect(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir), timeout=10)
with mock.patch('mitmproxy.connections.ServerConnection.settimeout') as mock_set_timeout:
with mock.patch(
"mitmproxy.connections.ServerConnection.settimeout"
) as mock_set_timeout:
w.serverconnect(ServerConnection("127.0.0.1"))
mock_set_timeout.assert_called()
def test_serverconnect_None(self, tmpdir):
event = multiprocessing.Event()
w = WatchdogAddon(event, Path(tmpdir))
with mock.patch('mitmproxy.connections.ServerConnection.settimeout') as mock_set_timeout:
with mock.patch(
"mitmproxy.connections.ServerConnection.settimeout"
) as mock_set_timeout:
w.serverconnect(ServerConnection("127.0.0.1"))
assert not mock_set_timeout.called
@@ -52,7 +55,7 @@ class TestWatchdog:
f = tflow.tflow(resp=tutils.tresp())
f.error = "Test Error"
with mock.patch.object(logger, 'error') as mock_error:
with mock.patch.object(logger, "error") as mock_error:
open_mock = mock.mock_open()
with mock.patch("pathlib.Path.open", open_mock, create=True):
w.error(f)
@@ -66,7 +69,7 @@ class TestWatchdog:
f.error = HttpSyntaxException()
assert isinstance(f.error, HttpSyntaxException)
with mock.patch.object(logger, 'error') as mock_error:
with mock.patch.object(logger, "error") as mock_error:
open_mock = mock.mock_open()
with mock.patch("pathlib.Path.open", open_mock, create=True):
w.error(f)
@@ -79,6 +82,6 @@ class TestWatchdog:
assert w.not_in_timeout(None, None)
assert w.not_in_timeout(time.time, None)
with mock.patch('time.time', return_value=5):
with mock.patch("time.time", return_value=5):
assert not w.not_in_timeout(3, 20)
assert w.not_in_timeout(3, 1)

View File

@@ -1,7 +1,12 @@
import itertools
import json
from collections.abc import Generator
from collections.abc import MutableMapping
from typing import Any, Callable, Generator, TextIO, Union, cast
from typing import Any
from typing import Callable
from typing import cast
from typing import TextIO
from typing import Union
from mitmproxy import flowfilter
from mitmproxy.http import HTTPFlow
@ -76,7 +81,7 @@ class URLDict(MutableMapping):
def _dump(self, value_dumper: Callable = f_id) -> dict:
dumped: dict[Union[flowfilter.TFilter, str], Any] = {}
for fltr, value in self.store.items():
if hasattr(fltr, 'pattern'):
if hasattr(fltr, "pattern"):
# cast necessary for mypy
dumped[cast(Any, fltr).pattern] = value_dumper(value)
else:

View File

@ -3,7 +3,8 @@ import datetime
import json
import logging
from pathlib import Path
from typing import Optional, Union
from typing import Optional
from typing import Union
from mitmproxy import flowfilter
from mitmproxy.http import HTTPFlow
@ -67,7 +68,9 @@ class JSONUrlIndexWriter(UrlIndexWriter):
res = flow.response
if req is not None and res is not None:
urls = self.host_urls.setdefault(f"{req.scheme}://{req.host}:{req.port}", dict())
urls = self.host_urls.setdefault(
f"{req.scheme}://{req.host}:{req.port}", dict()
)
methods = urls.setdefault(req.path, {})
codes = methods.setdefault(req.method, set())
codes.add(res.status_code)
@ -88,8 +91,10 @@ class TextUrlIndexWriter(UrlIndexWriter):
req = flow.request
if res is not None and req is not None:
with self.filepath.open("a+") as f:
f.write(f"{datetime.datetime.utcnow().isoformat()} STATUS: {res.status_code} METHOD: "
f"{req.method} URL:{req.url}\n")
f.write(
f"{datetime.datetime.utcnow().isoformat()} STATUS: {res.status_code} METHOD: "
f"{req.method} URL:{req.url}\n"
)
def save(self):
pass
@ -120,9 +125,14 @@ class UrlIndexAddon:
OPT_APPEND = "URLINDEX_APPEND"
OPT_INDEX_FILTER = "URLINDEX_FILTER"
def __init__(self, file_path: Union[str, Path], append: bool = True,
index_filter: Union[str, flowfilter.TFilter] = filter_404, index_format: str = "json"):
""" Initializes the urlindex add-on.
def __init__(
self,
file_path: Union[str, Path],
append: bool = True,
index_filter: Union[str, flowfilter.TFilter] = filter_404,
index_format: str = "json",
):
"""Initializes the urlindex add-on.
Args:
file_path: Path to file to which the URL index will be written. Can either be given as str or Path.
@ -153,7 +163,7 @@ class UrlIndexAddon:
def response(self, flow: HTTPFlow):
"""Checks if the response should be included in the URL index based on the index_filter and adds it to the index
if appropriate.
if appropriate.
"""
if isinstance(self.index_filter, str) or self.index_filter is None:
raise ValueError("Invalid filter expression.")
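For orientation, the reshaped constructor above takes a file path, an append flag, a flow filter and an output format. A minimal usage sketch, assuming the module sits next to watchdog.py as urlindex.py (untested):

from examples.contrib.webscanner_helper.urlindex import UrlIndexAddon

# Record every response passing the default filter_404 into a JSON index.
addons = [
    UrlIndexAddon("url_index.json", append=True, index_format="json"),
]

Loaded with mitmproxy -s script.py, matching responses are folded into the scheme://host:port -> path -> method -> status-code mapping that JSONUrlIndexWriter builds above.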

View File

@ -11,6 +11,7 @@ logger = logging.getLogger(__name__)
class InjectionGenerator:
"""Abstract class for a generator of the injection content used to inject the URL index."""
ENCODING = "UTF8"
@abc.abstractmethod
@ -32,11 +33,11 @@ class HTMLInjection(InjectionGenerator):
@classmethod
def _form_html(cls, url):
return f"<form action=\"{url}\" method=\"POST\"></form>"
return f'<form action="{url}" method="POST"></form>'
@classmethod
def _link_html(cls, url):
return f"<a href=\"{url}\">link to {url}</a>"
return f'<a href="{url}">link to {url}</a>'
@classmethod
def index_html(cls, index):
@ -54,9 +55,9 @@ class HTMLInjection(InjectionGenerator):
@classmethod
def landing_page(cls, index):
return (
"<head><meta charset=\"UTF-8\"></head><body>"
+ cls.index_html(index)
+ "</body>"
'<head><meta charset="UTF-8"></head><body>'
+ cls.index_html(index)
+ "</body>"
)
def inject(self, index, flow: HTTPFlow):
@ -64,19 +65,21 @@ class HTMLInjection(InjectionGenerator):
if flow.response.status_code != 404 and not self.insert:
logger.warning(
f"URL '{flow.request.url}' didn't return 404 status, "
f"index page would overwrite valid page.")
f"index page would overwrite valid page."
)
elif self.insert:
content = (flow.response
.content
.decode(self.ENCODING, "backslashreplace"))
content = flow.response.content.decode(
self.ENCODING, "backslashreplace"
)
if "</body>" in content:
content = content.replace("</body>", self.index_html(index) + "</body>")
content = content.replace(
"</body>", self.index_html(index) + "</body>"
)
else:
content += self.index_html(index)
flow.response.content = content.encode(self.ENCODING)
else:
flow.response.content = (self.landing_page(index)
.encode(self.ENCODING))
flow.response.content = self.landing_page(index).encode(self.ENCODING)
class RobotsInjection(InjectionGenerator):
@ -98,11 +101,12 @@ class RobotsInjection(InjectionGenerator):
if flow.response.status_code != 404:
logger.warning(
f"URL '{flow.request.url}' didn't return 404 status, "
f"index page would overwrite valid page.")
f"index page would overwrite valid page."
)
else:
flow.response.content = self.robots_txt(index,
self.directive).encode(
self.ENCODING)
flow.response.content = self.robots_txt(index, self.directive).encode(
self.ENCODING
)
class SitemapInjection(InjectionGenerator):
@ -111,7 +115,8 @@ class SitemapInjection(InjectionGenerator):
@classmethod
def sitemap(cls, index):
lines = [
"<?xml version=\"1.0\" encoding=\"UTF-8\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">"]
'<?xml version="1.0" encoding="UTF-8"?><urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
]
for scheme_netloc, paths in index.items():
for path, methods in paths.items():
url = scheme_netloc + path
@ -124,13 +129,14 @@ class SitemapInjection(InjectionGenerator):
if flow.response.status_code != 404:
logger.warning(
f"URL '{flow.request.url}' didn't return 404 status, "
f"index page would overwrite valid page.")
f"index page would overwrite valid page."
)
else:
flow.response.content = self.sitemap(index).encode(self.ENCODING)
class UrlInjectionAddon:
""" The UrlInjection add-on can be used in combination with web application scanners to improve their crawling
"""The UrlInjection add-on can be used in combination with web application scanners to improve their crawling
performance.
The given URLs will be injected into the web application. With this, web application scanners can find pages to
@ -143,8 +149,9 @@ class UrlInjectionAddon:
The URL index needed for the injection can be generated by the UrlIndex Add-on.
"""
def __init__(self, flt: str, url_index_file: str,
injection_gen: InjectionGenerator):
def __init__(
self, flt: str, url_index_file: str, injection_gen: InjectionGenerator
):
"""Initializes the UrlInjection add-on.
Args:
@ -168,5 +175,7 @@ class UrlInjectionAddon:
self.injection_gen.inject(self.url_store, flow)
flow.response.status_code = 200
flow.response.headers["content-type"] = "text/html"
logger.debug(f"Set status code to 200 and set content to logged "
f"urls. Method: {self.injection_gen}")
logger.debug(
f"Set status code to 200 and set content to logged "
f"urls. Method: {self.injection_gen}"
)
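Putting the pieces together: the add-on loads a URL index from disk and hands each matching flow to one of the injectors above. A sketch reusing the constructor call from the tests earlier in this diff (module path assumed to be urlinjection.py next to watchdog.py):

from examples.contrib.webscanner_helper.urlinjection import (
    HTMLInjection,
    UrlInjectionAddon,
)

addons = [
    # Serve an injected page for flows matching the filter expression.
    UrlInjectionAddon(
        "~u .*/site.html$", "url_index.json", HTMLInjection(insert=True)
    ),
]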

View File

@ -1,19 +1,20 @@
import logging
import pathlib
import time
import logging
from datetime import datetime
from typing import Union
import mitmproxy.connections
import mitmproxy.http
from mitmproxy.addons.export import curl_command, raw
from mitmproxy.addons.export import curl_command
from mitmproxy.addons.export import raw
from mitmproxy.exceptions import HttpSyntaxException
logger = logging.getLogger(__name__)
class WatchdogAddon():
""" The Watchdog Add-on can be used in combination with web application scanners in order to check if the device
class WatchdogAddon:
"""The Watchdog Add-on can be used in combination with web application scanners in order to check if the device
under test responds correctly to the scanner's requests.
The Watchdog Add-on checks if the device under test responds correctly to the scanner's requests.
@ -45,10 +46,14 @@ class WatchdogAddon():
@classmethod
def not_in_timeout(cls, last_triggered, timeout):
"""Checks if current error lies not in timeout after last trigger (potential reset of connection)."""
return last_triggered is None or timeout is None or (time.time() - last_triggered > timeout)
return (
last_triggered is None
or timeout is None
or (time.time() - last_triggered > timeout)
)
def error(self, flow):
""" Checks if the watchdog will be triggered.
"""Checks if the watchdog will be triggered.
Only triggers the watchdog for timeouts after the last reset and if flow.error is set (which shows that the error is a server
error). Ignores HttpSyntaxException errors since these can be triggered on purpose by the web application scanner.
@ -56,8 +61,11 @@ class WatchdogAddon():
Args:
flow: mitmproxy.http.flow
"""
if (self.not_in_timeout(self.last_trigger, self.timeout)
and flow.error is not None and not isinstance(flow.error, HttpSyntaxException)):
if (
self.not_in_timeout(self.last_trigger, self.timeout)
and flow.error is not None
and not isinstance(flow.error, HttpSyntaxException)
):
self.last_trigger = time.time()
logger.error(f"Watchdog triggered! Cause: {flow}")
@ -65,7 +73,11 @@ class WatchdogAddon():
# save the request which might have caused the problem
if flow.request:
with (self.flow_dir / f"{datetime.utcnow().isoformat()}.curl").open("w") as f:
with (self.flow_dir / f"{datetime.utcnow().isoformat()}.curl").open(
"w"
) as f:
f.write(curl_command(flow))
with (self.flow_dir / f"{datetime.utcnow().isoformat()}.raw").open("wb") as f:
with (self.flow_dir / f"{datetime.utcnow().isoformat()}.raw").open(
"wb"
) as f:
f.write(raw(flow))
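As the tests above show, the add-on is constructed with a multiprocessing.Event, presumably signalled when the watchdog fires, so an external scanner wrapper can wait on it and reset the device under test. A minimal sketch along those lines:

import multiprocessing
from pathlib import Path

from examples.contrib.webscanner_helper.watchdog import WatchdogAddon

trigger = multiprocessing.Event()
# Offending requests are dumped into this directory as .curl/.raw files.
addons = [WatchdogAddon(trigger, Path("./watchdog_flows"), timeout=10)]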

View File

@ -38,7 +38,9 @@ import logging
import re
import socket
from html.parser import HTMLParser
from typing import NamedTuple, Optional, Union
from typing import NamedTuple
from typing import Optional
from typing import Union
from urllib.parse import urlparse
import requests
@ -82,14 +84,14 @@ Cookies = dict[str, str]
def get_cookies(flow: http.HTTPFlow) -> Cookies:
""" Return a dict going from cookie names to cookie values
- Note that it includes both the cookies sent in the original request and
the cookies sent by the server """
"""Return a dict going from cookie names to cookie values
- Note that it includes both the cookies sent in the original request and
the cookies sent by the server"""
return {name: value for name, value in flow.request.cookies.fields}
def find_unclaimed_URLs(body, requestUrl):
""" Look for unclaimed URLs in script tags and log them if found"""
"""Look for unclaimed URLs in script tags and log them if found"""
def getValue(attrs: list[tuple[str, str]], attrName: str) -> Optional[str]:
for name, value in attrs:
@ -101,9 +103,15 @@ def find_unclaimed_URLs(body, requestUrl):
script_URLs: list[str] = []
def handle_starttag(self, tag, attrs):
if (tag == "script" or tag == "iframe") and "src" in [name for name, value in attrs]:
if (tag == "script" or tag == "iframe") and "src" in [
name for name, value in attrs
]:
self.script_URLs.append(getValue(attrs, "src"))
if tag == "link" and getValue(attrs, "rel") == "stylesheet" and "href" in [name for name, value in attrs]:
if (
tag == "link"
and getValue(attrs, "rel") == "stylesheet"
and "href" in [name for name, value in attrs]
):
self.script_URLs.append(getValue(attrs, "href"))
parser = ScriptURLExtractor()
@ -114,17 +122,21 @@ def find_unclaimed_URLs(body, requestUrl):
try:
socket.gethostbyname(domain)
except socket.gaierror:
logging.error(f"XSS found in {requestUrl} due to unclaimed URL \"{url}\".")
logging.error(f'XSS found in {requestUrl} due to unclaimed URL "{url}".')
def test_end_of_URL_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
""" Test the given URL for XSS via injection onto the end of the URL and
log the XSS if found """
def test_end_of_URL_injection(
original_body: str, request_URL: str, cookies: Cookies
) -> VulnData:
"""Test the given URL for XSS via injection onto the end of the URL and
log the XSS if found"""
parsed_URL = urlparse(request_URL)
path = parsed_URL.path
if path != "" and path[-1] != "/": # ensure the path ends in a /
path += "/"
path += FULL_PAYLOAD.decode('utf-8') # the path must be a string while the payload is bytes
path += FULL_PAYLOAD.decode(
"utf-8"
) # the path must be a string while the payload is bytes
url = parsed_URL._replace(path=path).geturl()
body = requests.get(url, cookies=cookies).text.lower()
xss_info = get_XSS_data(body, url, "End of URL")
@ -132,31 +144,42 @@ def test_end_of_URL_injection(original_body: str, request_URL: str, cookies: Coo
return xss_info, sqli_info
def test_referer_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
""" Test the given URL for XSS via injection into the referer and
log the XSS if found """
body = requests.get(request_URL, headers={'referer': FULL_PAYLOAD}, cookies=cookies).text.lower()
def test_referer_injection(
original_body: str, request_URL: str, cookies: Cookies
) -> VulnData:
"""Test the given URL for XSS via injection into the referer and
log the XSS if found"""
body = requests.get(
request_URL, headers={"referer": FULL_PAYLOAD}, cookies=cookies
).text.lower()
xss_info = get_XSS_data(body, request_URL, "Referer")
sqli_info = get_SQLi_data(body, original_body, request_URL, "Referer")
return xss_info, sqli_info
def test_user_agent_injection(original_body: str, request_URL: str, cookies: Cookies) -> VulnData:
""" Test the given URL for XSS via injection into the user agent and
log the XSS if found """
body = requests.get(request_URL, headers={'User-Agent': FULL_PAYLOAD}, cookies=cookies).text.lower()
def test_user_agent_injection(
original_body: str, request_URL: str, cookies: Cookies
) -> VulnData:
"""Test the given URL for XSS via injection into the user agent and
log the XSS if found"""
body = requests.get(
request_URL, headers={"User-Agent": FULL_PAYLOAD}, cookies=cookies
).text.lower()
xss_info = get_XSS_data(body, request_URL, "User Agent")
sqli_info = get_SQLi_data(body, original_body, request_URL, "User Agent")
return xss_info, sqli_info
def test_query_injection(original_body: str, request_URL: str, cookies: Cookies):
""" Test the given URL for XSS via injection into URL queries and
log the XSS if found """
"""Test the given URL for XSS via injection into URL queries and
log the XSS if found"""
parsed_URL = urlparse(request_URL)
query_string = parsed_URL.query
# queries is a list of parameters where each parameter is set to the payload
queries = [query.split("=")[0] + "=" + FULL_PAYLOAD.decode('utf-8') for query in query_string.split("&")]
queries = [
query.split("=")[0] + "=" + FULL_PAYLOAD.decode("utf-8")
for query in query_string.split("&")
]
new_query_string = "&".join(queries)
new_URL = parsed_URL._replace(query=new_query_string).geturl()
body = requests.get(new_URL, cookies=cookies).text.lower()
@ -166,7 +189,7 @@ def test_query_injection(original_body: str, request_URL: str, cookies: Cookies)
def log_XSS_data(xss_info: Optional[XSSData]) -> None:
""" Log information about the given XSS to mitmproxy """
"""Log information about the given XSS to mitmproxy"""
# If it is None, then there is no info to log
if not xss_info:
return
@ -178,7 +201,7 @@ def log_XSS_data(xss_info: Optional[XSSData]) -> None:
def log_SQLi_data(sqli_info: Optional[SQLiData]) -> None:
""" Log information about the given SQLi to mitmproxy """
"""Log information about the given SQLi to mitmproxy"""
if not sqli_info:
return
logging.error("===== SQLi Found =====")
@ -189,51 +212,88 @@ def log_SQLi_data(sqli_info: Optional[SQLiData]) -> None:
return
def get_SQLi_data(new_body: str, original_body: str, request_URL: str, injection_point: str) -> Optional[SQLiData]:
""" Return a SQLiData if there is a SQLi, otherwise return None
String String URL String -> (SQLiData or None) """
def get_SQLi_data(
new_body: str, original_body: str, request_URL: str, injection_point: str
) -> Optional[SQLiData]:
"""Return a SQLiData if there is a SQLi, otherwise return None
String String URL String -> (SQLiData or None)"""
# Regexes taken from Damn Small SQLi Scanner: https://github.com/stamparm/DSSS/blob/master/dsss.py#L17
DBMS_ERRORS = {
"MySQL": (r"SQL syntax.*MySQL", r"Warning.*mysql_.*", r"valid MySQL result", r"MySqlClient\."),
"PostgreSQL": (r"PostgreSQL.*ERROR", r"Warning.*\Wpg_.*", r"valid PostgreSQL result", r"Npgsql\."),
"Microsoft SQL Server": (r"Driver.* SQL[\-\_\ ]*Server", r"OLE DB.* SQL Server", r"(\W|\A)SQL Server.*Driver",
r"Warning.*mssql_.*", r"(\W|\A)SQL Server.*[0-9a-fA-F]{8}",
r"(?s)Exception.*\WSystem\.Data\.SqlClient\.", r"(?s)Exception.*\WRoadhouse\.Cms\."),
"Microsoft Access": (r"Microsoft Access Driver", r"JET Database Engine", r"Access Database Engine"),
"Oracle": (r"\bORA-[0-9][0-9][0-9][0-9]", r"Oracle error", r"Oracle.*Driver", r"Warning.*\Woci_.*", r"Warning.*\Wora_.*"),
"MySQL": (
r"SQL syntax.*MySQL",
r"Warning.*mysql_.*",
r"valid MySQL result",
r"MySqlClient\.",
),
"PostgreSQL": (
r"PostgreSQL.*ERROR",
r"Warning.*\Wpg_.*",
r"valid PostgreSQL result",
r"Npgsql\.",
),
"Microsoft SQL Server": (
r"Driver.* SQL[\-\_\ ]*Server",
r"OLE DB.* SQL Server",
r"(\W|\A)SQL Server.*Driver",
r"Warning.*mssql_.*",
r"(\W|\A)SQL Server.*[0-9a-fA-F]{8}",
r"(?s)Exception.*\WSystem\.Data\.SqlClient\.",
r"(?s)Exception.*\WRoadhouse\.Cms\.",
),
"Microsoft Access": (
r"Microsoft Access Driver",
r"JET Database Engine",
r"Access Database Engine",
),
"Oracle": (
r"\bORA-[0-9][0-9][0-9][0-9]",
r"Oracle error",
r"Oracle.*Driver",
r"Warning.*\Woci_.*",
r"Warning.*\Wora_.*",
),
"IBM DB2": (r"CLI Driver.*DB2", r"DB2 SQL error", r"\bdb2_\w+\("),
"SQLite": (r"SQLite/JDBCDriver", r"SQLite.Exception", r"System.Data.SQLite.SQLiteException", r"Warning.*sqlite_.*",
r"Warning.*SQLite3::", r"\[SQLITE_ERROR\]"),
"Sybase": (r"(?i)Warning.*sybase.*", r"Sybase message", r"Sybase.*Server message.*"),
"SQLite": (
r"SQLite/JDBCDriver",
r"SQLite.Exception",
r"System.Data.SQLite.SQLiteException",
r"Warning.*sqlite_.*",
r"Warning.*SQLite3::",
r"\[SQLITE_ERROR\]",
),
"Sybase": (
r"(?i)Warning.*sybase.*",
r"Sybase message",
r"Sybase.*Server message.*",
),
}
for dbms, regexes in DBMS_ERRORS.items():
for regex in regexes: # type: ignore
if re.search(regex, new_body, re.IGNORECASE) and not re.search(regex, original_body, re.IGNORECASE):
return SQLiData(request_URL,
injection_point,
regex,
dbms)
if re.search(regex, new_body, re.IGNORECASE) and not re.search(
regex, original_body, re.IGNORECASE
):
return SQLiData(request_URL, injection_point, regex, dbms)
return None
# A qc is either ' or "
def inside_quote(qc: str, substring_bytes: bytes, text_index: int, body_bytes: bytes) -> bool:
""" Whether the text_index-th occurrence of substring_bytes in body_bytes
is inside quotes as defined by the supplied quote character qc """
substring = substring_bytes.decode('utf-8')
body = body_bytes.decode('utf-8')
def inside_quote(
qc: str, substring_bytes: bytes, text_index: int, body_bytes: bytes
) -> bool:
"""Whether the text_index-th occurrence of substring_bytes in body_bytes
is inside quotes as defined by the supplied quote character qc"""
substring = substring_bytes.decode("utf-8")
body = body_bytes.decode("utf-8")
num_substrings_found = 0
in_quote = False
for index, char in enumerate(body):
# Whether the next chunk of len(substring) chars is the substring
next_part_is_substring = (
(not (index + len(substring) > len(body))) and
(body[index:index + len(substring)] == substring)
next_part_is_substring = (not (index + len(substring) > len(body))) and (
body[index : index + len(substring)] == substring
)
# Whether this char is escaped with a \
is_not_escaped = (
(index - 1 < 0 or index - 1 > len(body)) or
(body[index - 1] != "\\")
is_not_escaped = (index - 1 < 0 or index - 1 > len(body)) or (
body[index - 1] != "\\"
)
if char == qc and is_not_escaped:
in_quote = not in_quote
@ -245,25 +305,27 @@ def inside_quote(qc: str, substring_bytes: bytes, text_index: int, body_bytes: b
def paths_to_text(html: str, string: str) -> list[str]:
""" Return list of Paths to a given str in the given HTML tree
- Note that it does a BFS """
"""Return list of Paths to a given str in the given HTML tree
- Note that it does a BFS"""
def remove_last_occurence_of_sub_string(string: str, substr: str) -> str:
""" Delete the last occurrence of substr from str
"""Delete the last occurrence of substr from str
String String -> String
"""
index = string.rfind(substr)
return string[:index] + string[index + len(substr):]
return string[:index] + string[index + len(substr) :]
class PathHTMLParser(HTMLParser):
currentPath = ""
paths: list[str] = []
def handle_starttag(self, tag, attrs):
self.currentPath += ("/" + tag)
self.currentPath += "/" + tag
def handle_endtag(self, tag):
self.currentPath = remove_last_occurence_of_sub_string(self.currentPath, "/" + tag)
self.currentPath = remove_last_occurence_of_sub_string(
self.currentPath, "/" + tag
)
def handle_data(self, data):
if string in data:
@ -274,13 +336,15 @@ def paths_to_text(html: str, string: str) -> list[str]:
return parser.paths
def get_XSS_data(body: Union[str, bytes], request_URL: str, injection_point: str) -> Optional[XSSData]:
""" Return an XSSData if there is an XSS, otherwise return None """
def get_XSS_data(
body: Union[str, bytes], request_URL: str, injection_point: str
) -> Optional[XSSData]:
"""Return an XSSData if there is an XSS, otherwise return None"""
def in_script(text, index, body) -> bool:
""" Whether the index-th occurrence of text in body
is inside a script tag """
paths = paths_to_text(body.decode('utf-8'), text.decode("utf-8"))
"""Whether the index-th occurrence of text in body
is inside a script tag"""
paths = paths_to_text(body.decode("utf-8"), text.decode("utf-8"))
try:
path = paths[index]
return "script" in path
@ -288,12 +352,12 @@ def get_XSS_data(body: Union[str, bytes], request_URL: str, injection_point: str
return False
def in_HTML(text: bytes, index: int, body: bytes) -> bool:
""" Whether the index-th occurrence of text in body is inside
the HTML but not inside a script tag or part of
an HTML attribute"""
"""Whether the index-th occurrence of text in body is inside
the HTML but not inside a script tag or part of
an HTML attribute"""
# if there is a < then lxml will interpret that as a tag, so only search for the stuff before it
text = text.split(b"<")[0]
paths = paths_to_text(body.decode('utf-8'), text.decode("utf-8"))
paths = paths_to_text(body.decode("utf-8"), text.decode("utf-8"))
try:
path = paths[index]
return "script" not in path
@ -301,14 +365,14 @@ def get_XSS_data(body: Union[str, bytes], request_URL: str, injection_point: str
return False
def inject_javascript_handler(html: str) -> bool:
""" Whether you can inject a Javascript:alert(0) as a link """
"""Whether you can inject a Javascript:alert(0) as a link"""
class injectJSHandlerHTMLParser(HTMLParser):
injectJSHandler = False
def handle_starttag(self, tag, attrs):
for name, value in attrs:
if name == "href" and value.startswith(FRONT_WALL.decode('utf-8')):
if name == "href" and value.startswith(FRONT_WALL.decode("utf-8")):
self.injectJSHandler = True
parser = injectJSHandlerHTMLParser()
@ -317,7 +381,7 @@ def get_XSS_data(body: Union[str, bytes], request_URL: str, injection_point: str
# Only convert the body to bytes if needed
if isinstance(body, str):
body = bytes(body, 'utf-8')
body = bytes(body, "utf-8")
# Regex for between 24 and 72 (aka 24*3) characters encapsulated by the walls
regex = re.compile(b"""%s.{24,72}?%s""" % (FRONT_WALL, BACK_WALL))
matches = regex.findall(body)
@ -336,64 +400,121 @@ def get_XSS_data(body: Union[str, bytes], request_URL: str, injection_point: str
inject_slash = b"sl/bsl" in match # forward slashes
inject_semi = b"se;sl" in match # semicolons
inject_equals = b"eq=" in match # equals sign
if in_script_val and inject_slash and inject_open_angle and inject_close_angle: # e.g. <script>PAYLOAD</script>
return XSSData(request_URL,
injection_point,
'</script><script>alert(0)</script><script>',
match.decode('utf-8'))
elif in_script_val and in_single_quotes and inject_single_quotes and inject_semi: # e.g. <script>t='PAYLOAD';</script>
return XSSData(request_URL,
injection_point,
"';alert(0);g='",
match.decode('utf-8'))
elif in_script_val and in_double_quotes and inject_double_quotes and inject_semi: # e.g. <script>t="PAYLOAD";</script>
return XSSData(request_URL,
injection_point,
'";alert(0);g="',
match.decode('utf-8'))
elif in_tag and in_single_quotes and inject_single_quotes and inject_open_angle and inject_close_angle and inject_slash:
if (
in_script_val and inject_slash and inject_open_angle and inject_close_angle
): # e.g. <script>PAYLOAD</script>
return XSSData(
request_URL,
injection_point,
"</script><script>alert(0)</script><script>",
match.decode("utf-8"),
)
elif (
in_script_val and in_single_quotes and inject_single_quotes and inject_semi
): # e.g. <script>t='PAYLOAD';</script>
return XSSData(
request_URL, injection_point, "';alert(0);g='", match.decode("utf-8")
)
elif (
in_script_val and in_double_quotes and inject_double_quotes and inject_semi
): # e.g. <script>t="PAYLOAD";</script>
return XSSData(
request_URL, injection_point, '";alert(0);g="', match.decode("utf-8")
)
elif (
in_tag
and in_single_quotes
and inject_single_quotes
and inject_open_angle
and inject_close_angle
and inject_slash
):
# e.g. <a href='PAYLOAD'>Test</a>
return XSSData(request_URL,
injection_point,
"'><script>alert(0)</script>",
match.decode('utf-8'))
elif in_tag and in_double_quotes and inject_double_quotes and inject_open_angle and inject_close_angle and inject_slash:
return XSSData(
request_URL,
injection_point,
"'><script>alert(0)</script>",
match.decode("utf-8"),
)
elif (
in_tag
and in_double_quotes
and inject_double_quotes
and inject_open_angle
and inject_close_angle
and inject_slash
):
# e.g. <a href="PAYLOAD">Test</a>
return XSSData(request_URL,
injection_point,
'"><script>alert(0)</script>',
match.decode('utf-8'))
elif in_tag and not in_double_quotes and not in_single_quotes and inject_open_angle and inject_close_angle and inject_slash:
return XSSData(
request_URL,
injection_point,
'"><script>alert(0)</script>',
match.decode("utf-8"),
)
elif (
in_tag
and not in_double_quotes
and not in_single_quotes
and inject_open_angle
and inject_close_angle
and inject_slash
):
# e.g. <a href=PAYLOAD>Test</a>
return XSSData(request_URL,
injection_point,
'><script>alert(0)</script>',
match.decode('utf-8'))
elif inject_javascript_handler(body.decode('utf-8')): # e.g. <html><a href=PAYLOAD>Test</a>
return XSSData(request_URL,
injection_point,
'Javascript:alert(0)',
match.decode('utf-8'))
elif in_tag and in_double_quotes and inject_double_quotes and inject_equals: # e.g. <a href="PAYLOAD">Test</a>
return XSSData(request_URL,
injection_point,
'" onmouseover="alert(0)" t="',
match.decode('utf-8'))
elif in_tag and in_single_quotes and inject_single_quotes and inject_equals: # e.g. <a href='PAYLOAD'>Test</a>
return XSSData(request_URL,
injection_point,
"' onmouseover='alert(0)' t='",
match.decode('utf-8'))
elif in_tag and not in_single_quotes and not in_double_quotes and inject_equals: # e.g. <a href=PAYLOAD>Test</a>
return XSSData(request_URL,
injection_point,
" onmouseover=alert(0) t=",
match.decode('utf-8'))
elif in_HTML_val and not in_script_val and inject_open_angle and inject_close_angle and inject_slash: # e.g. <html>PAYLOAD</html>
return XSSData(request_URL,
injection_point,
'<script>alert(0)</script>',
match.decode('utf-8'))
return XSSData(
request_URL,
injection_point,
"><script>alert(0)</script>",
match.decode("utf-8"),
)
elif inject_javascript_handler(
body.decode("utf-8")
): # e.g. <html><a href=PAYLOAD>Test</a>
return XSSData(
request_URL,
injection_point,
"Javascript:alert(0)",
match.decode("utf-8"),
)
elif (
in_tag and in_double_quotes and inject_double_quotes and inject_equals
): # e.g. <a href="PAYLOAD">Test</a>
return XSSData(
request_URL,
injection_point,
'" onmouseover="alert(0)" t="',
match.decode("utf-8"),
)
elif (
in_tag and in_single_quotes and inject_single_quotes and inject_equals
): # e.g. <a href='PAYLOAD'>Test</a>
return XSSData(
request_URL,
injection_point,
"' onmouseover='alert(0)' t='",
match.decode("utf-8"),
)
elif (
in_tag and not in_single_quotes and not in_double_quotes and inject_equals
): # e.g. <a href=PAYLOAD>Test</a>
return XSSData(
request_URL,
injection_point,
" onmouseover=alert(0) t=",
match.decode("utf-8"),
)
elif (
in_HTML_val
and not in_script_val
and inject_open_angle
and inject_close_angle
and inject_slash
): # e.g. <html>PAYLOAD</html>
return XSSData(
request_URL,
injection_point,
"<script>alert(0)</script>",
match.decode("utf-8"),
)
else:
return None
return None
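All of the branches above key off one mechanism: the injected payload is bracketed by FRONT_WALL/BACK_WALL sentinels, and whichever marker substrings (se;sl, sl/bsl, eq=, ...) survive in the reflected match reveal which characters the page failed to escape. A self-contained illustration with made-up sentinel values (the real constants are defined earlier in xss_scanner.py, outside this diff):

import re

FRONT_WALL = b"FRONTWALL"  # placeholder, not the real sentinel
BACK_WALL = b"BACKWALL"  # placeholder, not the real sentinel

regex = re.compile(b"%s.{24,72}?%s" % (FRONT_WALL, BACK_WALL))
# 26 bytes between the walls, inside the 24-72 window the regex expects.
reflected = FRONT_WALL + b"se;sl" + b"sl/bsl" + b"eq=" + b"x" * 12 + BACK_WALL
match = regex.findall(b"<a href=" + reflected + b">Test</a>")[0]
assert b"se;sl" in match and b"sl/bsl" in match and b"eq=" in match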

View File

@ -2,13 +2,14 @@ import contextlib
import inspect
import logging
import pprint
import sys
import traceback
import types
from collections.abc import Callable, Sequence
from collections.abc import Callable
from collections.abc import Sequence
from dataclasses import dataclass
from typing import Any, Optional
import sys
from typing import Any
from typing import Optional
from mitmproxy import exceptions
from mitmproxy import flow

View File

@ -11,19 +11,19 @@ from mitmproxy.addons import cut
from mitmproxy.addons import disable_h2c
from mitmproxy.addons import dns_resolver
from mitmproxy.addons import export
from mitmproxy.addons import next_layer
from mitmproxy.addons import onboarding
from mitmproxy.addons import proxyserver
from mitmproxy.addons import proxyauth
from mitmproxy.addons import script
from mitmproxy.addons import serverplayback
from mitmproxy.addons import mapremote
from mitmproxy.addons import maplocal
from mitmproxy.addons import mapremote
from mitmproxy.addons import modifybody
from mitmproxy.addons import modifyheaders
from mitmproxy.addons import next_layer
from mitmproxy.addons import onboarding
from mitmproxy.addons import proxyauth
from mitmproxy.addons import proxyserver
from mitmproxy.addons import save
from mitmproxy.addons import script
from mitmproxy.addons import serverplayback
from mitmproxy.addons import stickyauth
from mitmproxy.addons import stickycookie
from mitmproxy.addons import save
from mitmproxy.addons import tlsconfig
from mitmproxy.addons import upstream_auth

View File

@ -7,7 +7,8 @@ from typing import Optional
import asgiref.compatibility
import asgiref.wsgi
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import http
logger = logging.getLogger(__name__)

View File

@ -1,7 +1,11 @@
from collections.abc import Sequence
from typing import NamedTuple
from mitmproxy import ctx, exceptions, flowfilter, http, version
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import version
from mitmproxy.net.http.status_codes import NO_RESPONSE
from mitmproxy.net.http.status_codes import RESPONSES

View File

@ -74,7 +74,9 @@ class Browser:
cmd = get_browser_cmd()
if not cmd:
logging.log(ALERT, "Your platform is not supported yet - please submit a patch.")
logging.log(
ALERT, "Your platform is not supported yet - please submit a patch."
)
return
tdir = tempfile.TemporaryDirectory()
@ -85,7 +87,8 @@ class Browser:
*cmd,
"--user-data-dir=%s" % str(tdir.name),
"--proxy-server={}:{}".format(
ctx.options.listen_host or "127.0.0.1", ctx.options.listen_port or "8080"
ctx.options.listen_host or "127.0.0.1",
ctx.options.listen_port or "8080",
),
"--disable-fre",
"--no-default-browser-check",

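For reference, the same flag set can be reproduced by hand against a default local proxy; the binary name and profile path here are assumptions for illustration:

import subprocess
import tempfile

tdir = tempfile.TemporaryDirectory()
subprocess.Popen([
    "chromium",  # stand-in for whatever get_browser_cmd() resolves to
    f"--user-data-dir={tdir.name}",
    "--proxy-server=127.0.0.1:8080",
    "--disable-fre",
    "--no-default-browser-check",
])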
View File

@ -1,10 +1,10 @@
import asyncio
import logging
import time
import traceback
from collections.abc import Sequence
from typing import Optional, cast
import time
from typing import cast
from typing import Optional
import mitmproxy.types
from mitmproxy import command
@ -13,11 +13,15 @@ from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import http
from mitmproxy import io
from mitmproxy.connection import ConnectionState, Server
from mitmproxy.connection import ConnectionState
from mitmproxy.connection import Server
from mitmproxy.hooks import UpdateHook
from mitmproxy.log import ALERT
from mitmproxy.options import Options
from mitmproxy.proxy import commands, events, layers, server
from mitmproxy.proxy import commands
from mitmproxy.proxy import events
from mitmproxy.proxy import layers
from mitmproxy.proxy import server
from mitmproxy.proxy.context import Context
from mitmproxy.proxy.layer import CommandGenerator
from mitmproxy.proxy.layers.http import HTTPMode
@ -161,9 +165,7 @@ class ClientPlayback:
else:
await h.replay()
except Exception:
logger.error(
f"Client replay has crashed!\n{traceback.format_exc()}"
)
logger.error(f"Client replay has crashed!\n{traceback.format_exc()}")
self.queue.task_done()
self.inflight = None

View File

@ -41,7 +41,7 @@ class CommandHistory:
def done(self):
if ctx.options.command_history and len(self.history) >= self.VACUUM_SIZE:
# vacuum history so that it doesn't grow indefinitely.
history_str = "\n".join(self.history[-self.VACUUM_SIZE // 2:]) + "\n"
history_str = "\n".join(self.history[-self.VACUUM_SIZE // 2 :]) + "\n"
try:
self.history_file.write_text(history_str)
except Exception as e:

View File

@ -1,6 +1,8 @@
from collections.abc import Sequence
from mitmproxy import command, flow, ctx
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import flow
from mitmproxy.hooks import UpdateHook

View File

@ -3,15 +3,16 @@ import os
from collections.abc import Sequence
from typing import Union
from mitmproxy.log import ALERT
from mitmproxy.utils import emoji
from mitmproxy import ctx, hooks
from mitmproxy import exceptions
from mitmproxy import command
from mitmproxy import flow
from mitmproxy import optmanager
from mitmproxy.net.http import status_codes
import mitmproxy.types
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import hooks
from mitmproxy import optmanager
from mitmproxy.log import ALERT
from mitmproxy.net.http import status_codes
from mitmproxy.utils import emoji
logger = logging.getLogger(__name__)

View File

@ -1,18 +1,18 @@
import io
import csv
import io
import logging
import os.path
from collections.abc import Sequence
from typing import Any, Union
from mitmproxy import command
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import certs
import mitmproxy.types
from typing import Any
from typing import Union
import pyperclip
import mitmproxy.types
from mitmproxy import certs
from mitmproxy import command
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy.log import ALERT
logger = logging.getLogger(__name__)
@ -132,7 +132,7 @@ class Cut:
writer.writerow(vals)
logger.log(
ALERT,
"Saved %s cuts over %d flows as CSV." % (len(cuts), len(flows))
"Saved %s cuts over %d flows as CSV." % (len(cuts), len(flows)),
)
except OSError as e:
logger.error(str(e))

View File

@ -1,7 +1,10 @@
import asyncio
import ipaddress
import socket
from typing import Callable, Iterable, Union
from collections.abc import Iterable
from typing import Callable
from typing import Union
from mitmproxy import dns
from mitmproxy.proxy import mode_specs

View File

@ -1,10 +1,12 @@
from __future__ import annotations
import logging
import itertools
import logging
import shutil
import sys
from typing import IO, Optional, Union
from typing import IO
from typing import Optional
from typing import Union
from wsproto.frame_protocol import CloseReason
@ -17,12 +19,15 @@ from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.contrib import click as miniclick
from mitmproxy.net.dns import response_codes
from mitmproxy.tcp import TCPFlow, TCPMessage
from mitmproxy.udp import UDPFlow, UDPMessage
from mitmproxy.tcp import TCPFlow
from mitmproxy.tcp import TCPMessage
from mitmproxy.udp import UDPFlow
from mitmproxy.udp import UDPMessage
from mitmproxy.utils import human
from mitmproxy.utils import strutils
from mitmproxy.utils import vt_codes
from mitmproxy.websocket import WebSocketData, WebSocketMessage
from mitmproxy.websocket import WebSocketData
from mitmproxy.websocket import WebSocketMessage
def indent(n: int, text: str) -> str:
@ -387,9 +392,12 @@ class Dumper:
self.style(str(x), fg="bright_blue") for x in f.response.answers
)
else:
answers = self.style(response_codes.to_str(
f.response.response_code,
), fg="red")
answers = self.style(
response_codes.to_str(
f.response.response_code,
),
fg="red",
)
self.echo(f"{arrows} {answers}")
def dns_error(self, f: dns.DNSFlow):

View File

@ -1,6 +1,5 @@
import asyncio
import logging
import sys
from mitmproxy import log

View File

@ -4,7 +4,8 @@ import logging
from collections.abc import Callable
from typing import Optional
from mitmproxy import command, log
from mitmproxy import command
from mitmproxy import log
from mitmproxy.log import LogEntry
from mitmproxy.utils import signals

View File

@ -1,15 +1,18 @@
import logging
import shlex
from collections.abc import Callable, Sequence
from typing import Any, Union
from collections.abc import Callable
from collections.abc import Sequence
from typing import Any
from typing import Union
import pyperclip
import mitmproxy.types
from mitmproxy import command
from mitmproxy import ctx, http
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import http
from mitmproxy.net.http.http1 import assemble
from mitmproxy.utils import strutils

View File

@ -1,8 +1,9 @@
from typing import Optional
from mitmproxy import flow, flowfilter
from mitmproxy import exceptions
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import flowfilter
class Intercept:

View File

@ -1,4 +1,5 @@
import asyncio
from mitmproxy import ctx

View File

@ -8,7 +8,11 @@ from typing import NamedTuple
from werkzeug.security import safe_join
from mitmproxy import ctx, exceptions, flowfilter, http, version
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import version
from mitmproxy.utils.spec import parse_spec

View File

@ -2,7 +2,10 @@ import re
from collections.abc import Sequence
from typing import NamedTuple
from mitmproxy import ctx, exceptions, flowfilter, http
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.utils.spec import parse_spec

View File

@ -2,8 +2,10 @@ import logging
import re
from collections.abc import Sequence
from mitmproxy import ctx, exceptions
from mitmproxy.addons.modifyheaders import parse_modify_spec, ModifySpec
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy.addons.modifyheaders import ModifySpec
from mitmproxy.addons.modifyheaders import parse_modify_spec
class ModifyBody:

View File

@ -4,7 +4,10 @@ from collections.abc import Sequence
from pathlib import Path
from typing import NamedTuple
from mitmproxy import ctx, exceptions, flowfilter, http
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.http import Headers
from mitmproxy.utils import strutils
from mitmproxy.utils.spec import parse_spec

View File

@ -15,22 +15,39 @@ In that case it's not necessary to modify mitmproxy's source, adding a custom ad
that sets nextlayer.layer works just as well.
"""
import re
from collections.abc import Sequence
import struct
from typing import Any, Callable, Iterable, Optional, Union, cast
from collections.abc import Iterable
from collections.abc import Sequence
from typing import Any
from typing import Callable
from typing import cast
from typing import Optional
from typing import Union
from mitmproxy import ctx, dns, exceptions, connection
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy import exceptions
from mitmproxy.net.tls import is_tls_record_magic
from mitmproxy.proxy.layers.http import HTTPMode
from mitmproxy.proxy import context, layer, layers, mode_specs
from mitmproxy.proxy import context
from mitmproxy.proxy import layer
from mitmproxy.proxy import layers
from mitmproxy.proxy import mode_specs
from mitmproxy.proxy.layers import modes
from mitmproxy.proxy.layers.http import HTTPMode
from mitmproxy.proxy.layers.quic import quic_parse_client_hello
from mitmproxy.proxy.layers.tls import HTTP_ALPNS, dtls_parse_client_hello, parse_client_hello
from mitmproxy.proxy.layers.tls import dtls_parse_client_hello
from mitmproxy.proxy.layers.tls import HTTP_ALPNS
from mitmproxy.proxy.layers.tls import parse_client_hello
from mitmproxy.tls import ClientHello
LayerCls = type[layer.Layer]
ClientSecurityLayerCls = Union[type[layers.ClientTLSLayer], type[layers.ClientQuicLayer]]
ServerSecurityLayerCls = Union[type[layers.ServerTLSLayer], type[layers.ServerQuicLayer]]
ClientSecurityLayerCls = Union[
type[layers.ClientTLSLayer], type[layers.ClientQuicLayer]
]
ServerSecurityLayerCls = Union[
type[layers.ServerTLSLayer], type[layers.ServerQuicLayer]
]
def stack_match(
@ -77,7 +94,7 @@ class NextLayer:
data_client: bytes,
*,
is_tls: Callable[[bytes], bool] = is_tls_record_magic,
client_hello: Callable[[bytes], Optional[ClientHello]] = parse_client_hello
client_hello: Callable[[bytes], Optional[ClientHello]] = parse_client_hello,
) -> Optional[bool]:
"""
Returns:
@ -148,7 +165,9 @@ class NextLayer:
ret.child_layer = client_layer_cls(context)
return ret
def is_destination_in_hosts(self, context: context.Context, hosts: Iterable[re.Pattern]) -> bool:
def is_destination_in_hosts(
self, context: context.Context, hosts: Iterable[re.Pattern]
) -> bool:
return any(
(context.server.address and rex.search(context.server.address[0]))
or (context.client.sni and rex.search(context.client.sni))
@ -168,15 +187,15 @@ class NextLayer:
):
return layers.HttpLayer(context, HTTPMode.regular)
# ... or an upstream proxy.
if (
s(modes.HttpUpstreamProxy)
or
s(modes.HttpUpstreamProxy, (layers.ClientTLSLayer, layers.ClientQuicLayer))
if s(modes.HttpUpstreamProxy) or s(
modes.HttpUpstreamProxy, (layers.ClientTLSLayer, layers.ClientQuicLayer)
):
return layers.HttpLayer(context, HTTPMode.upstream)
return None
def detect_udp_tls(self, data_client: bytes) -> Optional[tuple[ClientHello, ClientSecurityLayerCls, ServerSecurityLayerCls]]:
def detect_udp_tls(
self, data_client: bytes
) -> Optional[tuple[ClientHello, ClientSecurityLayerCls, ServerSecurityLayerCls]]:
if len(data_client) == 0:
return None
@ -198,23 +217,23 @@ class NextLayer:
# that's all we currently have to offer
return None
def raw_udp_layer(self, context: context.Context, ignore: bool = False) -> layer.Layer:
def raw_udp_layer(
self, context: context.Context, ignore: bool = False
) -> layer.Layer:
def s(*layers):
return stack_match(context, layers)
# for regular and upstream HTTP3, if we already created a client QUIC layer
# we need a server and raw QUIC layer as well
if (
s(modes.HttpProxy, layers.ClientQuicLayer)
or
s(modes.HttpUpstreamProxy, layers.ClientQuicLayer)
if s(modes.HttpProxy, layers.ClientQuicLayer) or s(
modes.HttpUpstreamProxy, layers.ClientQuicLayer
):
server_layer = layers.ServerQuicLayer(context)
server_layer.child_layer = layers.RawQuicLayer(context, ignore=ignore)
return server_layer
# for reverse HTTP3 and QUIC, we need a client and raw QUIC layer
elif (s(modes.ReverseProxy, layers.ServerQuicLayer)):
elif s(modes.ReverseProxy, layers.ServerQuicLayer):
client_layer = layers.ClientQuicLayer(context)
client_layer.child_layer = layers.RawQuicLayer(context, ignore=ignore)
return client_layer
@ -243,11 +262,7 @@ class NextLayer:
if context.client.transport_protocol == "tcp":
is_quic_stream = isinstance(context.layers[-1], layers.QuicStreamLayer)
if (
len(data_client) < 3
and not data_server
and not is_quic_stream
):
if len(data_client) < 3 and not data_server and not is_quic_stream:
return None # not enough data yet to make a decision
# 1. check for --ignore/--allow
@ -292,7 +307,7 @@ class NextLayer:
context.server.address,
data_client,
is_tls=lambda _: tls is not None,
client_hello=lambda _: None if tls is None else tls[0]
client_hello=lambda _: None if tls is None else tls[0],
):
return self.raw_udp_layer(context, ignore=True)
@ -310,7 +325,7 @@ class NextLayer:
return self.raw_udp_layer(context)
# 5. Check for reverse modes
if (isinstance(context.layers[0], modes.ReverseProxy)):
if isinstance(context.layers[0], modes.ReverseProxy):
scheme = cast(mode_specs.ReverseMode, context.client.proxy_mode).scheme
if scheme in ("udp", "dtls"):
return layers.UDPLayer(context)
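The module docstring above notes that a custom addon setting nextlayer.layer is often enough instead of patching this file. A rough sketch of that approach (the port check is an arbitrary example condition):

from mitmproxy.proxy import layer
from mitmproxy.proxy import layers


class PinRawTCP:
    def next_layer(self, nextlayer: layer.NextLayer) -> None:
        context = nextlayer.context
        # Arbitrary example: treat all traffic to port 4242 as raw TCP.
        if context.server.address and context.server.address[1] == 4242:
            nextlayer.layer = layers.TCPLayer(context, ignore=False)


addons = [PinRawTCP()]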

View File

@ -1,6 +1,6 @@
from mitmproxy import ctx
from mitmproxy.addons import asgiapp
from mitmproxy.addons.onboardingapp import app
from mitmproxy import ctx
APP_HOST = "mitm.it"

View File

@ -1,8 +1,10 @@
import os
from flask import Flask, render_template
from flask import Flask
from flask import render_template
from mitmproxy.options import CONF_BASENAME, CONF_DIR
from mitmproxy.options import CONF_BASENAME
from mitmproxy.options import CONF_DIR
from mitmproxy.utils.magisk import write_magisk_module
app = Flask(__name__)

View File

@ -2,14 +2,16 @@ from __future__ import annotations
import binascii
import weakref
from abc import ABC, abstractmethod
from typing import MutableMapping
from abc import ABC
from abc import abstractmethod
from collections.abc import MutableMapping
from typing import Optional
import ldap3
import passlib.apache
from mitmproxy import connection, ctx
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.net.http import status_codes
@ -141,7 +143,9 @@ def is_http_proxy(f: http.HTTPFlow) -> bool:
- True, if authentication is done as if mitmproxy is a proxy
- False, if authentication is done as if mitmproxy is an HTTP server
"""
return isinstance(f.client_conn.proxy_mode, (mode_specs.RegularMode, mode_specs.UpstreamMode))
return isinstance(
f.client_conn.proxy_mode, (mode_specs.RegularMode, mode_specs.UpstreamMode)
)
def mkauth(username: str, password: str, scheme: str = "basic") -> str:

View File

@ -7,29 +7,34 @@ import asyncio
import collections
import ipaddress
import logging
from collections.abc import Iterable
from collections.abc import Iterator
from contextlib import contextmanager
from typing import Iterable, Iterator, Optional
from typing import Optional
from wsproto.frame_protocol import Opcode
from mitmproxy import (
command,
ctx,
exceptions,
http,
platform,
tcp,
udp,
websocket,
)
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy import platform
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy import websocket
from mitmproxy.connection import Address
from mitmproxy.flow import Flow
from mitmproxy.proxy import events, mode_specs, server_hooks
from mitmproxy.proxy import events
from mitmproxy.proxy import mode_specs
from mitmproxy.proxy import server_hooks
from mitmproxy.proxy.layers.tcp import TcpMessageInjected
from mitmproxy.proxy.layers.udp import UdpMessageInjected
from mitmproxy.proxy.layers.websocket import WebSocketMessageInjected
from mitmproxy.proxy.mode_servers import ProxyConnectionHandler, ServerInstance, ServerManager
from mitmproxy.utils import human, signals
from mitmproxy.proxy.mode_servers import ProxyConnectionHandler
from mitmproxy.proxy.mode_servers import ServerInstance
from mitmproxy.proxy.mode_servers import ServerManager
from mitmproxy.utils import human
from mitmproxy.utils import signals
logger = logging.getLogger(__name__)
@ -64,7 +69,8 @@ class Servers:
# Shutdown modes that have been removed from the list.
stop_tasks = [
s.stop() for spec, s in self._instances.items()
s.stop()
for spec, s in self._instances.items()
if spec not in new_instances
]
@ -101,6 +107,7 @@ class Proxyserver(ServerManager):
"""
This addon runs the actual proxy server.
"""
connections: dict[tuple, ProxyConnectionHandler]
servers: Servers
@ -116,7 +123,9 @@ class Proxyserver(ServerManager):
return f"Proxyserver({len(self.connections)} active conns)"
@contextmanager
def register_connection(self, connection_id: tuple, handler: ProxyConnectionHandler):
def register_connection(
self, connection_id: tuple, handler: ProxyConnectionHandler
):
self.connections[connection_id] = handler
try:
yield
@ -217,7 +226,10 @@ class Proxyserver(ServerManager):
if "connect_addr" in updated:
try:
if ctx.options.connect_addr:
self._connect_addr = str(ipaddress.ip_address(ctx.options.connect_addr)), 0
self._connect_addr = (
str(ipaddress.ip_address(ctx.options.connect_addr)),
0,
)
else:
self._connect_addr = None
except ValueError:
@ -229,25 +241,27 @@ class Proxyserver(ServerManager):
modes: list[mode_specs.ProxyMode] = []
for mode in ctx.options.mode:
try:
modes.append(
mode_specs.ProxyMode.parse(mode)
)
modes.append(mode_specs.ProxyMode.parse(mode))
except ValueError as e:
raise exceptions.OptionsError(f"Invalid proxy mode specification: {mode} ({e})")
raise exceptions.OptionsError(
f"Invalid proxy mode specification: {mode} ({e})"
)
# ...and don't listen on the same address.
listen_addrs = [
(
m.listen_host(ctx.options.listen_host),
m.listen_port(ctx.options.listen_port),
m.transport_protocol
m.transport_protocol,
)
for m in modes
]
if len(set(listen_addrs)) != len(listen_addrs):
(host, port, _) = collections.Counter(listen_addrs).most_common(1)[0][0]
dup_addr = human.format_address((host or "0.0.0.0", port))
raise exceptions.OptionsError(f"Cannot spawn multiple servers on the same address: {dup_addr}")
raise exceptions.OptionsError(
f"Cannot spawn multiple servers on the same address: {dup_addr}"
)
if ctx.options.mode and not ctx.master.addons.get("nextlayer"):
logger.warning("Warning: Running proxyserver without nextlayer addon!")
@ -255,20 +269,20 @@ class Proxyserver(ServerManager):
if platform.original_addr:
platform.init_transparent_mode()
else:
raise exceptions.OptionsError("Transparent mode not supported on this platform.")
raise exceptions.OptionsError(
"Transparent mode not supported on this platform."
)
if self.is_running:
asyncio.create_task(self.servers.update(modes))
async def setup_servers(self) -> bool:
return await self.servers.update([mode_specs.ProxyMode.parse(m) for m in ctx.options.mode])
return await self.servers.update(
[mode_specs.ProxyMode.parse(m) for m in ctx.options.mode]
)
def listen_addrs(self) -> list[Address]:
return [
addr
for server in self.servers
for addr in server.listen_addrs
]
return [addr for server in self.servers for addr in server.listen_addrs]
def inject_event(self, event: events.MessageInjected):
connection_id = (
@ -330,12 +344,7 @@ class Proxyserver(ServerManager):
for listen_host, listen_port, *_ in server.listen_addrs:
self_connect = (
connect_port == listen_port
and connect_host in (
"localhost",
"127.0.0.1",
"::1",
listen_host
)
and connect_host in ("localhost", "127.0.0.1", "::1", listen_host)
and server.mode.transport_protocol == data.server.transport_protocol
)
if self_connect:

View File

@ -2,13 +2,14 @@ import asyncio
import logging
import os.path
import sys
from typing import BinaryIO, Optional
from typing import BinaryIO
from typing import Optional
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import io
from mitmproxy import command
class ReadFile:

View File

@ -5,10 +5,11 @@ from collections.abc import Sequence
from datetime import datetime
from functools import lru_cache
from pathlib import Path
from typing import Literal, Optional
from typing import Literal
from typing import Optional
import mitmproxy.types
from mitmproxy import command, tcp, udp
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy import exceptions
@ -16,6 +17,8 @@ from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import io
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.log import ALERT

View File

@ -1,21 +1,22 @@
import asyncio
import importlib.machinery
import importlib.util
import logging
import os
import importlib.util
import importlib.machinery
import sys
import types
import traceback
import types
from collections.abc import Sequence
from typing import Optional
from mitmproxy import addonmanager, hooks
import mitmproxy.types as mtypes
from mitmproxy import addonmanager
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import eventsequence
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import command
from mitmproxy import eventsequence
from mitmproxy import ctx
import mitmproxy.types as mtypes
from mitmproxy import hooks
from mitmproxy.utils import asyncio_utils
logger = logging.getLogger(__name__)

View File

@ -1,14 +1,18 @@
import hashlib
import logging
import urllib
from collections.abc import Hashable, Sequence
from typing import Any, Optional
from collections.abc import Hashable
from collections.abc import Sequence
from typing import Any
from typing import Optional
import mitmproxy.types
from mitmproxy import command, hooks
from mitmproxy import ctx, http
from mitmproxy import command
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flow
from mitmproxy import hooks
from mitmproxy import http
from mitmproxy import io

View File

@ -1,8 +1,8 @@
from typing import Optional
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import ctx
class StickyAuth:

View File

@ -2,7 +2,10 @@ import collections
from http import cookiejar
from typing import Optional
from mitmproxy import http, flowfilter, ctx, exceptions
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.net.http import cookies
TOrigin = tuple[str, int, str]
@ -31,7 +34,9 @@ def domain_match(a: str, b: str) -> bool:
class StickyCookie:
def __init__(self) -> None:
self.jar: collections.defaultdict[TOrigin, dict[str, str]] = collections.defaultdict(dict)
self.jar: collections.defaultdict[
TOrigin, dict[str, str]
] = collections.defaultdict(dict)
self.flt: Optional[flowfilter.TFilter] = None
def load(self, loader):

View File

@ -1,19 +1,17 @@
from __future__ import annotations
import asyncio
import logging
import sys
from typing import IO
import sys
from mitmproxy import ctx, log
from mitmproxy import ctx
from mitmproxy import log
from mitmproxy.utils import vt_codes
class TermLog:
def __init__(
self,
out: IO[str] | None = None
):
def __init__(self, out: IO[str] | None = None):
self.logger = TermLogHandler(out)
self.logger.install()
@ -41,10 +39,7 @@ class TermLog:
class TermLogHandler(log.MitmLogHandler):
def __init__(
self,
out: IO[str] | None = None
):
def __init__(self, out: IO[str] | None = None):
super().__init__()
self.file: IO[str] = out or sys.stdout
self.has_vt_codes = vt_codes.ensure_supported(self.file)

View File

@ -1,19 +1,28 @@
import ipaddress
import logging
import os
from pathlib import Path
import ssl
from typing import Any, Optional, TypedDict
from pathlib import Path
from typing import Any
from typing import Optional
from typing import TypedDict
from aioquic.h3.connection import H3_ALPN
from aioquic.tls import CipherSuite
from OpenSSL import SSL, crypto
from mitmproxy import certs, ctx, exceptions, connection, tls
from OpenSSL import crypto
from OpenSSL import SSL
from mitmproxy import certs
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import tls
from mitmproxy.net import tls as net_tls
from mitmproxy.options import CONF_BASENAME
from mitmproxy.proxy import context
from mitmproxy.proxy.layers import modes
from mitmproxy.proxy.layers import tls as proxy_tls, quic
from mitmproxy.proxy.layers import quic
from mitmproxy.proxy.layers import tls as proxy_tls
# We need to specify this manually; otherwise OpenSSL may select a non-HTTP2 cipher by default.
# https://ssl-config.mozilla.org/#config=old
@ -166,7 +175,9 @@ class TlsConfig:
extra_chain_certs = []
ssl_ctx = net_tls.create_client_proxy_context(
method=net_tls.Method.DTLS_SERVER_METHOD if tls_start.is_dtls else net_tls.Method.TLS_SERVER_METHOD,
method=net_tls.Method.DTLS_SERVER_METHOD
if tls_start.is_dtls
else net_tls.Method.TLS_SERVER_METHOD,
min_version=net_tls.Version[ctx.options.tls_version_client_min],
max_version=net_tls.Version[ctx.options.tls_version_client_max],
cipher_list=tuple(cipher_list),
@ -179,7 +190,9 @@ class TlsConfig:
tls_start.ssl_conn = SSL.Connection(ssl_ctx)
tls_start.ssl_conn.use_certificate(entry.cert.to_pyopenssl())
tls_start.ssl_conn.use_privatekey(crypto.PKey.from_cryptography_key(entry.privatekey))
tls_start.ssl_conn.use_privatekey(
crypto.PKey.from_cryptography_key(entry.privatekey)
)
# Force HTTP/1 for secure web proxies, we currently don't support CONNECT over HTTP/2.
# There is a proof-of-concept branch at https://github.com/mhils/mitmproxy/tree/http2-proxy,
@ -256,7 +269,9 @@ class TlsConfig:
client_cert = p
ssl_ctx = net_tls.create_proxy_server_context(
method=net_tls.Method.DTLS_CLIENT_METHOD if tls_start.is_dtls else net_tls.Method.TLS_CLIENT_METHOD,
method=net_tls.Method.DTLS_CLIENT_METHOD
if tls_start.is_dtls
else net_tls.Method.TLS_CLIENT_METHOD,
min_version=net_tls.Version[ctx.options.tls_version_server_min],
max_version=net_tls.Version[ctx.options.tls_version_server_max],
cipher_list=tuple(cipher_list),
@ -328,9 +343,8 @@ class TlsConfig:
# if we don't have upstream ALPN, we allow all offered by the client
tls_start.settings.alpn_protocols = [
alpn.decode("ascii")
for alpn in [
alpn for alpn in (client.alpn, server.alpn) if alpn
] or client.alpn_offers
for alpn in [alpn for alpn in (client.alpn, server.alpn) if alpn]
or client.alpn_offers
]
# set the certificates

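The ALPN hunk above leans on a small Python idiom: the comprehension iterates over `[...] or fallback`, so an empty filtered list (falsy) yields the fallback instead. A runnable sketch with hypothetical connection values standing in for client/server state:

from __future__ import annotations

# Hypothetical stand-ins for client/server connection state.
client_alpn: bytes | None = None
server_alpn: bytes | None = None
client_alpn_offers: list[bytes] = [b"h2", b"http/1.1"]

# If neither side has negotiated ALPN, the inner list is empty (falsy)
# and `or` substitutes everything the client originally offered.
alpn_protocols = [
    alpn.decode("ascii")
    for alpn in [a for a in (client_alpn, server_alpn) if a] or client_alpn_offers
]
print(alpn_protocols)  # ['h2', 'http/1.1']
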
View File

@ -1,9 +1,9 @@
import re
import base64
import re
from typing import Optional
from mitmproxy import exceptions
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy import http
from mitmproxy.proxy import mode_specs
from mitmproxy.utils import strutils
@ -53,7 +53,10 @@ class UpstreamAuth:
def requestheaders(self, f: http.HTTPFlow):
if self.auth:
if isinstance(f.client_conn.proxy_mode, mode_specs.UpstreamMode) and f.request.scheme == "http":
if (
isinstance(f.client_conn.proxy_mode, mode_specs.UpstreamMode)
and f.request.scheme == "http"
):
f.request.headers["Proxy-Authorization"] = self.auth
elif isinstance(f.client_conn.proxy_mode, mode_specs.ReverseMode):
f.request.headers["Authorization"] = self.auth

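For context on what ends up in those Proxy-Authorization/Authorization headers: upstream proxy auth is plain HTTP Basic. A minimal sketch of building such a value, using a hypothetical helper rather than mitmproxy's actual parsing code:

import base64

def basic_auth_value(username: str, password: str) -> str:
    # "Basic " + base64("user:password"), per RFC 7617.
    token = base64.b64encode(f"{username}:{password}".encode()).decode("ascii")
    return f"Basic {token}"

print(basic_auth_value("alice", "secret"))  # Basic YWxpY2U6c2VjcmV0
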
View File

@ -11,25 +11,29 @@ The View:
import collections
import logging
import re
from collections.abc import Iterator, MutableMapping, Sequence
from typing import Any, Optional
from collections.abc import Iterator
from collections.abc import MutableMapping
from collections.abc import Sequence
from typing import Any
from typing import Optional
import sortedcontainers
import mitmproxy.flow
from mitmproxy import command
from mitmproxy import connection
from mitmproxy import ctx
from mitmproxy import dns
from mitmproxy import exceptions
from mitmproxy import hooks
from mitmproxy import connection
from mitmproxy import flowfilter
from mitmproxy import hooks
from mitmproxy import http
from mitmproxy import io
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.log import ALERT
from mitmproxy.utils import human, signals
from mitmproxy.utils import human
from mitmproxy.utils import signals
# The underlying sorted list implementation expects the sort key to be stable
@ -144,7 +148,9 @@ def _sig_view_remove(flow: mitmproxy.flow.Flow, index: int) -> None:
class View(collections.abc.Sequence):
def __init__(self) -> None:
super().__init__()
self._store: collections.OrderedDict[str, mitmproxy.flow.Flow] = collections.OrderedDict()
self._store: collections.OrderedDict[
str, mitmproxy.flow.Flow
] = collections.OrderedDict()
self.filter = flowfilter.match_all
# Should we show only marked flows?
self.show_marked = False
@ -475,7 +481,11 @@ class View(collections.abc.Sequence):
except ValueError as e:
raise exceptions.CommandError("Invalid URL: %s" % e)
c = connection.Client(peername=("", 0), sockname=("", 0), timestamp_start=req.timestamp_start - 0.0001)
c = connection.Client(
peername=("", 0),
sockname=("", 0),
timestamp_start=req.timestamp_start - 0.0001,
)
s = connection.Server(address=(req.host, req.port))
f = http.HTTPFlow(c, s)

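The stable-sort-key comment earlier in this file is worth illustrating: sortedcontainers locates items by re-computing the key, so a key that changes after insertion breaks removal. A small sketch, with plain dicts as hypothetical stand-ins for flows:

import sortedcontainers  # pip install sortedcontainers

view = sortedcontainers.SortedKeyList(key=lambda f: f["timestamp"])
view.add({"id": "a", "timestamp": 2.0})
view.add({"id": "b", "timestamp": 1.0})
print([f["id"] for f in view])  # ['b', 'a']

# If f["timestamp"] were mutated now, view.remove(f) would raise
# ValueError: the bisection looks for the item at its *new* key position.
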
View File

@ -6,15 +6,21 @@ import re
import sys
from dataclasses import dataclass
from pathlib import Path
from typing import NewType, Optional, Union
from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa, dsa, ec
from cryptography.hazmat.primitives.serialization import pkcs12
from cryptography.x509 import NameOID, ExtendedKeyUsageOID
from typing import NewType
from typing import Optional
from typing import Union
import OpenSSL
from cryptography import x509
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import dsa
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import pkcs12
from cryptography.x509 import ExtendedKeyUsageOID
from cryptography.x509 import NameOID
from mitmproxy.coretypes import serializable
# Default expiry must not be too long: https://github.com/mitmproxy/mitmproxy/issues/815
@ -315,7 +321,10 @@ class CertStore:
self.default_chain_certs = (
[
Cert.from_pem(chunk)
for chunk in re.split(rb"(?=-----BEGIN( [A-Z]+)+-----)", self.default_chain_file.read_bytes())
for chunk in re.split(
rb"(?=-----BEGIN( [A-Z]+)+-----)",
self.default_chain_file.read_bytes(),
)
if chunk.startswith(b"-----BEGIN CERTIFICATE-----")
]
if self.default_chain_file

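The re.split call in this hunk uses a lookahead so each -----BEGIN ...----- header stays attached to the chunk that follows it; and because the pattern contains a capture group, re.split also interleaves the captured text into the result, which the startswith filter then discards. A self-contained sketch with dummy certificate bodies:

import re

pem_bundle = (
    b"-----BEGIN CERTIFICATE-----\nAAA\n-----END CERTIFICATE-----\n"
    b"-----BEGIN CERTIFICATE-----\nBBB\n-----END CERTIFICATE-----\n"
)

# Split *before* each BEGIN header without consuming it, then keep only
# real certificate chunks (dropping the leading b"" and the stray
# b" CERTIFICATE" group captures that re.split emits).
chunks = [
    chunk
    for chunk in re.split(rb"(?=-----BEGIN( [A-Z]+)+-----)", pem_bundle)
    if chunk.startswith(b"-----BEGIN CERTIFICATE-----")
]
print(len(chunks))  # 2
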
View File

@ -4,16 +4,21 @@
import functools
import inspect
import logging
import pyparsing
import sys
import textwrap
import types
from collections.abc import Sequence, Callable, Iterable
from typing import Any, NamedTuple, Optional
from collections.abc import Callable
from collections.abc import Iterable
from collections.abc import Sequence
from typing import Any
from typing import NamedTuple
from typing import Optional
import pyparsing
import mitmproxy.types
from mitmproxy import exceptions, command_lexer
from mitmproxy import command_lexer
from mitmproxy import exceptions
from mitmproxy.command_lexer import unquote
@ -195,7 +200,9 @@ class CommandManager:
Parse a possibly partial command. Return a sequence of ParseResults and a sequence of remainder type help items.
"""
parts: pyparsing.ParseResults = command_lexer.expr.parseString(cmdstr, parseAll=True)
parts: pyparsing.ParseResults = command_lexer.expr.parseString(
cmdstr, parseAll=True
)
parsed: list[ParseResult] = []
next_params: list[CommandParameter] = [

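parseAll=True in the reflowed call above is what turns trailing junk into a parse error instead of a silently truncated result. A toy grammar (not mitmproxy's actual command grammar) demonstrates the difference:

import pyparsing  # pip install pyparsing

word = pyparsing.Word(pyparsing.alphanums)
expr = pyparsing.ZeroOrMore(word)

print(list(expr.parseString("view flows add", parseAll=True)))
# ['view', 'flows', 'add']

try:
    expr.parseString("view flows !!!", parseAll=True)
except pyparsing.ParseException as e:
    print("rejected:", e)  # "!!!" is left over, so parseAll fails
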
View File

@ -1,18 +1,20 @@
import dataclasses
import sys
import time
from dataclasses import dataclass, field
import uuid
import warnings
from abc import ABCMeta
from collections.abc import Sequence
from dataclasses import dataclass
from dataclasses import field
from enum import Flag
from typing import Literal, Optional
from typing import Literal
from typing import Optional
from mitmproxy import certs
from mitmproxy.coretypes import serializable
from mitmproxy.proxy import mode_specs
from mitmproxy.net import server_spec
from mitmproxy.proxy import mode_specs
from mitmproxy.utils import human
@ -48,12 +50,15 @@ class Connection(serializable.SerializableDataclass, metaclass=ABCMeta):
The connection object only exposes metadata about the connection, but not the underlying socket object.
This is intentional, all I/O should be handled by `mitmproxy.proxy.server` exclusively.
"""
peername: Optional[Address]
"""The remote's `(ip, port)` tuple for this connection."""
sockname: Optional[Address]
"""Our local `(ip, port)` tuple for this connection."""
state: ConnectionState = field(default=ConnectionState.CLOSED, metadata={"serialize": False})
state: ConnectionState = field(
default=ConnectionState.CLOSED, metadata={"serialize": False}
)
"""The current connection state."""
# all connections have a unique id. While
@ -177,7 +182,9 @@ class Client(Connection):
The certificate used by mitmproxy to establish TLS with the client.
"""
proxy_mode: mode_specs.ProxyMode = field(default=mode_specs.ProxyMode.parse("regular"))
proxy_mode: mode_specs.ProxyMode = field(
default=mode_specs.ProxyMode.parse("regular")
)
"""The proxy server type this client has been connecting to."""
timestamp_start: float = field(default_factory=time.time)

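Two field() features carry this hunk: per-field metadata (used here to exclude state from serialization) and default_factory for per-instance defaults such as time.time. A sketch with a hypothetical serializer reading the flag; dataclasses itself never interprets metadata:

import dataclasses
from dataclasses import dataclass, field

@dataclass
class Conn:  # hypothetical stand-in for Connection
    peername: str = ""
    state: int = field(default=0, metadata={"serialize": False})

def serializable_fields(obj) -> list[str]:
    # metadata is an arbitrary read-only mapping attached to each field.
    return [f.name for f in dataclasses.fields(obj) if f.metadata.get("serialize", True)]

print(serializable_fields(Conn()))  # ['peername']
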
View File

@ -12,37 +12,41 @@ metadata depend on the protocol in use. Known attributes can be found in
`base.View`.
"""
import traceback
from typing import Union
from typing import Optional
from typing import Union
from mitmproxy import flow, tcp, udp
from mitmproxy import http
from mitmproxy.utils import signals, strutils
from . import (
auto,
raw,
hex,
json,
xml_html,
wbxml,
javascript,
css,
urlencoded,
multipart,
image,
query,
protobuf,
msgpack,
graphql,
grpc,
mqtt,
http3,
)
from .base import View, KEY_MAX, format_text, format_dict, TViewResult
from . import auto
from . import css
from . import graphql
from . import grpc
from . import hex
from . import http3
from . import image
from . import javascript
from . import json
from . import mqtt
from . import msgpack
from . import multipart
from . import protobuf
from . import query
from . import raw
from . import urlencoded
from . import wbxml
from . import xml_html
from ..tcp import TCPMessage
from ..udp import UDPMessage
from ..websocket import WebSocketMessage
from .base import format_dict
from .base import format_text
from .base import KEY_MAX
from .base import TViewResult
from .base import View
from mitmproxy import flow
from mitmproxy import http
from mitmproxy import tcp
from mitmproxy import udp
from mitmproxy.utils import signals
from mitmproxy.utils import strutils
views: list[View] = []

View File

@ -1,5 +1,5 @@
from mitmproxy import contentviews
from . import base
from mitmproxy import contentviews
class ViewAuto(base.View):

View File

@ -1,7 +1,12 @@
# Default view cutoff *in lines*
from abc import ABC, abstractmethod
from collections.abc import Iterable, Iterator, Mapping
from typing import ClassVar, Optional, Union
from abc import ABC
from abc import abstractmethod
from collections.abc import Iterable
from collections.abc import Iterator
from collections.abc import Mapping
from typing import ClassVar
from typing import Optional
from typing import Union
from mitmproxy import flow
from mitmproxy import http

View File

@ -1,8 +1,10 @@
import json
from typing import Any, Optional
from typing import Any
from typing import Optional
from mitmproxy.contentviews import base
from mitmproxy.contentviews.json import parse_json, PARSE_ERROR
from mitmproxy.contentviews.json import PARSE_ERROR
from mitmproxy.contentviews.json import parse_json
def format_graphql(data):

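The PARSE_ERROR / parse_json pair imported above suggests the usual sentinel pattern: a unique object that marks failure without colliding with valid falsy JSON values such as null, 0, or "". A sketch of that pattern under that assumption (mitmproxy's actual implementation may differ in detail):

import json
from typing import Any

PARSE_ERROR = object()  # unique sentinel; never equal to parsed data

def parse_json(s: bytes) -> Any:
    try:
        return json.loads(s)
    except ValueError:
        return PARSE_ERROR

print(parse_json(b"null") is PARSE_ERROR)     # False: null is valid JSON
print(parse_json(b"{broken") is PARSE_ERROR)  # True
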
View File

@ -2,11 +2,17 @@ from __future__ import annotations
import logging
import struct
from dataclasses import dataclass, field
from collections.abc import Generator
from collections.abc import Iterable
from collections.abc import Iterator
from dataclasses import dataclass
from dataclasses import field
from enum import Enum
from typing import Generator, Iterable, Iterator
from mitmproxy import contentviews, flow, flowfilter, http
from mitmproxy import contentviews
from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy.contentviews import base
from mitmproxy.net.encoding import decode
@ -259,7 +265,9 @@ class ProtoParser:
packed_field: ProtoParser.Field,
) -> list[ProtoParser.Field]:
if not isinstance(packed_field.wire_value, bytes):
raise ValueError(f"can not unpack field with data other than bytes: {type(packed_field.wire_value)}")
raise ValueError(
f"can not unpack field with data other than bytes: {type(packed_field.wire_value)}"
)
wire_data: bytes = packed_field.wire_value
tag: int = packed_field.tag
options: ProtoParser.ParserOptions = packed_field.options
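For orientation, unpacking a packed field means walking its wire_value bytes and decoding consecutive values. The base-128 varint decoder below is a generic sketch of that wire format, not mitmproxy's exact code:

def decode_varint(data: bytes, pos: int = 0) -> tuple[int, int]:
    # Protobuf varints store 7 bits per byte, least-significant group
    # first; the high bit of each byte marks continuation.
    result = shift = 0
    while True:
        byte = data[pos]
        result |= (byte & 0x7F) << shift
        pos += 1
        if not byte & 0x80:
            return result, pos
        shift += 7

print(decode_varint(bytes([0xAC, 0x02])))  # (300, 2): 300 encodes as AC 02
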
@ -953,7 +961,9 @@ def format_grpc(
@dataclass
class ViewConfig:
parser_options: ProtoParser.ParserOptions = field(default_factory=ProtoParser.ParserOptions)
parser_options: ProtoParser.ParserOptions = field(
default_factory=ProtoParser.ParserOptions
)
parser_rules: list[ProtoParser.ParserRule] = field(default_factory=list)

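Finally, the ViewConfig hunk reformats a default_factory field, which is the right tool whenever a dataclass default is mutable. A sketch with hypothetical option types:

from dataclasses import dataclass, field

@dataclass
class ParserOptions:  # hypothetical stand-in
    include_unknown: bool = False

@dataclass
class ViewConfig:
    # default_factory runs once per instance; a shared `= ParserOptions()`
    # default would be a single object mutated by every config. For
    # list/dict/set defaults, dataclasses refuses outright with ValueError.
    parser_options: ParserOptions = field(default_factory=ParserOptions)
    parser_rules: list[str] = field(default_factory=list)

a, b = ViewConfig(), ViewConfig()
print(a.parser_options is b.parser_options)  # False
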
Some files were not shown because too many files have changed in this diff.