This commit is contained in:
Maximilian Hils 2016-03-20 22:50:03 +01:00
parent d8e8dfc1c6
commit 2ce023a991
9 changed files with 37 additions and 38 deletions

View File

@@ -152,9 +152,9 @@ class FHead(_Rex):
flags = re.MULTILINE
def __call__(self, f):
if f.request and self.re.search(str(f.request.headers)):
if f.request and self.re.search(bytes(f.request.headers)):
return True
if f.response and self.re.search(str(f.response.headers)):
if f.response and self.re.search(bytes(f.response.headers)):
return True
return False
@@ -165,7 +165,7 @@ class FHeadRequest(_Rex):
flags = re.MULTILINE
def __call__(self, f):
if f.request and self.re.search(str(f.request.headers)):
if f.request and self.re.search(bytes(f.request.headers)):
return True
@@ -175,7 +175,7 @@ class FHeadResponse(_Rex):
flags = re.MULTILINE
def __call__(self, f):
if f.response and self.re.search(str(f.response.headers)):
if f.response and self.re.search(bytes(f.response.headers)):
return True

View File

@@ -6,12 +6,11 @@ from __future__ import absolute_import
import traceback
from abc import abstractmethod, ABCMeta
import hashlib
import Cookie
import cookielib
from six.moves import http_cookies, http_cookiejar
import os
import re
import time
import urlparse
from six.moves import urllib
from netlib import wsgi
from netlib.exceptions import HttpException
@@ -248,8 +247,8 @@ class ServerPlaybackState:
"""
r = flow.request
_, _, path, _, query, _ = urlparse.urlparse(r.url)
queriesArray = urlparse.parse_qsl(query, keep_blank_values=True)
_, _, path, _, query, _ = urllib.parse.urlparse(r.url)
queriesArray = urllib.parse_qsl(query, keep_blank_values=True)
key = [
str(r.port),
@@ -323,9 +322,9 @@ class StickyCookieState:
)
def domain_match(self, a, b):
if cookielib.domain_match(a, b):
if http_cookiejar.domain_match(a, b):
return True
elif cookielib.domain_match(a, b.strip(".")):
elif http_cookiejar.domain_match(a, b.strip(".")):
return True
return False
@@ -333,7 +332,7 @@ class StickyCookieState:
for i in f.response.headers.get_all("set-cookie"):
# FIXME: We now know that Cookie.py screws up some cookies with
# valid RFC 822/1123 datetime specifications for expiry. Sigh.
c = Cookie.SimpleCookie(str(i))
c = http_cookies.SimpleCookie(str(i))
for m in c.values():
k = self.ckey(m, f)
if self.domain_match(f.request.host, k[0]):

View File

@@ -2,7 +2,7 @@ from __future__ import print_function, absolute_import
import os
import signal
import sys
import thread
from six.moves import _thread # PY3: We only need _thread.error, which is an alias of RuntimeError in 3.3+
from netlib.version_check import check_pyopenssl_version, check_mitmproxy_version
from . import version, cmdline
from .exceptions import ServerException
@@ -68,7 +68,7 @@ def mitmproxy(args=None):  # pragma: no cover
m = console.ConsoleMaster(server, console_options)
try:
m.run()
except (KeyboardInterrupt, thread.error):
except (KeyboardInterrupt, _thread.error):
pass
@@ -103,7 +103,7 @@ def mitmdump(args=None):  # pragma: no cover
except dump.DumpError as e:
print("mitmdump: %s" % e, file=sys.stderr)
sys.exit(1)
except (KeyboardInterrupt, thread.error):
except (KeyboardInterrupt, _thread.error):
pass
@@ -134,5 +134,5 @@ def mitmweb(args=None):  # pragma: no cover
m = web.WebMaster(server, web_options)
try:
m.run()
except (KeyboardInterrupt, thread.error):
except (KeyboardInterrupt, _thread.error):
pass

View File

@@ -1,10 +1,10 @@
import configargparse
import cPickle as pickle
from six.moves import cPickle as pickle
from ctypes import byref, windll, Structure
from ctypes.wintypes import DWORD
import os
import socket
import SocketServer
from six.moves import socketserver
import struct
import threading
import time
@@ -53,7 +53,7 @@ class Resolver(object):
return self.original_addr(csock)
class APIRequestHandler(SocketServer.StreamRequestHandler):
class APIRequestHandler(socketserver.StreamRequestHandler):
"""
TransparentProxy API: Returns the pickled server address, port tuple
@@ -78,10 +78,10 @@ class APIRequestHandler(SocketServer.StreamRequestHandler):
proxifier.trusted_pids.discard(pid)
class APIServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
class APIServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
def __init__(self, proxifier, *args, **kwargs):
SocketServer.TCPServer.__init__(self, *args, **kwargs)
socketserver.TCPServer.__init__(self, *args, **kwargs)
self.proxifier = proxifier
self.daemon_threads = True

View File

@@ -2,7 +2,7 @@ from __future__ import (absolute_import, print_function, division)
import threading
import time
import Queue
from six.moves import queue
import h2
import six
@@ -252,12 +252,12 @@ class Http2SingleStreamLayer(_HttpTransmissionLayer, threading.Thread):
self.response_headers = None
self.pushed = False
self.request_data_queue = Queue.Queue()
self.request_data_queue = queue.Queue()
self.request_queued_data_length = 0
self.request_data_finished = threading.Event()
self.response_arrived = threading.Event()
self.response_data_queue = Queue.Queue()
self.response_data_queue = queue.Queue()
self.response_queued_data_length = 0
self.response_data_finished = threading.Event()
@@ -382,7 +382,7 @@ class Http2SingleStreamLayer(_HttpTransmissionLayer, threading.Thread):
while True:
try:
yield self.response_data_queue.get(timeout=1)
except Queue.Empty:
except queue.Empty:
pass
if self.response_data_finished.is_set():
while self.response_data_queue.qsize() > 0:

View File

@@ -3,7 +3,7 @@ import sys
import os
import itertools
import hashlib
import Queue
from six.moves import queue
import random
import select
import time
@@ -92,8 +92,8 @@ class WebsocketFrameReader(threading.Thread):
self.showresp = showresp
self.hexdump = hexdump
self.rfile = rfile
self.terminate = Queue.Queue()
self.frames_queue = Queue.Queue()
self.terminate = queue.Queue()
self.frames_queue = queue.Queue()
self.logger = log.ConnectionLogger(
self.logfp,
self.hexdump,
@@ -119,7 +119,7 @@ class WebsocketFrameReader(threading.Thread):
try:
self.terminate.get_nowait()
return
except Queue.Empty:
except queue.Empty:
pass
for rfile in r:
with self.logger.ctx() as log:
@@ -344,7 +344,7 @@ class Pathoc(tcp.TCPClient):
timeout=timeout,
block=True if timeout != 0 else False
)
except Queue.Empty:
except queue.Empty:
if finish:
continue
else:

View File

@@ -1,6 +1,6 @@
from six.moves import cStringIO as StringIO
import threading
import Queue
from six.moves import queue
import requests
import requests.packages.urllib3
@@ -13,7 +13,7 @@ class Daemon:
IFACE = "127.0.0.1"
def __init__(self, ssl=None, **daemonargs):
self.q = Queue.Queue()
self.q = queue.Queue()
self.logfp = StringIO()
daemonargs["logfp"] = self.logfp
self.thread = _PaThread(self.IFACE, self.q, ssl, daemonargs)

View File

@@ -1,4 +1,4 @@
import Queue
from six.moves import queue
import time
import os.path
from six.moves import cStringIO as StringIO
@@ -853,7 +853,7 @@ class TestFlowMaster:
assert not fm.start_client_playback(pb, False)
fm.client_playback.testing = True
q = Queue.Queue()
q = queue.Queue()
assert not fm.state.flow_count()
fm.tick(q, 0)
assert fm.state.flow_count()
@@ -909,7 +909,7 @@ class TestFlowMaster:
False,
None,
False)
q = Queue.Queue()
q = queue.Queue()
fm.tick(q, 0)
assert fm.should_exit.is_set()

View File

@@ -1,8 +1,8 @@
import SocketServer
from six.moves import socketserver
from time import sleep
class service(SocketServer.BaseRequestHandler):
class service(socketserver.BaseRequestHandler):
def handle(self):
data = 'dummy'
@@ -16,7 +16,7 @@ class service(SocketServer.BaseRequestHandler):
sleep(3600)
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
pass
server = ThreadedTCPServer(('', 1520), service)