# Reverse HTTPS proxy for CARTO basemap tiles.
# Forked from MollardMichael/python-reverse-proxy.
#!/usr/bin/env python3
from http.server import BaseHTTPRequestHandler,HTTPServer
import argparse, os, random, sys, ssl, requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
from socketserver import ThreadingMixIn
import threading
# TLS certificate and private key used to serve HTTPS to clients
# (paths are relative to the working directory the script is launched from).
certificate_file = 'carto.crt'
private_key_file = 'carto.key'
def merge_two_dicts(x, y):
    """Return a new dict containing x's entries, with y's entries taking precedence.

    Neither input is mutated. (Equivalent to ``x | y`` on Python >= 3.9.)
    """
    merged = dict(x)
    merged.update(y)
    return merged
def set_header():
    """Build the headers forced onto every upstream request.

    Overrides the client's Host header with the module-level ``hostname``
    (set in ``main``) so the upstream server accepts the request.
    """
    return {'Host': hostname}
class ProxyHTTPRequestHandler(BaseHTTPRequestHandler):
    """Forward incoming GET/POST requests to https://<hostname> and relay the response.

    The upstream host comes from the module-level ``hostname`` global (set in
    ``main``). Upstream TLS verification is deliberately disabled (verify=False).
    """
    protocol_version = 'HTTP/1.0'

    def do_HEAD(self):
        # A HEAD is just a GET whose body is not written back.
        self.do_GET(body=False)

    def do_GET(self, body=True):
        sent = False
        try:
            # URL modification: strip CARTO basemap style/scale path fragments.
            url = 'https://{}{}'.format(hostname, self.path).replace('light_all/', '').replace('dark_all/', '').replace('@2x', '')
            req_header = self.parse_headers()
            print(url)
            resp = requests.get(url, headers=merge_two_dicts(req_header, set_header()), verify=False, stream=True)
            sent = True
            self.send_response(resp.status_code)
            self.send_resp_headers(resp)
            if body:
                # Write raw bytes so binary payloads (tile images) survive intact.
                self.wfile.write(resp.content)
        finally:
            if not sent:
                self.send_error(404, 'error trying to proxy')

    def do_POST(self, body=True):
        sent = False
        try:
            url = 'https://{}{}'.format(hostname, self.path)
            # Fix: http.client.HTTPMessage has no getheader() in Python 3;
            # the old .getheader() call raised AttributeError on every POST.
            content_len = int(self.headers.get('Content-Length', 0))
            post_body = self.rfile.read(content_len)
            req_header = self.parse_headers()
            resp = requests.post(url, data=post_body, headers=merge_two_dicts(req_header, set_header()), verify=False)
            sent = True
            self.send_response(resp.status_code)
            self.send_resp_headers(resp)
            if body:
                self.wfile.write(resp.content)
        finally:
            if not sent:
                self.send_error(404, 'error trying to proxy')

    def parse_headers(self):
        """Return the client's request headers as a plain dict.

        Fix: iterating an http.client.HTTPMessage yields header *names*, not
        "Name: value" lines, so the old split-on-':' loop always produced an
        empty dict; items() yields the (name, value) pairs directly.
        """
        return dict(self.headers.items())

    def send_resp_headers(self, resp):
        """Relay upstream response headers, recomputing Content-Length.

        Encoding/length headers are dropped because requests has already
        decoded the body, so the upstream values would no longer be correct.
        """
        skip = {'content-encoding', 'transfer-encoding', 'content-length'}
        for key, value in resp.headers.items():
            if key.lower() not in skip:
                self.send_header(key, value)
        self.send_header('Content-Length', str(len(resp.content)))
        self.end_headers()
def parse_args(argv=sys.argv[1:]):
    """Parse command-line options for the proxy.

    :param argv: argument list (defaults to ``sys.argv[1:]``, captured at
        import time).
    :returns: argparse.Namespace with ``port`` (int) and ``hostname`` (str).
    """
    parser = argparse.ArgumentParser(description='Proxy HTTP requests')
    # Fix: help texts previously contradicted the real defaults
    # ("random" vs 9999 and "en.wikipedia.org" vs '').
    parser.add_argument('--port', dest='port', type=int, default=9999,
                        help='serve HTTPS requests on the specified port (default: 9999)')
    parser.add_argument('--hostname', dest='hostname', type=str, default='',
                        help='upstream hostname to proxy to (default: empty)')
    args = parser.parse_args(argv)
    return args
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """HTTPServer that handles each request in its own thread (via ThreadingMixIn)."""
def main(argv=sys.argv[1:]):
    """Start the threaded HTTPS reverse proxy and serve forever.

    Sets the module-level ``hostname`` global read by the request handler.
    Blocks in ``serve_forever``; terminate with Ctrl-C / signal.
    """
    global hostname
    args = parse_args(argv)
    hostname = args.hostname
    print('https server is starting on {} port {}...'.format(args.hostname, args.port))
    server_address = ('0.0.0.0', args.port)
    httpd = ThreadedHTTPServer(server_address, ProxyHTTPRequestHandler)
    # Fix: ssl.wrap_socket() was deprecated in 3.7 and removed in Python 3.12;
    # use an SSLContext configured for server-side TLS instead.
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certfile=certificate_file, keyfile=private_key_file)
    httpd.socket = context.wrap_socket(httpd.socket, server_side=True)
    print('https server is running as reverse proxy')
    httpd.serve_forever()
# Script entry point: parse CLI arguments and run the proxy server.
if __name__ == '__main__':
    main()