Samesite - proxy that can cache partial transfers

Differences From Artifact [858f04fa41] To Artifact [3c922437dd]:

From [858f04fa41] (lines 1-25):

#!/usr/bin/env python

from __future__ import unicode_literals, print_function

import bsddb.dbshelve, copy, datetime, os, BaseHTTPServer, sys, spacemap, re, urllib2

class Config:
	__slots__ = frozenset(['_config', '_default', '_section', 'options', 'root'])
	_default = {
		'general': {
			'port': '8008',
		},
		'_other': {
			'verbose': 'no',
			'noetag': 'no',
			'noparts': 'no',
			'strip': '',
			'sub': '',
	},}

	# function to read in config file
	def __init__(self):
		import ConfigParser, optparse

		parser = optparse.OptionParser()

To [3c922437dd] (lines 1-29):

#!/usr/bin/env python

from __future__ import unicode_literals, print_function

#import gevent.monkey
#gevent.monkey.patch_all()

import bsddb.dbshelve, copy, datetime, os, BaseHTTPServer, sys, spacemap, re, urllib2

class Config:
	__slots__ = frozenset(['_config', '_default', '_section', 'options', 'root'])
	_default = {
		'general': {
			'port': '8008',
		},
		'_other': {
			'verbose': 'no',
			'noetag': 'no',
			'noparts': 'no',
			'strip': '',
			'sub': '',
			'proto': 'http',
	},}

	# function to read in config file
	def __init__(self):
		import ConfigParser, optparse

		parser = optparse.OptionParser()
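
The commented-out gevent lines added in the new artifact (together with the trailing #gevent.joinall()) sketch a cooperative-I/O variant. If they were enabled, the monkey patch would have to run before the blocking stdlib modules are imported; a minimal sketch of that ordering, assuming gevent is installed (not part of either artifact):

# hypothetical: what enabling the commented gevent lines would look like;
# patch_all() replaces blocking socket/thread primitives and must run first
import gevent.monkey
gevent.monkey.patch_all()

# only import the socket-using modules after patching
import BaseHTTPServer, urllib2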

From [858f04fa41] (lines 86-100):

	'expires',
	'referer',
	'server',
	'via',
	'x-cache', 'x-cache-lookup', 'x-livetool', 'x-powered-by',
])

block_size = 4096

class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
	def __process(self):
		# reload means file needs to be reloaded to serve request
		reload = False
		# recheck means the file needs to be checked; this also means that if the file has been modified we can serve the older copy
		recheck = False

To [3c922437dd] (lines 90-104):

	'expires',
	'referer',
	'server',
	'via',
	'x-cache', 'x-cache-lookup', 'x-livetool', 'x-powered-by',
])

block_size = 8192

class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
	def __process(self):
		# reload means file needs to be reloaded to serve request
		reload = False
		# recheck means the file needs to be checked; this also means that if the file has been modified we can serve the older copy
		recheck = False

From [858f04fa41] (lines 136-150):

		proxy_ignored = set([
			'accept', 'accept-charset', 'accept-encoding', 'accept-language',
			'cache-control', 'connection', 'content-length', 'cookie',
			'host',
			'if-modified-since', 'if-unmodified-since',
			'referer',
			'user-agent',
			'via',
			'x-forwarded-for', 'x-last-hr', 'x-last-http-status-code', 'x-removed', 'x-real-ip', 'x-retry-count',
		])

		print('===============[ {} request ]==='.format(self.command))

		for header in self.headers:

To [3c922437dd] (lines 140-154):

		proxy_ignored = set([
			'accept', 'accept-charset', 'accept-encoding', 'accept-language',
			'cache-control', 'connection', 'content-length', 'cookie',
			'host',
			'if-modified-since', 'if-unmodified-since',
			'referer',
			'ua-cpu', 'user-agent',
			'via',
			'x-forwarded-for', 'x-last-hr', 'x-last-http-status-code', 'x-removed', 'x-real-ip', 'x-retry-count',
		])

		print('===============[ {} request ]==='.format(self.command))

		for header in self.headers:

From [858f04fa41] (lines 216-230):

			info += '\nFile is old - rechecking.'
			recheck = True

		print(info)
		if reload or recheck:

			try:
				request = 'http://' + config['root'] + self.path
				my_headers = {}
				for header in ('cache-control', 'cookie', 'referer', 'user-agent'):
					if header in self.headers:
						my_headers[header] = self.headers[header]

				needed = None
				if self.command not in ('HEAD'):

To [3c922437dd] (lines 220-234):

			info += '\nFile is old - rechecking.'
			recheck = True

		print(info)
		if reload or recheck:

			try:
				request = config['proto'] + '://' + config['root'] + self.path
				my_headers = {}
				for header in ('cache-control', 'cookie', 'referer', 'user-agent'):
					if header in self.headers:
						my_headers[header] = self.headers[header]

				needed = None
				if self.command not in ('HEAD'):
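
The 'proto' option added to the _default dict in the first hunk is what feeds the config['proto'] + '://' + config['root'] URL built here. As a rough illustration of the kind of INI file ConfigParser would read for this, here is a hypothetical example parsed with the stdlib; the section name and the root value are invented, only 'port' and 'proto' come from the diff:

# hypothetical config layout; section/option values are illustrative only
from ConfigParser import RawConfigParser
from StringIO import StringIO

sample = """\
[general]
port = 8008

[mirror.example.org]
root = mirror.example.org
proto = https
"""

cfg = RawConfigParser()
cfg.readfp(StringIO(sample))
print(cfg.get('mirror.example.org', 'proto'))   # https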

From [858f04fa41] (lines 242-256):

						while True:
							range = needed.pop()
							if range[0] == None:
								break
							ranges += '{}-{}'.format(range[0], range[1] - 1),
						my_headers['range'] = 'bytes=' + ','.join(ranges)

				my_headers['Accept-Encoding'] = 'gzip'
				request = urllib2.Request(request, headers = my_headers)

				source = urllib2.urlopen(request, timeout = 60)
				new_record = {}
				new_record['_parts'] = record['_parts']
				headers = source.info()

To [3c922437dd] (lines 246-260):

						while True:
							range = needed.pop()
							if range[0] == None:
								break
							ranges += '{}-{}'.format(range[0], range[1] - 1),
						my_headers['range'] = 'bytes=' + ','.join(ranges)

				my_headers['Accept-Encoding'] = 'gzip, compress, deflate, identity; q=0'
				request = urllib2.Request(request, headers = my_headers)

				source = urllib2.urlopen(request, timeout = 60)
				new_record = {}
				new_record['_parts'] = record['_parts']
				headers = source.info()
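
The loop above (unchanged in this hunk apart from the Accept-Encoding line) turns the 'needed' intervals into a single multi-range Range header, converting half-open [start, end) spans into HTTP's inclusive byte offsets. A standalone sketch of the same construction; the helper name and the plain-list input are illustrative, not the spacemap API:

def build_range_header(intervals):
	# intervals are half-open (start, end) pairs, as in the loop above;
	# HTTP byte ranges are inclusive, hence the end - 1
	ranges = []
	for start, end in intervals:
		ranges.append('{}-{}'.format(start, end - 1))
	return 'bytes=' + ','.join(ranges)

# example: two missing blocks of a cached file
# build_range_header([(0, 8192), (16384, 32768)]) == 'bytes=0-8191,16384-32767'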

From [858f04fa41] (lines 450-456):

		return self.__process()
	def do_GET(self):
		return self.__process()

config.section('general')
server = BaseHTTPServer.HTTPServer(('127.0.0.1', int(config['port'])), MyRequestHandler)
server.serve_forever()

To [3c922437dd] (lines 454-462):

		return self.__process()
	def do_GET(self):
		return self.__process()

config.section('general')
server = BaseHTTPServer.HTTPServer(('127.0.0.1', int(config['port'])), MyRequestHandler)
server.serve_forever()

#gevent.joinall()
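
Finally, the server binds to 127.0.0.1 on the configured port (8008 by default). A hypothetical client request against a running instance follows; the path is invented and the handler's treatment of a client-supplied Range lies outside the hunks shown, so treat this purely as an illustration of talking to the listening address:

# hypothetical client, not part of the repository
import urllib2

req = urllib2.Request('http://127.0.0.1:8008/pub/some-large-file.iso',
	headers = {'Range': 'bytes=0-1048575'})   # first megabyte only
resp = urllib2.urlopen(req, timeout = 60)
chunk = resp.read()
print('{} bytes, HTTP {}'.format(len(chunk), resp.getcode()))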