Samesite - proxy that can cache partial transfers

Check-in [82969b1fc2]
anonymous

Check-in [82969b1fc2]

Overview
Comment: ua-xpu header
Downloads: Tarball | ZIP archive | SQL archive
Timelines: family | ancestors | descendants | both | master | trunk
Files: files | file ages | folders
SHA3-256: 82969b1fc2909a99d99054fb4757738e6460d0f01da386e056ce8bec75c4c7da
User & Date: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 on 2012-01-25 08:36:38.000
Other Links: branch diff | manifest | tags
Context
2012-07-03
10:19
svn:eol-style sample config externals check-in: 35eb7bda31 user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk
2012-01-25
08:36
ua-xpu header check-in: 82969b1fc2 user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk
2012-01-20
10:30
taking out real values, some comments added check-in: 2af344df7b user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk
Changes
1
2
3
4



5
6
7
8
9
10
11
12
13
14
15
16
17
18

19
20
21
22
23
24
25
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29




+
+
+














+







#!/usr/bin/env python

from __future__ import unicode_literals, print_function

#import gevent.monkey
#gevent.monkey.patch_all()

import bsddb.dbshelve, copy, datetime, os, BaseHTTPServer, sys, spacemap, re, urllib2

class Config:
	__slots__ = frozenset(['_config', '_default', '_section', 'options', 'root'])
	_default = {
		'general': {
			'port': '8008',
		},
		'_other': {
			'verbose': 'no',
			'noetag': 'no',
			'noparts': 'no',
			'strip': '',
			'sub': '',
			'proto': 'http',
	},}

	# function to read in config file
	def __init__(self):
		import ConfigParser, optparse

		parser = optparse.OptionParser()
86
87
88
89
90
91
92
93

94
95
96
97
98
99
100
90
91
92
93
94
95
96

97
98
99
100
101
102
103
104







-
+







	'expires',
	'referer',
	'server',
	'via',
	'x-cache', 'x-cache-lookup', 'x-livetool', 'x-powered-by',
])

block_size = 4096
block_size = 8192

class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
	def __process(self):
		# reload means file needs to be reloaded to serve request
		reload = False
		# recheck means the file needs to be checked; this also means that if the file has been modified we can serve the older copy
		recheck = False
136
137
138
139
140
141
142
143

144
145
146
147
148
149
150
140
141
142
143
144
145
146

147
148
149
150
151
152
153
154







-
+








		proxy_ignored = set([
			'accept', 'accept-charset', 'accept-encoding', 'accept-language',
			'cache-control', 'connection', 'content-length', 'cookie',
			'host',
			'if-modified-since', 'if-unmodified-since',
			'referer',
			'user-agent',
			'ua-cpu', 'user-agent',
			'via',
			'x-forwarded-for', 'x-last-hr', 'x-last-http-status-code', 'x-removed', 'x-real-ip', 'x-retry-count',
		])

		print('===============[ {} request ]==='.format(self.command))

		for header in self.headers:
216
217
218
219
220
221
222
223

224
225
226
227
228
229
230
220
221
222
223
224
225
226

227
228
229
230
231
232
233
234







-
+







			info += '\nFile is old - rechecking.'
			recheck = True

		print(info)
		if reload or recheck:

			try:
				request = 'http://' + config['root'] + self.path
				request = config['proto'] + '://' + config['root'] + self.path
				my_headers = {}
				for header in ('cache-control', 'cookie', 'referer', 'user-agent'):
					if header in self.headers:
						my_headers[header] = self.headers[header]

				needed = None
				if self.command not in ('HEAD'):
242
243
244
245
246
247
248
249

250
251
252
253
254
255
256
246
247
248
249
250
251
252

253
254
255
256
257
258
259
260







-
+







						while True:
							range = needed.pop()
							if range[0] == None:
								break
							ranges += '{}-{}'.format(range[0], range[1] - 1),
						my_headers['range'] = 'bytes=' + ','.join(ranges)

				my_headers['Accept-Encoding'] = 'gzip'
				my_headers['Accept-Encoding'] = 'gzip, compress, deflate, identity; q=0'
				request = urllib2.Request(request, headers = my_headers)

				source = urllib2.urlopen(request, timeout = 60)
				new_record = {}
				new_record['_parts'] = record['_parts']
				headers = source.info()

450
451
452
453
454
455
456


454
455
456
457
458
459
460
461
462







+
+
		return self.__process()
	def do_GET(self):
		# Delegate GET handling to the shared private request logic and
		# forward its return value unchanged. (__process is defined earlier
		# in this class; its return value is not visible here — do not drop it.)
		return self.__process()

# Script entry point: bind the caching proxy to localhost on the configured
# port and serve requests until interrupted.
config.section('general')
listen_port = int(config['port'])
server = BaseHTTPServer.HTTPServer(('127.0.0.1', listen_port), MyRequestHandler)
server.serve_forever()

#gevent.joinall()