Lines of
samesite.py
from check-in d1fa9d0737
that are changed by the sequence of edits moving toward
check-in 82969b1fc2:
1: #!/usr/bin/env python
2:
3: from __future__ import unicode_literals, print_function
4:
5: import bsddb.dbshelve, copy, datetime, os, BaseHTTPServer, sys, spacemap, re, urllib2
6:
class Config:
    # Runtime configuration: command-line options plus an INI-style config
    # file.  Missing values are substituted lazily in __getitem__.
    __slots__ = frozenset(['_config', '_default', '_section', 'options', 'root'])
    # Built-in defaults: 'general' applies to the server itself, '_other'
    # supplies fallbacks for every per-site section.
    _default = {
        'general': {
            'port': '8008',
        },
        '_other': {
            'verbose': 'no',
            'noetag': 'no',
            'noparts': 'no',
            'strip': '',
            'sub': '',
        },}

    # function to read in config file
    def __init__(self):
        """Parse the command line, read the config file and normalize every
        section's 'dir' and 'root' options (Python 2 ConfigParser/optparse)."""
        import ConfigParser, optparse

        parser = optparse.OptionParser()
        parser.add_option('-c', '--config', dest = 'config', help = 'config file location', metavar = 'FILE', default = 'samesite.conf')
        (self.options, args) = parser.parse_args()

        assert os.access(self.options.config, os.R_OK), "Fatal error: can't read {}".format(self.options.config)

        # the directory holding the config file doubles as the data root;
        # fall back to the current working directory for a bare file name
        configDir = re.compile('^(.*)/[^/]+$').match(self.options.config)
        if configDir:
            self.root = configDir.group(1)
        else:
            self.root = os.getcwd()

        self._config = ConfigParser.ConfigParser()
        self._config.readfp(open(self.options.config))

        # normalize every non-'general' section:
        # - a 'dir' of exactly "/" is replaced by <root>/<section>
        # - a trailing slash is stripped
        # - a relative 'dir' is prefixed with the root
        # - a missing 'dir' defaults to <root>/<section>
        # - a missing 'root' defaults to the section name
        for section in self._config.sections():
            if section != 'general':
                if self._config.has_option(section, 'dir'):
                    if re.compile('^/$').match(self._config.get(section, 'dir')):
                        self._config.set(section, 'dir', self.root + os.sep + section)
                    thisDir = re.compile('^(.*)/$').match(self._config.get(section, 'dir'))
                    if thisDir:
                        self._config.set(section, 'dir', thisDir.group(1))
                    if not re.compile('^/(.*)$').match(self._config.get(section, 'dir')):
                        self._config.set(section, 'dir', self.root + os.sep + self._config.get(section, 'dir'))
                else:
                    self._config.set(section, 'dir', self.root + os.sep + section)

                if not self._config.has_option(section, 'root'):
                    self._config.set(section, 'root', section)

    # function to select config file section or create one
    def section(self, section):
        """Make *section* the current section, creating it if absent."""
        if not self._config.has_section(section):
            self._config.add_section(section)
        self._section = section

    # function to get config parameter, if parameter doesn't exists the default
    # value or None is substituted
    def __getitem__(self, name):
        """Return option *name* from the current section, filling in a
        default (or None) the first time it is requested."""
        if not self._config.has_option(self._section, name):
            if self._section in self._default:
                if name in self._default[self._section]:
                    self._config.set(self._section, name, self._default[self._section][name])
                else:
                    self._config.set(self._section, name, None)
            elif name in self._default['_other']:
                self._config.set(self._section, name, self._default['_other'][name])
            else:
                # no default anywhere: cache None so repeated lookups are stable
                self._config.set(self._section, name, None)
        return(self._config.get(self._section, name))
76:
# module-level configuration, read once at import time
config = Config()

#assert options.port or os.access(options.log, os.R_OK), 'Log file unreadable'

# upstream response headers that describe the cached entity and are
# recorded in the on-disk index
const_desc_fields = set(['content-length', 'last-modified', 'pragma'])
# upstream response headers that are deliberately ignored
const_ignore_fields = set([
    'accept-ranges', 'age',
    'cache-control', 'connection', 'content-type',
    'date',
    'expires',
    'referer',
    'server',
    'via',
    'x-cache', 'x-cache-lookup', 'x-livetool', 'x-powered-by',
])

# chunk size in bytes used by both the download and the serving loops
block_size = 4096
94:
class MyRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Caching HTTP proxy handler: serves files from a local mirror
    directory, (re)fetching them from the configured upstream on demand.
    GET and HEAD share one implementation, __process."""

    def __process(self):
        """Check the cache for self.path, refresh it from upstream when
        needed, then answer the request from the local copy."""
        # reload means file needs to be reloaded to serve request
        reload = False
        # recheck means file needs to be checked, this also means that if file hav been modified we can serve older copy
        recheck = False
        # file_stat means file definitely exists
        file_stat = None
        # requested_ranges holds data about any range requested
        requested_ranges = None
        # records holds data from index locally, should be written back upon successfull completion
        record = None

        # strip any query string from the request path
        myPath = re.compile(r'^(.*?)(\?.*)$').match(self.path)
        if myPath:
            my_path = myPath.group(1)
        else:
            my_path = self.path

        # per-host configuration section
        config.section(self.headers['host'])

        # optional path rewriting: strip -> sub regex substitution
        if config['sub'] != None and config['strip'] != None and len(config['strip']) > 0:
            string = re.compile(config['strip']).sub(config['sub'], my_path)
            my_path = string

        info = 'Checking file: ' + my_path

        if not os.access(config['dir'], os.X_OK):
            os.mkdir(config['dir'])
        # this is file index - everything is stored in this file
        # _parts - list of stored parts of file
        # _time - last time the file was checked
        # everything else is just the headers
        index = bsddb.dbshelve.open(config['dir'] + os.sep + '.index')

        desc_fields = const_desc_fields.copy()
        ignore_fields = const_ignore_fields.copy()
        if config['noetag'] == 'no':
            desc_fields.add('etag')
        else:
            ignore_fields.add('etag')

        # request headers that are never forwarded upstream
        proxy_ignored = set([
            'accept', 'accept-charset', 'accept-encoding', 'accept-language',
            'cache-control', 'connection', 'content-length', 'cookie',
            'host',
            'if-modified-since', 'if-unmodified-since',
            'referer',
            'user-agent',
            'via',
            'x-forwarded-for', 'x-last-hr', 'x-last-http-status-code', 'x-removed', 'x-real-ip', 'x-retry-count',
        ])

        print('===============[ {} request ]==='.format(self.command))

        for header in self.headers:
            if header in proxy_ignored:
                pass
            # NOTE: these were `in ('range')` / `in ('pragma')` — substring
            # tests on a string, so e.g. the 'age' header matched 'range';
            # fixed to one-element tuples.
            elif header in ('range',):
                isRange = re.compile(r'bytes=(\d+)-(\d+)').match(self.headers[header])
                if isRange:
                    requested_ranges = spacemap.SpaceMap({int(isRange.group(1)): int(isRange.group(2)) + 1})
                else:
                    # unsupported range syntax: drop the request
                    return()
            elif header in ('pragma',):
                if my_path in index:
                    # NOTE(review): mutating the value returned by a shelve
                    # lookup may not persist — verify against dbshelve docs
                    index[my_path][header] = self.headers[header]
            else:
                print('Unknown header - ', header, ': ', self.headers[header], sep='')
                return()
            print(header, self.headers[header])

        # creating file name from my_path
        file_name = config['dir'] + os.sep + re.compile('%20').sub(' ', my_path)
        # partial file or unfinished download
        temp_name = config['dir'] + os.sep + '.parts' + re.compile('%20').sub(' ', my_path)

        # creating empty placeholder in index
        # if there's no space map and there's no file in real directory - we have no file
        # if there's an empty space map - file is full
        # space map generally covers every bit of file we don't posess currently
        if not my_path in index:
            info += '\nThis one is new.'
            reload = True
            record = {}
        else:
            # forcibly checking file if no file present
            record = index[my_path]
            if os.access(file_name, os.R_OK):
                info += '\nFull file found.'
                file_stat = os.stat(file_name)
            elif '_parts' in index[my_path] and os.access(temp_name, os.R_OK):
                info += '\nPartial file found.'
                file_stat = os.stat(temp_name)
                recheck = True
            else:
                info += '\nFile not found or inaccessible.'
                record['_parts'] = None
                reload = True

        if not '_parts' in record:
            record['_parts'] = None

        if record['_parts'] == None:
            recheck = True

        # forcibly checking file if file size doesn't match with index data
        if not reload:
            if '_parts' in record and record['_parts'] == spacemap.SpaceMap():
                if 'content-length' in record and file_stat and file_stat.st_size != int(record['content-length']):
                    info += '\nFile size is {} and stored file size is {}.'.format(file_stat.st_size, record['content-length'])
                    record['_parts'] = None
                    reload = True

        # forcibly checking file if index holds Pragma header
        if not reload and 'pragma' in record and record['pragma'] == 'no-cache':
            info += '\nPragma on: recheck imminent.'
            recheck = True

        # recheck the file if the last check was more than 4 hours ago
        if not recheck and not reload and '_time' in record and (record['_time'] - datetime.datetime.now() + datetime.timedelta(hours = 4)).days < 0:
            info += '\nFile is old - rechecking.'
            recheck = True

        print(info)
        if reload or recheck:

            try:
                request = 'http://' + config['root'] + self.path
                my_headers = {}
                for header in ('cache-control', 'cookie', 'referer', 'user-agent'):
                    if header in self.headers:
                        my_headers[header] = self.headers[header]

                # work out which byte ranges we still need from upstream
                needed = None
                if self.command not in ('HEAD',):
                    if '_parts' in record and record['_parts'] != None:
                        if config['noparts'] != 'no' or requested_ranges == None or requested_ranges == spacemap.SpaceMap():
                            needed = record['_parts']
                        else:
                            needed = record['_parts'] & requested_ranges
                    elif config['noparts'] == 'no' and requested_ranges != None and requested_ranges != spacemap.SpaceMap():
                        needed = requested_ranges
                    ranges = ()
                    print('Missing ranges: {}, requested ranges: {}, needed ranges: {}.'.format(record['_parts'], requested_ranges, needed))
                    if needed != None and len(needed) > 0:
                        needed.rewind()
                        while True:
                            # was `range`, which shadowed the builtin
                            span = needed.pop()
                            if span[0] == None:
                                break
                            ranges += '{}-{}'.format(span[0], span[1] - 1),
                        my_headers['range'] = 'bytes=' + ','.join(ranges)

                my_headers['Accept-Encoding'] = 'gzip'
                request = urllib2.Request(request, headers = my_headers)

                source = urllib2.urlopen(request, timeout = 60)
                new_record = {}
                new_record['_parts'] = record['_parts']
                headers = source.info()

                # transparently decompress a gzipped upstream response
                if 'content-encoding' in headers and headers['content-encoding'] == 'gzip':
                    import gzip
                    source = gzip.GzipFile(fileobj=source)

                # stripping unneeded headers (XXX make this inplace?)
                for header in headers:
                    if header in desc_fields:
                        #if header == 'Pragma' and headers[header] != 'no-cache':
                        if header == 'content-length':
                            # only trust content-length when there is no
                            # content-range (partial responses carry the
                            # full size in content-range instead)
                            if 'content-range' not in headers:
                                new_record[header] = int(headers[header])
                        else:
                            new_record[header] = headers[header]
                    elif header == 'content-range':
                        # was `range`, which shadowed the builtin
                        crange = re.compile(r'^bytes (\d+)-(\d+)/(\d+)$').match(headers[header])
                        if crange:
                            new_record['content-length'] = int(crange.group(3))
                        else:
                            assert False, 'Content-Range unrecognized.'
                    elif not header in ignore_fields:
                        print('Undefined header "', header, '": ', headers[header], sep='')

                # comparing headers with data found in index
                # if any header has changed (except Pragma) file is fully downloaded
                # same if we get more or less headers
                old_keys = set(record.keys())
                old_keys.discard('_time')
                old_keys.discard('pragma')
                more_keys = set(new_record.keys()) - old_keys
                more_keys.discard('pragma')
                less_keys = old_keys - set(new_record.keys())
                if len(more_keys) > 0:
                    if len(old_keys) != 0:
                        print('More headers appear:', more_keys)
                    reload = True
                elif len(less_keys) > 0:
                    # TODO(review): the comment above says fewer headers
                    # should also force a reload, but reload is not set here
                    print('Less headers appear:', less_keys)
                else:
                    for key in record.keys():
                        if key[0] != '_' and key != 'pragma' and record[key] != new_record[key]:
                            print('Header "', key, '" changed from [', record[key], '] to [', new_record[key], ']', sep='')
                            print(type(record[key]), type(new_record[key]))
                            reload = True

                if reload:
                    print('Reloading.')
                    if os.access(temp_name, os.R_OK):
                        os.unlink(temp_name)
                    if os.access(file_name, os.R_OK):
                        os.unlink(file_name)
                    if 'content-length' in new_record:
                        # everything is missing: map covers the whole file
                        new_record['_parts'] = spacemap.SpaceMap({0: int(new_record['content-length'])})
                if not new_record['_parts']:
                    new_record['_parts'] = spacemap.SpaceMap()
                print(new_record)

                # downloading file or segment
                if 'content-length' in new_record:
                    if needed == None:
                        needed = new_record['_parts']
                    else:
                        if len(needed) > 1:
                            print("Multipart requests currently not supported.")
                            assert False, 'Skip this one for now.'
                #else:
                    #assert False, 'No content-length or Content-Range header.'

                new_record['_time'] = datetime.datetime.now()
                if self.command not in ('HEAD',):
                    # file is created at temporary location and moved in place only when download completes
                    if not os.access(temp_name, os.R_OK):
                        empty_name = config['dir'] + os.sep + '.tmp'
                        with open(empty_name, 'w+b') as some_file:
                            pass
                        os.renames(empty_name, temp_name)
                    temp_file = open(temp_name, 'r+b')
                    if requested_ranges == None and needed == None:
                        needed = new_record['_parts']
                    needed.rewind()
                    while True:
                        # XXX can make this implicit - one request per range
                        (start, end) = needed.pop()
                        if start == None:
                            break
                        stream_last = start
                        old_record = copy.copy(new_record)
                        if end - start < block_size:
                            req_block_size = end - start
                        else:
                            req_block_size = block_size
                        buffer = source.read(req_block_size)
                        length = len(buffer)
                        while length > 0 and stream_last < end:
                            stream_pos = stream_last + length
                            assert stream_pos <= end, 'Received more data than requested: pos:{} start:{} end:{}.'.format(stream_pos, start, end)
                            temp_file.seek(stream_last)
                            temp_file.write(buffer)
                            # mark the freshly written span as present
                            # (a dead duplicate `x = ...` of this line was removed)
                            new_record['_parts'] = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos})
                            # persist the previous consistent state so a
                            # crash mid-write never records unwritten data
                            index[my_path] = old_record
                            index.sync()
                            old_record = copy.copy(new_record)
                            stream_last = stream_pos
                            if end - stream_last < block_size:
                                req_block_size = end - stream_last
                            buffer = source.read(req_block_size)
                            length = len(buffer)
                    # moving downloaded data to real file
                    temp_file.close()

                index[my_path] = new_record
                index.sync()

            except urllib2.HTTPError as error:
                # in case of error we don't need to do anything actually,
                # if file download stalls or fails the file would not be moved to it's location
                print(error)

        print(index[my_path])

        # an empty SpaceMap means nothing is missing: promote the temp file
        if not os.access(file_name, os.R_OK) and os.access(temp_name, os.R_OK) and '_parts' in index[my_path] and index[my_path]['_parts'] == spacemap.SpaceMap():
            # just moving
            # drop old dirs XXX
            print('Moving temporary file to new destination.')
            os.renames(temp_name, file_name)

        if not my_path in index:
            # nothing cached and nothing fetched: bad gateway
            self.send_response(502)
            self.end_headers()
            return

        if self.command == 'HEAD':
            self.send_response(200)
            if 'content-length' in index[my_path]:
                self.send_header('content-length', index[my_path]['content-length'])
            self.send_header('accept-ranges', 'bytes')
            self.send_header('content-type', 'application/octet-stream')
            if 'last-modified' in index[my_path]:
                self.send_header('last-modified', index[my_path]['last-modified'])
            self.end_headers()
        else:
            # serve from the partial file when the download is incomplete
            if ('_parts' in index[my_path] and index[my_path]['_parts'] != spacemap.SpaceMap()) or not os.access(file_name, os.R_OK):
                file_name = temp_name

            with open(file_name, 'rb') as real_file:
                file_stat = os.stat(file_name)
                if 'range' in self.headers:
                    self.send_response(206)
                    ranges = ()
                    requested_ranges.rewind()
                    while True:
                        pair = requested_ranges.pop()
                        if pair[0] == None:
                            break
                        ranges += '{}-{}'.format(pair[0], str(pair[1] - 1)),
                    self.send_header('content-range', 'bytes {}/{}'.format(','.join(ranges), index[my_path]['content-length']))
                else:
                    self.send_response(200)
                    self.send_header('content-length', str(file_stat.st_size))
                    # no Range header: serve the whole file
                    requested_ranges = spacemap.SpaceMap({0: file_stat.st_size})
                if 'last-modified' in index[my_path]:
                    self.send_header('last-modified', index[my_path]['last-modified'])
                self.send_header('content-type', 'application/octet-stream')
                self.end_headers()
                if self.command in ('GET',):
                    if len(requested_ranges) > 0:
                        requested_ranges.rewind()
                        (start, end) = requested_ranges.pop()
                    else:
                        start = 0
                        # XXX ugly hack
                        if 'content-length' in index[my_path]:
                            end = index[my_path]['content-length']
                        else:
                            end = 0
                    # stream the selected span in block_size chunks
                    real_file.seek(start)
                    if block_size > end - start:
                        req_block_size = end - start
                    else:
                        req_block_size = block_size
                    buffer = real_file.read(req_block_size)
                    length = len(buffer)
                    while length > 0:
                        self.wfile.write(buffer)
                        start += len(buffer)
                        if req_block_size > end - start:
                            req_block_size = end - start
                        if req_block_size == 0:
                            break
                        buffer = real_file.read(req_block_size)
                        length = len(buffer)

    def do_HEAD(self):
        return self.__process()
    def do_GET(self):
        return self.__process()
453:
# entry point: bind the configured port on localhost and serve forever
config.section('general')
server = BaseHTTPServer.HTTPServer(('127.0.0.1', int(config['port'])), MyRequestHandler)
server.serve_forever()