Overview
Comment: | multiconfiguration - working with many sites at once; fullfile - avoiding partial transfers; Pragma support in requests; files are checked both in full dir and part dir; file is moved to full dir even if it's left untouched |
---|---|
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | master | trunk |
Files: | files | file ages | folders |
SHA3-256: |
e7b837a681cc6fc40d88be1e5167cc8b |
User & Date: | c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 on 2010-08-25 15:16:41.000 |
Other Links: | branch diff | manifest | tags |
Context
2010-08-26
| ||
15:06 | optimized remains detection check-in: b0975a28fb user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk | |
2010-08-25
| ||
15:16 | multiconfiguration - working with many sites at once; fullfile - avoiding partial transfers; Pragma support in requests; files are checked both in full dir and part dir; file is moved to full dir even if it's left untouched check-in: e7b837a681 user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk | |
2010-08-21
| ||
10:59 | added license and todo; many fixes in web server, now it's mostly functional; web server was tested by Windows Update (win7, WinXP, Win2003) check-in: fb10031536 user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 tags: master, trunk | |
Changes
Modified samesite.py
from [a1a6f68de7]
to [f1c4262f2e].
1 2 | #!/usr/bin/env python3.1 | | > > > > > > > > > > > | > > > | < < < | < < < | > | > > > > | | > > | > > > > > > > > > > > > > > | > | > > | > > > > > > > > > > > > > > | > | > | < < | | < < < < < < < | 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 | #!/usr/bin/env python3.1 import datetime, http.cookiejar, os, sys, shelve, spacemap, re, urllib.request class Config: __slots__ = frozenset(['_config', '_default', '_section', 'options', 'root']) _default = { 'general': { 'port': '8008', }, '_other': { 'verbose': 'no', 'noetag': 'no', 'noparts': 'no', },} # function to read in config file def __init__(self): import configparser, optparse parser = optparse.OptionParser() parser.add_option('-c', '--config', dest = 'config', help = 'config file location', metavar = 'FILE', default = 'samesite.conf') (self.options, args) = parser.parse_args() assert os.access(self.options.config, os.R_OK), "Fatal error: can't read {}".format(self.options.config) configDir = re.compile('^(.*)/[^/]+$').match(self.options.config) if configDir: self.root = configDir.group(1) else: self.root = os.getcwd() self._config = configparser.ConfigParser() self._config.readfp(open(self.options.config)) for section in self._config.sections(): if section != 'general': if self._config.has_option(section, 'dir'): if re.compile('^/$').match(self._config.get(section, 'dir')): self._config.set(section, 'dir', self.root + os.sep + section) thisDir = re.compile('^(.*)/$').match(self._config.get(section, 'dir')) if thisDir: self._config.set(section, 'dir', thisDir.group(1)) if not re.compile('^/(.*)$').match(self._config.get(section, 'dir')): self._config.set(section, 'dir', self.root + os.sep + self._config.get(section, 'dir')) else: self._config.set(section, 'dir', self.root + os.sep + section) if not 
self._config.has_option(section, 'root'): self._config.set(section, 'root', section) # function to select config file section or create one def section(self, section): if not self._config.has_section(section): self._config.add_section(section) self._section = section # function to get config parameter, if parameter doesn't exists the default # value or None is substituted def __getitem__(self, name): if not self._config.has_option(self._section, name): if self._section in self._default: if name in self._default[self._section]: self._config.set(self._section, name, self._default[self._section][name]) else: self._config.set(self._section, name, None) elif name in self._default['_other']: self._config.set(self._section, name, self._default['_other'][name]) else: self._config.set(self._section, name, None) return(self._config.get(self._section, name)) config = Config() #assert options.port or os.access(options.log, os.R_OK), 'Log file unreadable' const_desc_fields = set(['Content-Length', 'Pragma', 'Last-Modified']) const_ignore_fields = set(['Accept-Ranges', 'Age', 'Cache-Control', 'Connection', 'Content-Type', 'Date', 'Expires', 'Server', 'Via', 'X-Cache', 'X-Cache-Lookup', 'X-Powered-By']) block_size = 4096 ''' # later, kqueue would be good but later class Connection: __slots__ = frozenset(('__address', '__input', '__socket', '__status', 'error', 'method', 'url', 'http_version')) def __init__(self, socket, address): self.__address = address |
︙ | ︙ | |||
136 137 138 139 140 141 142 | kq.control([select.kevent(kev.ident, select.KQ_FILTER_READ, select.KQ_EV_DELETE)], 0) kq.control([select.kevent(kev.ident, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE)], 0) del(connections[kev.ident]) finally: sock.close() ''' | > | | 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 | kq.control([select.kevent(kev.ident, select.KQ_FILTER_READ, select.KQ_EV_DELETE)], 0) kq.control([select.kevent(kev.ident, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE)], 0) del(connections[kev.ident]) finally: sock.close() ''' # XXX how about rechecking files? if True: import http.server class MyRequestHandler(http.server.BaseHTTPRequestHandler): def __process(self): # reload means file needs to be reloaded to serve request reload = False # recheck means file needs to be checked, this also means that if file hav been modified we can serve older copy |
︙ | ︙ | |||
158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 | info = 'Checking file: ' + self.path myPath = re.compile('^(.*?)(\?.*)$').match(self.path) if myPath: my_path = myPath.group(1) else: my_path = self.path proxy_ignored = ('Accept', 'Accept-Encoding', 'Cache-Control', 'Connection', 'Host', 'If-Modified-Since', 'If-Unmodified-Since', 'User-Agent', 'Via', 'X-Forwarded-For', ) print('===============[ {} request ]==='.format(self.command)) for header in self.headers: if header in proxy_ignored: pass elif header in ('Range'): isRange = re.compile('bytes=(\d+)-(\d+)').match(self.headers[header]) if isRange: | > > > > > > > > > > > > > > > > > | > > > | | | | | | > | | | > > > | 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 | info = 'Checking file: ' + self.path myPath = re.compile('^(.*?)(\?.*)$').match(self.path) if myPath: my_path = myPath.group(1) else: my_path = self.path config.section(self.headers['Host']) if not os.access(config['dir'], os.X_OK): os.mkdir(config['dir']) # this is file index - everything is stored in this file # _parts - list of stored parts of file # _time - last time the file was checked # everything else is just the headers index = shelve.open(config['dir'] + os.sep + '.index') desc_fields = const_desc_fields.copy() ignore_fields = const_ignore_fields.copy() if not config['noetag']: desc_fields.add('ETag') else: ignore_fields.add('ETag') proxy_ignored = ('Accept', 'Accept-Encoding', 'Cache-Control', 'Connection', 'Host', 'If-Modified-Since', 'If-Unmodified-Since', 'User-Agent', 
'Via', 'X-Forwarded-For', ) print('===============[ {} request ]==='.format(self.command)) for header in self.headers: if header in proxy_ignored: pass elif header in ('Range'): isRange = re.compile('bytes=(\d+)-(\d+)').match(self.headers[header]) if isRange: requested_ranges = spacemap.SpaceMap({int(isRange.group(1)): int(isRange.group(2)) + 1}) else: return() elif header in ('Pragma'): if my_path in index: index[my_path][header] = self.headers[header] else: print('Unknown header - ', header, ': ', self.headers[header], sep='') return() print(header, self.headers[header]) # creating empty placeholder in index # if there's no space map and there's no file in real directory - we have no file # if there's an empty space map - file is full # space map generally covers every bit of file we don't posess currently if not my_path in index: info += '\nThis one is new.' reload = True record = {} else: record = index[my_path] if not '_parts' in record: record['_parts'] = None # creating file name from my_path file_name = config['dir'] + os.sep + re.compile('%20').sub(' ', my_path) # partial file or unfinished download temp_name = config['dir'] + os.sep + '.parts' + re.compile('%20').sub(' ', my_path) # forcibly checking file if no file present if os.access(file_name, os.R_OK): file_stat = os.stat(file_name) elif '_parts' in record and os.access(temp_name, os.R_OK): file_stat = os.stat(temp_name) elif not reload: print(record) info += '\nFile not found or inaccessible.' 
record['_parts'] = None reload = True # forcibly checking file if file size doesn't match with index data if not reload: if '_parts' in record and record['_parts'] == spacemap.SpaceMap(): if 'Content-Length' in record and file_stat and file_stat.st_size != int(record['Content-Length']): info += '\nFile size is {} and stored file size is {}.'.format(file_stat.st_size, record['Content-Length']) reload = True # forcibly checking file if index holds Pragma header if not reload and 'Pragma' in record and record['Pragma'] == 'no-cache': info +='\nPragma on: recheck imminent.' recheck = True # skipping file processing if there's no need to recheck it and we have checked it at least 4 hours ago if not recheck and not reload and '_time' in record and (datetime.datetime.now() - datetime.timedelta(hours = 4) - record['_time']).days < 0: recheck = True print(info) if reload or recheck: try: request = 'http://' + config['root'] + my_path needed = None # XXX and if we specify full file we don't go partial? if requested_ranges != None: if '_parts' in record and record['_parts'] != None: needed = record['_parts'] & requested_ranges elif config['noparts']: needed = record['_parts'] else: needed = requested_ranges ranges = () print('Missing ranges: {}, requested ranges: {}, needed ranges: {}.'.format(record['_parts'], requested_ranges, needed)) if len(needed) > 0: needed.rewind() while True: |
︙ | ︙ | |||
303 304 305 306 307 308 309 | if reload: print('Reloading.') if os.access(temp_name, os.R_OK): os.unlink(temp_name) if os.access(file_name, os.R_OK): os.unlink(file_name) if new_record['_parts'] == None or reload: | | | | 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 | if reload: print('Reloading.') if os.access(temp_name, os.R_OK): os.unlink(temp_name) if os.access(file_name, os.R_OK): os.unlink(file_name) if new_record['_parts'] == None or reload: new_record['_parts'] = spacemap.SpaceMap({0: int(new_record['Content-Length'])}) print(new_record) # downloading file or segment if 'Content-Length' in new_record: if needed == None: needed = new_record['_parts'] else: if len(needed) > 1: print("Multipart requests currently not supported.") assert False, 'Skip this one for now.' else: assert False, 'No Content-Length or Content-Range header.' new_record['_time'] = datetime.datetime.now() if self.command not in ('HEAD'): # file is created at temporary location and moved in place only when download completes if not os.access(temp_name, os.R_OK): empty_name = config['dir'] + os.sep + '.tmp' with open(empty_name, 'w+b') as some_file: pass os.renames(empty_name, temp_name) temp_file = open(temp_name, 'r+b') needed.rewind() while True: (start, end) = needed.pop() |
︙ | ︙ | |||
344 345 346 347 348 349 350 | buffer = source.read(req_block_size) length = len(buffer) while length > 0 and stream_last < end: stream_pos = stream_last + length assert not stream_pos > end, 'Received more data then requested: pos:{} start:{} end:{}.'.format(stream_pos, start, end) temp_file.seek(stream_last) temp_file.write(buffer) | | < < < < < > > > > > > | | | 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 | buffer = source.read(req_block_size) length = len(buffer) while length > 0 and stream_last < end: stream_pos = stream_last + length assert not stream_pos > end, 'Received more data then requested: pos:{} start:{} end:{}.'.format(stream_pos, start, end) temp_file.seek(stream_last) temp_file.write(buffer) new_record['_parts'] = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos}) index[my_path] = old_record index.sync() old_record = new_record stream_last = stream_pos if end - stream_last < block_size: req_block_size = end - stream_last buffer = source.read(req_block_size) length = len(buffer) # moving downloaded data to real file temp_file.close() print(new_record) index[my_path] = new_record index.sync() except urllib.error.HTTPError as error: # in case of error we don't need to do anything actually, # if file download stalls or fails the file would not be moved to it's location print(error) if '_parts' in index[my_path] and index[my_path]['_parts'] == spacemap.SpaceMap(): # just moving # drop old dirs XXX print('Moving temporary file to new destination.') os.renames(temp_name, file_name) if self.command == 'HEAD': self.send_response(200) if 'Content-Length' in index[my_path]: self.send_header('Content-Length', index[my_path]['Content-Length']) self.send_header('Accept-Ranges', 'bytes') 
self.send_header('Content-Type', 'application/octet-stream') if 'Last-Modified' in index[my_path]: self.send_header('Last-Modified', index[my_path]['Last-Modified']) self.end_headers() else: if ('_parts' in index[my_path] and index[my_path]['_parts'] != spacemap.SpaceMap()) or not os.access(file_name, os.R_OK): file_name = temp_name with open(file_name, 'rb') as real_file: file_stat = os.stat(file_name) if 'Range' in self.headers: self.send_response(206) ranges = () requested_ranges.rewind() while True: pair = requested_ranges.pop() if pair[0] == None: break ranges += '{}-{}'.format(pair[0], str(pair[1] - 1)), self.send_header('Content-Range', 'bytes {}/{}'.format(','.join(ranges), index[my_path]['Content-Length'])) else: self.send_response(200) self.send_header('Content-Length', str(file_stat.st_size)) requested_ranges = spacemap.SpaceMap({0: file_stat.st_size}) self.send_header('Last-Modified', index[my_path]['Last-Modified']) self.send_header('Content-Type', 'application/octet-stream') self.end_headers() if self.command in ('GET'): if len(requested_ranges) > 0: requested_ranges.rewind() (start, end) = requested_ranges.pop() |
︙ | ︙ | |||
431 432 433 434 435 436 437 | length = len(buffer) def do_HEAD(self): return self.__process() def do_GET(self): return self.__process() | > | | 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 | length = len(buffer) def do_HEAD(self): return self.__process() def do_GET(self): return self.__process() config.section('general') server = http.server.HTTPServer(('127.0.0.1', int(config['port'])), MyRequestHandler) server.serve_forever() else: while True: unchecked_files = set() checked_files = 0 |
︙ | ︙ |