Overview
Comment: less logs, more headers, don't saturate disk with writes
SHA3-256: f57e6e032bfff0324eadefc3721aa2aa
User & Date: arcade@b1t.name on 2013-11-05 16:21:14.000
Context
2014-01-13
  17:25  DNT header  (check-in: 4fece04acc, user: arcade@b1t.name, tags: master, trunk)

2013-11-05
  16:21  less logs, more headers, don't saturate disk with writes  (check-in: f57e6e032b, user: arcade@b1t.name, tags: master, trunk)

2013-08-23
  12:00  better error handling  (check-in: 6cf3431e69, user: arcade@b1t.name, tags: master, trunk)
Changes
Modified samesite.py from [56275b3bbf] to [1b83a54cf0].
Before (lines 43-49):

        'Accept-Ranges', 'Age', 'Cache-Control',
        'Connection', 'Content-Type', 'Date', 'Expires',
        'Referer', 'Server', 'Via',

After (lines 43-57):

        'Accept-Ranges', 'Age', 'Cache-Control',
        'Connection', 'Content-Type', 'Date', 'Expires',
        'Referer', 'Server', 'Via',
        'X-CCC', 'X-CID',
        'X-Cache', 'X-Cache-Lookup',
        'X-Livetool', 'X-Powered-By',
    ])

    block_size = 8192

    import bsddb3.dbshelve, copy, datetime, http.server, spacemap, urllib.request, urllib.error

    class MyRequestHandler(http.server.BaseHTTPRequestHandler):
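The list above appears to be a whitelist of response headers that the proxy keeps when relaying or caching an upstream reply; this check-in extends it (the "more headers" part of the commit message). A minimal sketch of how such a frozenset whitelist is typically applied, assuming a hypothetical filter_headers() helper and the name want_headers, neither of which is shown in this hunk:

    want_headers = frozenset([
        'Accept-Ranges', 'Age', 'Cache-Control', 'Connection',
        'Content-Type', 'Date', 'Expires', 'Referer', 'Server', 'Via',
        'X-CCC', 'X-CID', 'X-Cache', 'X-Cache-Lookup',
        'X-Livetool', 'X-Powered-By',
    ])

    def filter_headers(source_headers):
        # Keep only whitelisted headers when copying an upstream response
        # to the client; everything else is dropped.
        return {name: value for name, value in source_headers.items()
                if name in want_headers}

A frozenset fits this use: membership tests are O(1) and the set cannot be modified at request time.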
Before (lines 296-322):

        with open(empty_name, 'w+b') as some_file:
            pass
        os.renames(empty_name, temp_name)
        temp_file = open(temp_name, 'r+b')
        if requested_ranges == None and needed == None:
            needed = new_record['_parts']
        needed.rewind()
        while True: # XXX can make this implicit - one request per range
            (start, end) = needed.pop()
            if start == None:
                break
            stream_last = start
            old_record = copy.copy(new_record)
            if end - start < block_size:
                req_block_size = end - start
            else:
                req_block_size = block_size
            buffer = source.read(req_block_size)
            length = len(buffer)
            while length > 0 and stream_last < end:
                stream_pos = stream_last + length
                assert stream_pos <= end, 'Received more data then requested: pos:{} start:{} end:{}.'.format(stream_pos, start, end)
                temp_file.seek(stream_last)
                temp_file.write(buffer)
                x = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos})
                new_record['_parts'] = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos})

After (lines 296-356):

        with open(empty_name, 'w+b') as some_file:
            pass
        os.renames(empty_name, temp_name)
        temp_file = open(temp_name, 'r+b')
        if requested_ranges == None and needed == None:
            needed = new_record['_parts']
        needed.rewind()
        countdown = 16
        while True: # XXX can make this implicit - one request per range
            (start, end) = needed.pop()
            if start == None:
                break
            stream_last = start
            old_record = copy.copy(new_record)
            if end - start < block_size:
                req_block_size = end - start
            else:
                req_block_size = block_size
            buffer = source.read(req_block_size)
            length = len(buffer)
            while length > 0 and stream_last < end:
                stream_pos = stream_last + length
                assert stream_pos <= end, 'Received more data then requested: pos:{} start:{} end:{}.'.format(stream_pos, start, end)
                temp_file.seek(stream_last)
                temp_file.write(buffer)
                x = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos})
                new_record['_parts'] = new_record['_parts'] - spacemap.SpaceMap({stream_last: stream_pos})
                countdown -= 1
                if countdown == 0:
                    index[my_path_b] = old_record
                    index.sync()
                    countdown = 16
                    old_record = copy.copy(new_record)
                stream_last = stream_pos
                if end - stream_last < block_size:
                    req_block_size = end - stream_last
                buffer = source.read(req_block_size)
                length = len(buffer)

        # moving downloaded data to real file
        temp_file.close()
        index[my_path_b] = new_record
        index.sync()

    except urllib.error.HTTPError as error:
        # in case of error we don't need to do anything actually,
        # if file download stalls or fails the file would not be moved to it's location
        self.send_response(error.code)
        self.end_headers()
        print(error, repr(my_headers))
        return

    #print(index[my_path_b])
    if not os.access(file_name, os.R_OK) and os.access(temp_name, os.R_OK) and '_parts' in index[my_path_b] and index[my_path_b]['_parts'] == spacemap.SpaceMap():
        # just moving
        # drop old dirs XXX
        print('Moving temporary file to new destination.')
        os.renames(temp_name, file_name)
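The countdown added in the second hunk looks like the "don't saturate disk with writes" part of the commit message: instead of writing the partial-download record back to the bsddb3 shelve after every received block, the record is stored and sync()ed only every 16 blocks, and the snapshot that gets persisted (old_record) lags behind the in-memory state, presumably so the on-disk index never records more progress than has actually been written to the temporary file. A minimal sketch of that pattern, assuming a dict-like store with a sync() method and a caller-supplied apply_block() callback; none of these names come from samesite.py:

    def download_blocks(blocks, index, key, record, apply_block, sync_every=16):
        # Persist progress to `index` (any dict-like store with a sync() method,
        # e.g. a bsddb3.dbshelve shelf) only every `sync_every` blocks.
        countdown = sync_every
        snapshot = dict(record)                  # last state that is safe to persist
        for block in blocks:
            record = apply_block(record, block)  # caller updates bookkeeping per block
            countdown -= 1
            if countdown == 0:
                index[key] = snapshot            # one write + one sync per sync_every blocks
                index.sync()
                countdown = sync_every
                snapshot = dict(record)
        index[key] = record                      # final, complete state
        index.sync()

The trade-off is that a crash can lose up to sync_every blocks of bookkeeping, which only means re-downloading those ranges, whereas syncing after every 8192-byte block (the block_size above) multiplies disk writes.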