Samesite - proxy that can cache partial transfers

Check-in [a81f1a70fb]

Overview
Comment: added timeout; simplified gzip handling; stops rechecking partial files
SHA3-256: a81f1a70fb64bcd5dd8eedd935e31857e8b14403a7cd777e64a813d6da32bc3d
User & Date: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525 on 2012-01-16 13:33:52.000
Context
2012-01-16
13:39  oops, not checking file breaks things (check-in: d1fa9d0737, user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525, tags: master, trunk)
13:33  added timeout; simplified gzip handling; stops rechecking partial files (check-in: a81f1a70fb, user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525, tags: master, trunk)
10:29  wrong dictionary initialised (check-in: 31a8af9ff1, user: c.kworr@b84a3442-36b4-a7b2-c7ad-07429f13c525, tags: master, trunk)
Changes
@@ -182,15 +182,14 @@
 			record = index[my_path]
 			if os.access(file_name, os.R_OK):
 				info += '\nFull file found.'
 				file_stat = os.stat(file_name)
 			elif '_parts' in index[my_path] and os.access(temp_name, os.R_OK):
 				info += '\nPartial file found.'
 				file_stat = os.stat(temp_name)
-				recheck = True
 			else:
 				info += '\nFile not found or inaccessible.'
 				record['_parts'] = None
 				reload = True

 		if not '_parts' in record:
 			record['_parts'] = None
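The first hunk drops the recheck = True flag: a readable partial file is now trusted as recorded instead of being re-validated on every hit, and only a missing or unreadable file forces a reload. A minimal sketch of the resulting three-way cache check, assuming the index/record dictionary layout and the file_name/temp_name variables visible in the diff (the helper name is hypothetical; the surrounding proxy code is not shown here):

	import os

	def check_cache_state(index, my_path, file_name, temp_name):
		# Hypothetical helper; mirrors the logic of lines 182-195 above.
		record = index[my_path]
		info, file_stat, reload = '', None, False
		if os.access(file_name, os.R_OK):
			# A complete cached copy exists: serve it directly.
			info += '\nFull file found.'
			file_stat = os.stat(file_name)
		elif '_parts' in record and os.access(temp_name, os.R_OK):
			# A partial download exists: after this check-in it is used
			# as-is, with no recheck flag forcing re-validation.
			info += '\nPartial file found.'
			file_stat = os.stat(temp_name)
		else:
			# Nothing usable on disk: clear the recorded parts and refetch.
			info += '\nFile not found or inaccessible.'
			record['_parts'] = None
			reload = True
		if '_parts' not in record:
			record['_parts'] = None
		return record, file_stat, info, reload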
@@ -245,23 +244,22 @@
 								break
 							ranges += '{}-{}'.format(range[0], range[1] - 1),
 						my_headers['range'] = 'bytes=' + ','.join(ranges)

 				my_headers['Accept-Encoding'] = 'gzip'
 				request = urllib2.Request(request, headers = my_headers)

-				source = urllib2.urlopen(request)
+				source = urllib2.urlopen(request, timeout = 60)
 				new_record = {}
 				new_record['_parts'] = record['_parts']
 				headers = source.info()

 				if 'content-encoding' in headers and headers['content-encoding'] == 'gzip':
-					import gzip, StringIO
-					buf = StringIO.StringIO(source.read())
-					source = gzip.GzipFile(fileobj=buf)
+					import gzip
+					source = gzip.GzipFile(fileobj=source)

 				# stripping unneeded headers (XXX make this inplace?)
 				for header in headers:
 					if header in desc_fields:
 						#if header == 'Pragma' and headers[header] != 'no-cache':
 						if header == 'content-length':
 							if 'content-range' not in headers:
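The second hunk makes the two remaining changes from the commit message: urlopen() now gets timeout = 60, so a stalled upstream connection raises an exception instead of hanging the proxy, and the gzip path wraps the response object directly in GzipFile rather than buffering the entire body through StringIO first. One caveat worth noting: in Python 2, gzip.GzipFile calls tell() and seek() on its file object, which plain urllib2 responses do not support, so a fully streaming decoder would use zlib.decompressobj instead. A rough sketch under those assumptions (the fetch helper and url parameter are illustrative, not samesite's API):

	import urllib2, zlib

	def fetch(url, my_headers):
		# Illustrative helper, not samesite's actual fetch path.
		my_headers['Accept-Encoding'] = 'gzip'
		request = urllib2.Request(url, headers = my_headers)
		# timeout = 60 matches the value added in this check-in.
		source = urllib2.urlopen(request, timeout = 60)
		headers = source.info()

		if headers.get('content-encoding') == 'gzip':
			# 16 + MAX_WBITS tells zlib to expect a gzip wrapper; unlike
			# gzip.GzipFile, decompressobj never needs tell()/seek().
			decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
			chunks = []
			while True:
				chunk = source.read(8192)
				if not chunk:
					break
				chunks.append(decoder.decompress(chunk))
			chunks.append(decoder.flush())
			body = ''.join(chunks)
		else:
			body = source.read()
		return headers, body

The incremental loop also keeps memory bounded for large transfers, which matters for a proxy that caches partial downloads.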