Overview
| Comment: | Errors targeted with the last commit were produced by bad logic in the match cycle, which replied twice to each request that can be blocked. |
|---|---|
| SHA3-256: | 7e3418d94f584732dfb5054d64c6556d |
| User & Date: | c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 on 2009-10-12 12:58:18.000 |
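The old hunk in the diff below is cut off before the point where the reply was written, so here is a minimal sketch of the shape the new code moves to: the verdict is chosen while scanning the rows returned for a site, and exactly one reply line per request id is written afterwards. This is only an illustration; build_reply, its arguments and the (redirect_url, path_regexp) row layout are assumptions made up for the example, not the project's actual interface.

	import re

	def build_reply(req_id, rows, url_path):
		# Pick at most one verdict while scanning the rows for this request.
		reply = '-'  # default: let the request pass unchanged
		for redirect_url, path_regexp in rows:
			if redirect_url is None:
				continue
			if path_regexp is not None and not re.compile(path_regexp).match(url_path):
				continue  # this row's regexp does not cover the requested path
			reply = '302:' + redirect_url
			break  # first matching row wins
		return '{} {}\n'.format(req_id, reply)  # exactly one reply line per request id

Writing the reply line inside the loop instead, which is what the pre-fix match cycle effectively did for blockable requests, emits more than one answer for the same id, something the redirector dialogue with Squid does not expect.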
Context
2009-10-12

| 19:43 | Simplified function calls and made them nested. | check-in: c90fda69e1 | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk |
| 12:58 | Errors targeted with the last commit were produced by bad logic in the match cycle, which replied twice to each request that can be blocked. | check-in: 7e3418d94f | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk |
| 12:45 | * formatting fixes; * added output of full original url instead of -. | check-in: 442d7bf53a | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk |
Changes
Modified squid-tagger.py from [5b0680413b] to [4873692447].
Old version (lines 68-79):

	# yes this should be written this way, and yes, this is why I hate threading
	if len(self._queue) > 1 and self._lock.locked():
		self._lock.release()
	req = self._queue.pop(0)
	self._lock_queue.release()
	self._log.info('trying {}\n'.format(req[1]))
	result = self._db.check(req[2], req[1])
	for row in result:
		if row != None and row[0] != None:
			if row[1] != None:
				self._log.info('trying regexp "{}" versus "{}"\n'.format(row[1], req[3]))
				if re.compile(row[1]).match(req[3]):
New version (lines 68-107), the request-processing code:

	# yes this should be written this way, and yes, this is why I hate threading
	if len(self._queue) > 1 and self._lock.locked():
		self._lock.release()
	req = self._queue.pop(0)
	self._lock_queue.release()
	self._log.info('trying {}\n'.format(req[1]))
	result = self._db.check(req[2], req[1])
	#reply = '{}://{}/{}'.format(req[4], req[1], req[3])
	reply = '-'
	for row in result:
		if row != None and row[0] != None:
			if row[1] != None:
				self._log.info('trying regexp "{}" versus "{}"\n'.format(row[1], req[3]))
				if re.compile(row[1]).match(req[3]):
					reply = '302:' + row[0]
					break
				else:
					continue
			else:
				reply = '302:' + row[0]
				break
	writeline('{} {}\n'.format(req[0], reply))

and, later in the same hunk, the check() method that feeds the queue:

	def check(self, line):
		request = re.compile('^([0-9]+)\ (http|ftp):\/\/([-\w.:]+)\/([^ ]*)\ ([0-9.]+)\/(-|[\w\.]+)\ (-|\w+)\ (-|GET|HEAD|POST).*$').match(line)
		if request:
			id = request.group(1)
			#proto = request.group(2)
			site = request.group(3)
			url_path = request.group(4)
			ip_address = request.group(5)
			self._lock_queue.acquire()
			self._queue.append((id, site, ip_address, url_path))
			if self._lock.locked():
				self._lock.release()
			self._lock_queue.release()
			self._log.info('request {} queued ({})\n'.format(id, line))
		else:
			self._log.info('bad request\n')
			writeline(line)
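For context, the regexp in check() parses the line Squid hands to a redirector (url_rewrite) helper, apparently with concurrency enabled since a numeric channel id prefix is required: "<id> <URL> <client-ip>/<fqdn> <ident> <method> ...". Below is a small standalone probe of that pattern; the sample line, the example.com host and the redirect target are made up for illustration and are not part of this check-in.

	import re

	# The same pattern check() uses: id, scheme://site/path, ip/fqdn, ident, method.
	request_re = re.compile('^([0-9]+)\ (http|ftp):\/\/([-\w.:]+)\/([^ ]*)\ ([0-9.]+)\/(-|[\w\.]+)\ (-|\w+)\ (-|GET|HEAD|POST).*$')

	sample = '0 http://example.com/ads/banner.gif 192.168.1.5/- - GET -'
	m = request_re.match(sample)
	if m:
		# The tuple check() queues is (id, site, ip_address, url_path):
		print((m.group(1), m.group(3), m.group(5), m.group(4)))
		# prints ('0', 'example.com', '192.168.1.5', 'ads/banner.gif');
		# a blocked request is later answered with e.g. "0 302:http://somewhere/else",
		# an allowed one with "0 -".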