Overview
| Comment: | * formatting fixes; * added output of full original url instead of -. |
|---|---|
| Downloads: | Tarball | ZIP archive | SQL archive |
| Timelines: | family | ancestors | descendants | both | master | trunk |
| Files: | files | file ages | folders |
| SHA3-256: |
442d7bf53a72cac14ed42c7422333952 |
| User & Date: | c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 on 2009-10-12 12:45:15.000 |
| Other Links: | branch diff | manifest | tags |
Context
|
2009-10-12
| ||
| 12:58 | Errors targeted with last commit were produced by bad logic of match cycle which replied twice to each request which can be blocked. check-in: 7e3418d94f user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
| 12:45 | * formatting fixes; * added output of full original url instead of -. check-in: 442d7bf53a user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
|
2009-10-10
| ||
| 10:26 | function to return site id check-in: e74427953f user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
Changes
Modified squid-tagger.py
from [0cfe66321a]
to [5b0680413b].
| ︙ | ︙ | |||
30 31 32 33 34 35 36 |
self._db = False
self._check_stmt = self._curs().prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc")
def _curs(self):
if not self._db:
config.section('database')
self._db = postgresql.open(
| | | 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 |
self._db = False
self._check_stmt = self._curs().prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc")
def _curs(self):
if not self._db:
config.section('database')
self._db = postgresql.open(
'pq://{}:{}@{}/{}'.format(
config['user'],
config['password'],
config['host'],
config['database'],
) )
return(self._db)
|
| ︙ | ︙ | |||
66 67 68 69 70 71 72 | self._lock.acquire() self._lock_queue.acquire() # yes this should be written this way, and yes, this is why I hate threading if len(self._queue) > 1 and self._lock.locked(): self._lock.release() req = self._queue.pop(0) self._lock_queue.release() | | | | | | > | | | | 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 |
self._lock.acquire()
self._lock_queue.acquire()
# yes this should be written this way, and yes, this is why I hate threading
if len(self._queue) > 1 and self._lock.locked():
self._lock.release()
req = self._queue.pop(0)
self._lock_queue.release()
self._log.info('trying {}\n'.format(req[1]))
result = self._db.check(req[2], req[1])
for row in result:
if row != None and row[0] != None:
if row[1] != None:
self._log.info('trying regexp "{}" versus "{}"\n'.format(row[1], req[3]))
if re.compile(row[1]).match(req[3]):
writeline('{} 302:{}\n'.format(req[0], row[0]))
break
else:
continue
else:
writeline('{} 302:{}\n'.format(req[0], row[0]))
break
writeline('{} {}://{}/{}\n'.format(req[0], req[4], req[1], req[3]))
def check(self, line):
    """Parse one redirector request line from squid and queue it for checking.

    Expected line shape:
    "<id> <proto>://<site>/<path> <ip>/<fqdn-or-dash> <user> <method> ...".
    On a match, the tuple (id, site, ip, path, proto) is appended to the
    work queue and the worker lock is released if it is held; otherwise the
    line is echoed back unchanged so squid is never left without an answer.
    """
    # Raw string: the previous non-raw pattern relied on invalid escape
    # sequences (e.g. "\w", "\ ") that raise DeprecationWarning on modern
    # Python; the pattern itself is unchanged.
    request = re.compile(
        r'^([0-9]+) (http|ftp)://([-\w.:]+)/([^ ]*) ([0-9.]+)/(-|[\w.]+) (-|\w+) (-|GET|HEAD|POST).*$'
    ).match(line)
    if request:
        req_id = request.group(1)  # renamed from "id" to avoid shadowing the builtin
        proto = request.group(2)
        site = request.group(3)
        url_path = request.group(4)
        ip_address = request.group(5)
        self._lock_queue.acquire()
        self._queue.append((req_id, site, ip_address, url_path, proto))
        # Wake the worker only if it is actually parked on the lock.
        if self._lock.locked():
            self._lock.release()
        self._lock_queue.release()
        self._log.info('request {} queued ({})\n'.format(req_id, line))
    else:
        self._log.info('bad request\n')
        writeline(line)
def writeline(string):
    """Write one response line to squid on stdout, logging it first.

    Flushes immediately: squid reads answers line by line and must not be
    kept waiting on a buffered reply.
    """
    log.info('sending: ' + string)
    stream = sys.stdout
    stream.write(string)
    stream.flush()
class Config:
__slots__ = frozenset(['_config', '_section'])
def __init__(self):
|
| ︙ | ︙ |