Overview
Comment: | moving logging configuration to log added regexp supports - needs testing |
---|---|
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | master | trunk |
Files: | files | file ages | folders |
SHA3-256: |
7d9c268669e4002c9c6cfddda2c5fb8a |
User & Date: | c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 on 2009-10-02 14:19:05.000 |
Other Links: | branch diff | manifest | tags |
Context
2009-10-02
| ||
15:00 | fixed some errors check-in: 38f9c11f97 user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
14:19 | moving logging configuration to log added regexp supports - needs testing check-in: 7d9c268669 user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
07:52 | a lot of comments, new tripdomain function and function for reverse transformation check-in: ddb0e69dd7 user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 tags: master, trunk | |
Changes
Modified database.sql
from [25e0fc0a65]
to [c2219a89ef].
︙ | ︙ | |||
CREATE UNIQUE INDEX tag_s ON tag USING btree (tag);

-- table to hold tag - site links
CREATE TABLE urls (
	date_added timestamp without time zone DEFAULT ('now'::text)::timestamp(0) without time zone NOT NULL,
	id_site smallint NOT NULL,
	id_tag smallint NOT NULL,
	-- FIX: column was declared "regex" but the site_rule view below selects
	-- "regexp" from this table; the names must agree
	regexp text
);

ALTER TABLE ONLY urls
	ADD CONSTRAINT urls_pkey PRIMARY KEY (date_added);

CREATE UNIQUE INDEX urls_id_site ON urls USING btree (id_site);

-- rule to join all tables into one to simplify access
-- automatically uses current day and time data
CREATE VIEW site_rule AS
	SELECT a.redirect_url, a.netmask, b.site, b.regexp
	FROM ((
		-- rules active at the current time of day and day of week
		SELECT rules.redirect_url, tag.tag AS rule_tag, rules.netmask
		FROM rules NATURAL JOIN tag
		WHERE ('now'::text)::time without time zone >= rules.from_time
			AND ('now'::text)::time without time zone <= rules.to_time
			AND date_part('dow'::text, now()) >= (rules.from_weekday)::double precision
			AND date_part('dow'::text, now()) <= (rules.to_weekday)::double precision
	) a JOIN (
		-- tagged sites with their optional url regexp
		SELECT site.site, tag.tag AS url_tag, regexp
		FROM urls NATURAL JOIN tag NATURAL JOIN site
	) b ON (b.url_tag && a.rule_tag));
Modified squid-tagger
from [de210bb16a]
to [33d38d1923].
#!/usr/bin/env python3.1

import configparser, optparse, os, postgresql.api, re, sys, _thread

class Logger:
	'''Logging facade: stays silent when the "log" config section says so,
	otherwise forwards messages to syslog. (Class continues below this
	excerpt.)'''

	__slots__ = frozenset(['_silent', '_syslog'])

	def __init__(self):
		# read the [log] section of the global config
		config.section('log')
		# BUG FIX: original read "if config['silent'] = 'yes':" — assignment
		# instead of comparison, which is a SyntaxError in Python
		if config['silent'] == 'yes':
			# silent mode: all messages will be dropped
			self._silent = True
		else:
			# import syslog lazily — only needed when we actually log
			import syslog
			self._syslog = syslog
			self._syslog.openlog('squidTag')
			self._silent = False
︙ | ︙ | |||
class tagDB:
	'''Wrapper around the tagging database: lazily opens a PostgreSQL
	connection and answers "should this request be redirected?" queries.'''

	__slots__ = frozenset(['_prepared', '_check_stmt', '_db'])

	def __init__(self):
		self._prepared = set()
		self._db = False
		# at most one row: the longest (most specific) matching site wins
		self._check_stmt = self._curs().prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc limit 1")

	def _curs(self):
		# open the connection on first use and cache it afterwards
		if not self._db:
			config.section('database')
			self._db = postgresql.open(
				'pq://{0}:{1}@{2}/{3}'.format(
					config['user'],
					config['password'],
					config['host'],
					config['database'],
				)
			)
		return(self._db)

	def check(self, ip_address, site, url_path = ''):
		'''Return the redirect URL for (ip_address, site) or None.

		BUG FIX: the original indexed the result *set* (result[1]) where it
		meant the columns of the first row, and matched the regexp against an
		undefined name "url_path"; url_path is now an explicit, optional
		(backward-compatible) parameter.'''
		result = self._check_stmt(site, ip_address)
		if len(result) > 0:
			redirect_url, regexp = result[0]
			if regexp:
				# site matches only if its regexp also matches the url path
				if re.compile(regexp).match(url_path):
					return redirect_url
				return None
			return redirect_url
		return None

class CheckerThread:

	__slots__ = frozenset(['_db', '_lock', '_lock_queue', '_log', '_queue'])
︙ | ︙ | |||
105 106 107 108 109 110 111 | parser.add_option('-c', '--config', dest = 'config', help = 'config file location', metavar = 'FILE', default = '/usr/local/etc/squid-tagger.conf') (options, args) = parser.parse_args() if not os.access(options.config, os.R_OK): | | | | 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 | parser.add_option('-c', '--config', dest = 'config', help = 'config file location', metavar = 'FILE', default = '/usr/local/etc/squid-tagger.conf') (options, args) = parser.parse_args() if not os.access(options.config, os.R_OK): print("Can't read {}: exitting".format(options.config)) sys.exit(2) self._config = configparser.ConfigParser() self._config.readfp(open(options.config)) def section(self, section): self._section = section def __getitem__(self, name): return self._config.get(self._section, name) config = Config() log = Logger() db = tagDB() checker = CheckerThread(db,log) while True: line = sys.stdin.readline() if len(line) == 0: break checker.check(line) |