Squid url redirector

Check-in [a3d53162db]

Overview
Comment: individual ip support fixed
SHA3-256: a3d53162dba578f562d98891783fb759fdef4e0f16a76f127b8d865c2c4107f5
User & Date: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 on 2010-12-07 15:49:00.000
Context
2011-04-06 13:26  back to python2.7 moving to gevent  (check-in: 0ef24b1937, user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4, tags: master, trunk)
2010-12-07 15:49  individual ip support fixed  (check-in: a3d53162db, user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4, tags: master, trunk)
2010-11-03 12:37  Do not return '-' for not matching urls.  (check-in: ddbf5288b9, user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4, tags: master, trunk)
Changes
@@ -36,15 +36,15 @@
				config['host'],
				config['database'],
		) )
		self._check_stmt = None

	def check(self, site, ip_address):
		if self._check_stmt == None:
-			self._check_stmt = self._db.prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc")
+			self._check_stmt = self._db.prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >>= $2::text::inet order by array_length(site, 1) desc")
		return(self._check_stmt(site, ip_address))

	def dump(self):
		return(self._db.prepare("copy (select untrip(site) as site, tag, regexp from urls order by site, tag) to stdout csv header")())

	def load(self, data):
		with self._db.xact():
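
The only change in this hunk is the inet containment operator in the check() query. PostgreSQL's >> operator means "strictly contains", so a site_rule row whose netmask is a single host address (a /32) could never match a request coming from exactly that address; >>= means "contains or equals", which is what makes rules for individual IPs work. A rough illustration of the difference, using made-up addresses rather than anything from the repository:

-- ">>" requires a strict supernet, so an exact host entry never matches itself:
select inet '192.168.1.5/32' >>  inet '192.168.1.5';  -- false
select inet '192.168.1.0/24' >>  inet '192.168.1.5';  -- true
-- ">>=" is "contains or equals", so an individual-IP rule matches its own address:
select inet '192.168.1.5/32' >>= inet '192.168.1.5';  -- true
select inet '192.168.1.0/24' >>= inet '192.168.1.5';  -- true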