Squid url redirector

Differences From Artifact [7377ed3160]:
To Artifact [f5c6157d84]:


From Artifact [7377ed3160]:
	def notice(self, message):
		if self._syslog:
			self._syslog.syslog(self._syslog.LOG_NOTICE, message)

# wrapper around database
class tagDB:
	__slots__ = frozenset(('_check_stmt', '_db', '_dump_stmt'))

	def __init__(self):
		config.section('database')
		self._db = postgresql.open(
			'pq://{}:{}@{}/{}'.format(
				config['user'],
				config['password'],
				config['host'],
				config['database'],
		) )
		self._check_stmt = None
		self._dump_stmt = None

	def check(self, site, ip_address):
		if self._check_stmt is None:
			# pick the most specific rule: the site must be contained in the reversed domain, the client address must fall inside the rule netmask, longest site match first
			self._check_stmt = self._db.prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc")
		return(self._check_stmt(site, ip_address))

	def dump(self):
		if self._dump_stmt is None:
			self._dump_stmt = self._db.prepare("select untrip(site), tag, regexp from urls natural join site natural join tag order by site, tag")
		return(self._dump_stmt())

	def load(self, csv_data):
		with self._db.xact():
			if config.options.flush_db:
				self._db.execute('delete from urls;')
				if config.options.flush_site:
				self._db.execute('delete from site;')
			insertreg = self._db.prepare("select set($1, $2, $3)")
			insert = self._db.prepare("select set($1, $2)")
			for row in csv_data:
				if len(row[2]) > 0:
					insertreg(row[0], row[1], row[2])
				else:
					insert(row[0], row[1])
		self._db.execute('vacuum analyze site;')
		self._db.execute('vacuum analyze urls;')


# abstract class with basic checking functionality
class Checker:
	__slots__ = frozenset(['_db', '_log'])

	def __init__(self):
		self._db = tagDB()
		self._log = Logger()







To Artifact [f5c6157d84]:
	def notice(self, message):
		if self._syslog:
			self._syslog.syslog(self._syslog.LOG_NOTICE, message)

# wrapper around database
class tagDB:
	__slots__ = frozenset(('_check_stmt', '_db'))

	def __init__(self):
		config.section('database')
		self._db = postgresql.open(
			'pq://{}:{}@{}/{}'.format(
				config['user'],
				config['password'],
				config['host'],
				config['database'],
		) )
		self._check_stmt = None


	def check(self, site, ip_address):
		if self._check_stmt is None:
			# pick the most specific rule: the site must be contained in the reversed domain, the client address must fall inside the rule netmask, longest site match first
			self._check_stmt = self._db.prepare("select redirect_url, regexp from site_rule where site <@ tripdomain($1) and netmask >> $2::text::inet order by array_length(site, 1) desc")
		return(self._check_stmt(site, ip_address))

	def dump(self):
		return(self._db.prepare("select untrip(site), tag, regexp from urls natural join site natural join tag order by site, tag")())

	def load(self, csv_data):
		with self._db.xact():
			if config.options.flush_db:
				self._db.execute('delete from urls;')
				if config.options.flush_site:
				self._db.execute('delete from site;')
			insertreg = self._db.prepare("select set($1, $2, $3)")
			insert = self._db.prepare("select set($1, $2)")
			for row in csv_data:
				if len(row[2]) > 0:
					insertreg(row[0], row[1], row[2])
				else:
					insert(row[0], row[1])
		self._db.execute('vacuum analyze site;')
		self._db.execute('vacuum analyze urls;')

	def load_conf(self, csv_data):
		with self._db.xact():
			self._db.execute('delete from rules;')
			insertconf = self._db.prepare("insert into rules (netmask, redirect_url, from_weekday, to_weekday, from_time, to_time, id_tag) values ($1::text::cidr, $2, $3, $4, $5::text::time, $6::text::time, get_tag($7::text::text[]))")
			for row in csv_data:
				insertconf(row[0], row[1], int(row[2]), int(row[3]), row[4], row[5], row[6])
		self._db.execute('vacuum analyze rules;')

	def dump_conf(self):
		return(self._db.prepare("select netmask, redirect_url, from_weekday, to_weekday, from_time, to_time, tag from rules natural join tag")())

# abstract class with basic checking functionality
class Checker:
	__slots__ = frozenset(['_db', '_log'])

	def __init__(self):
		self._db = tagDB()
		self._log = Logger()
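As a hedged illustration (not part of either artifact): once config has been initialised, the check() interface above might be exercised as in the sketch below. The domain and client address are made up, and the site_rule view plus the tripdomain()/untrip() helpers are assumed to be installed in the database schema, which this diff does not show.

# hypothetical usage sketch; connection details come from the
# [database] section of the configuration file
tagdb = tagDB()

# rows arrive most-specific-first (longest site match), so the first
# row wins; regexp is None when the rule covers the whole site
for redirect_url, regexp in tagdb.check('www.example.com', '10.0.0.1'):
	print(redirect_url, regexp)
	break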
From Artifact [7377ed3160]:
			action = 'store_true', metavar = 'bool')
		parser.add_option('-F', '--flush-site', dest = 'flush_site',
			help = 'when flushing previous database flush site index too',
			action = 'store_true', default = False, metavar = 'bool')
		parser.add_option('-l', '--load', dest = 'load',
			help = 'load database', action = 'store_true', metavar = 'bool',
			default = False)







		(self.options, args) = parser.parse_args()

		assert os.access(self.options.config, os.R_OK), "Fatal error: can't read {}".format(self.options.config)

		self._config = configparser.ConfigParser()
		self._config.readfp(open(self.options.config))







To Artifact [f5c6157d84]:
			action = 'store_true', metavar = 'bool')
		parser.add_option('-F', '--flush-site', dest = 'flush_site',
			help = 'when flushing previous database flush site index too',
			action = 'store_true', default = False, metavar = 'bool')
		parser.add_option('-l', '--load', dest = 'load',
			help = 'load database', action = 'store_true', metavar = 'bool',
			default = False)
		parser.add_option('-D', '--dump-conf', dest = 'dump_conf',
			help = 'dump filtering rules', default = False, metavar = 'bool',
			action = 'store_true')
		parser.add_option('-L', '--load-conf', dest = 'load_conf',
			help = 'load filtering rules', default = False, metavar = 'bool',
			action = 'store_true')

		(self.options, args) = parser.parse_args()

		assert os.access(self.options.config, os.R_OK), "Fatal error: can't read {}".format(self.options.config)

		self._config = configparser.ConfigParser()
		self._config.readfp(open(self.options.config))
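The two new options round-trip the filtering rules as CSV on stdout and stdin. A hypothetical input file for --load-conf might look like the sample below; the header has to match the conf_fields list exactly, the weekday numbers and time bounds are illustrative guesses, and the tag column uses the same {a,b} array syntax that --dump-conf emits (CSV-quoted when it contains a comma).

netmask,redirect_url,from_weekday,to_weekday,from_time,to_time,tag
192.168.0.0/24,http://127.0.0.1/blocked.html,0,6,00:00:00,23:59:59,{banners}
10.0.0.0/8,http://127.0.0.1/blocked.html,1,5,08:30:00,18:00:00,"{banners,chat}"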
From Artifact [7377ed3160]:
			else:
				self._config.set(self._section, name, None)
		return(self._config.get(self._section, name))

# initializing and reading in config file
config = Config()

if config.options.dump:
	# dumping database
	import csv

	tagdb = tagDB()

	csv_writer = csv.writer(sys.stdout)

	csv_writer.writerow(['site', 'tags', 'regexp'])
	for row in tagdb.dump():
		csv_writer.writerow([row[0], '{' + ','.join(row[1]) + '}', row[2]])

elif config.options.load:
	# loading database
	import csv

	tagdb = tagDB()

	csv_reader = csv.reader(sys.stdin)
	first_row = next(csv_reader)

	assert first_row == ['site', 'tags', 'regexp'], 'File must contain csv data with three columns: "site", "tags" and "regexp".'

	tagdb.load(csv_reader)
else:
	# main loop
	config.section('reactor')
	if config['reactor'] == 'thread':
		checker = CheckerThread()
	elif config['reactor'] == 'plain':
		checker = Checker()
	elif config['reactor'] == 'kqueue':
		checker = CheckerKqueue()

	checker.loop()
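For context, the site database travels the same way: the dump branch above writes a three-column CSV and load() reads it back. A made-up example of acceptable input follows; tags use the {a,b} array form the dump writes, and an empty regexp column means the rule covers the whole site.

site,tags,regexp
example.com,{banners},
ads.example.com,"{banners,chat}",^/click/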







To Artifact [f5c6157d84]:
			else:
				self._config.set(self._section, name, None)
		return(self._config.get(self._section, name))

# initializing and reading in config file
config = Config()

if config.options.dump or config.options.load or config.options.dump_conf or config.options.load_conf:
	import csv

	tagdb = tagDB()
	data_fields = ['site', 'tags', 'regexp']
	conf_fields = ['netmask', 'redirect_url', 'from_weekday', 'to_weekday', 'from_time', 'to_time', 'tag']

	if config.options.dump or config.options.dump_conf:
		csv_writer = csv.writer(sys.stdout)
		if config.options.dump:
			# dumping database
			csv_writer.writerow(data_fields)
			for row in tagdb.dump():
				csv_writer.writerow([row[0], '{' + ','.join(row[1]) + '}', row[2]])

		elif config.options.dump_conf:
			# dumping rules
			csv_writer.writerow(conf_fields)
			for row in tagdb.dump_conf():
				csv_writer.writerow([row[0], row[1], row[2], row[3], row[4], row[5], '{' + ','.join(row[6]) + '}'])

	elif config.options.load or config.options.load_conf:
		csv_reader = csv.reader(sys.stdin)
		first_row = next(csv_reader)

		if config.options.load:
			# loading database
			assert first_row == data_fields, 'File must contain csv data with these columns: ' + ', '.join(data_fields)

			tagdb.load(csv_reader)

		elif config.options.load_conf:
			# loading rules
			assert first_row == conf_fields, 'File must contain csv data with these columns: ' + ', '.join(conf_fields)

			tagdb.load_conf(csv_reader)

else:
	# main loop
	config.section('reactor')
	if config['reactor'] == 'thread':
		checker = CheckerThread()
	elif config['reactor'] == 'plain':
		checker = Checker()
	elif config['reactor'] == 'kqueue':
		checker = CheckerKqueue()

	checker.loop()
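Finally, a minimal configuration file consistent with the sections this script reads (the Config class is outside this diff; every value below is a placeholder):

[database]
host = localhost
database = squidtag
user = squid
password = secret

[reactor]
; one of: thread, plain, kqueue
reactor = thread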