Squid url redirector

Check-in [67e8b3309d]

Overview
Comment: switched sg_import to python3.2; reworked SysLogHandlerQueue to stick to one socket; reworked FReadlineQueue to use io.FileIO for file operations
SHA3-256: 67e8b3309d6e56706b82456eba721093f776de6b6006128cc4d09435abe62f5b
User & Date: arcade@b1t.name on 2012-07-09 14:25:12.000
Context
2012-07-10 14:02  remove leftover; fix conversion from multibyte  (check-in: dbc0727869, user: arcade@b1t.name, tags: master, trunk)
2012-07-09 14:25  switched sg_import to python3.2; reworked SysLogHandlerQueue to stick to one socket; reworked FReadlineQueue to use io.FileIO for file operations  (check-in: 67e8b3309d, user: arcade@b1t.name, tags: master, trunk)
2012-07-09 11:26  added gevent wrapper for SysLogHandler; automatically recode all log lines from utf-8 to str; simplify invoking; added some comments  (check-in: 2654b86697, user: arcade@b1t.name, tags: master, trunk)
Changes
Old version, lines 1-8:
#!/usr/bin/env python3.1

# This script converts SquidGuard database into format that can be imported to
# squid-tagger. It should be run in SquidGuard database directory and it would
# produce csv stream that can be redirected to squid-tagger for imports:

# cd /var/db/squidGuard ; path/to/sg_import.py | path/to/squid-tagger.py -l -f

New version, lines 1-8:
#!/usr/bin/env python3.2

# This script converts SquidGuard database into format that can be imported to
# squid-tagger. It should be run in SquidGuard database directory and it would
# produce csv stream that can be redirected to squid-tagger for imports:

# cd /var/db/squidGuard ; path/to/sg_import.py | path/to/squid-tagger.py -l -f
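
The shell pipeline in the comment above is the whole interface of this hunk: sg_import.py walks the SquidGuard database directory and writes a CSV stream to stdout for squid-tagger to import. As a rough, hypothetical sketch of the stdout side only (the real column layout expected by squid-tagger is not shown in this check-in, so the fields below are invented for illustration):

# Sketch only, not part of this check-in: emit a CSV stream on stdout in the
# spirit of sg_import.py. The (domain, category) columns are illustrative;
# consult squid-tagger itself for the real import format.
import csv, sys

def emit(rows):
	writer = csv.writer(sys.stdout)
	for domain, category in rows:
		writer.writerow([domain, category])

emit([('example.com', 'ads'), ('example.org', 'gambling')])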

Old version, lines 34-48:
class Config:
	__slots__ = frozenset(['_config', '_default', '_section', 'options'])
	_default = {
		'log': {
			'silent': 'no',
		},
		'database': {
			'host': 'localhost',
			'database': 'squidTag',
	},}

	# function to read in config file
	def __init__(self):
		import ConfigParser, optparse, os

New version, lines 34-47:
class Config:
	__slots__ = frozenset(['_config', '_default', '_section', 'options'])
	_default = {
		'log': {
			'silent': 'no',
		},
		'database': {

			'database': 'squidTag',
	},}

	# function to read in config file
	def __init__(self):
		import ConfigParser, optparse, os
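
Only the default database host is dropped in this hunk; the rest of the Config class is cut off by the diff. For background, here is a minimal sketch of the defaults-plus-config-file pattern the class appears to use, written against Python 3.2's configparser (the file name and accessor below are my own, not the project's):

# Sketch only, not part of this check-in: hard-coded defaults overridden by an
# optional configuration file. Python 3 spells the module 'configparser';
# the diffed code still imports the Python 2 name.
import configparser

_default = {
	'log': {'silent': 'no'},
	'database': {'database': 'squidTag'},
}

def load_config(path = 'squid-tagger.conf'):
	parser = configparser.ConfigParser()
	parser.read_dict(_default)	# seed the defaults (read_dict is new in 3.2)
	parser.read(path)		# values from the file, if present, win
	return parser

config = load_config()
print(config.get('database', 'database'))	# 'squidTag' unless overridden

With the hunk applied there is no baked-in default for 'host' any more, so it has to come from the configuration file or from whatever fallback the omitted code provides.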

Old version, lines 96-193:
config = Config()

import logging, logging.handlers

# wrapper around logging handler to make it queue records and don't stall when sending them

class SysLogHandlerQueue(logging.handlers.SysLogHandler):
	__slots__ = frozenset(['_event', '_tail', '_workers'])

	def __init__(self):
		logging.handlers.SysLogHandler.__init__(self, '/dev/log')
		self._event = gevent.event.Event()
		self._event.set()
		self._tail = gevent.queue.Queue()
		self._workers = set()

	def emit(self, record):
		# my syslog is broken and cannot into UTF-8 BOM
		record.msg = str(record.msg)
		self._tail.put(record)
		if self._tail.qsize() != 0:
			# in case queue is empty we will spawn new worker
			# all workers are logged so we can kill them on close()
			self._workers.add(gevent.spawn(self._writer))

	def _writer(self):
		# here we are locking the queue so we can be sure we are the only one
		self._event.wait()
		self._event.clear()
		while not self._tail.empty():
			logging.handlers.SysLogHandler.emit(self, self._tail.get())
		self._event.set()
		self._workers.remove(gevent.getcurrent())


	def close(self):
		for worker in self._workers:
			gevent.kill(worker)
		logging.handlers.SysLogHandler.close(self)

logger = logging.getLogger('squidTag')
logger.setLevel(logging.INFO)
handler = SysLogHandlerQueue()
handler.setFormatter(logging.Formatter(str('squidTag[%(process)s]: %(message)s')))
logger.addHandler(handler)

# tiny wrapper around a file to make reads from it geventable
# or should i move this somewhere?

class FReadlineQueue(gevent.queue.Queue):
	# storing file descriptor, leftover
	__slots__ = frozenset(['_fd', '_tail'])

	def __init__(self, fd):

		# initialising class
		gevent.queue.Queue.__init__(self)
		# storing file descriptor
		self._fd = fd

		# using empty tail
		self._tail = ''
		# setting up event
		self._install_wait()

	def _install_wait(self):
		fileno = self._fd.fileno()
		# putting file to nonblocking mode
		fcntl.fcntl(fileno, fcntl.F_SETFL, fcntl.fcntl(fileno, fcntl.F_GETFL)  | os.O_NONBLOCK)
		# installing event handler
		gevent.core.read_event(fileno, self._wait_helper)

	def _wait_helper(self, ev, evtype):
		# reading one buffer from stream
		buf = self._fd.read(4096)
		# splitting stream by line ends
		rows = buf.decode('l1').split('\n')
		# adding tail to the first element if there is some tail
		if len(self._tail) > 0:
			rows[0] = self._tail + rows[0]
		# popping out last (incomplete) element
		self._tail = rows.pop(-1)
		# dropping all complete elements to the queue
		for row in rows:
			self.put_nowait(row)
			logger.info('< ' + row)
		if len(buf) > 0:
			# no EOF, reinstalling event handler
			gevent.core.read_event(self._fd.fileno(), self._wait_helper)
		else:
			# EOF found, sending EOF to queue
			self.put_nowait(None)

stdin = FReadlineQueue(sys.stdin)

# wrapper against file handler that makes possible to queue some writes without stalling

class FWritelineQueue(gevent.queue.JoinableQueue):
	# storing fileno, io interface, leftover
	__slots__ = frozenset(['_fileno', '_io', '_tail'])

New version, lines 95-190:
config = Config()

import logging, logging.handlers

# wrapper around logging handler to make it queue records and don't stall when sending them

class SysLogHandlerQueue(logging.handlers.SysLogHandler):
	__slots__ = frozenset(['_running', '_tail', '_worker'])

	def __init__(self):
		logging.handlers.SysLogHandler.__init__(self, '/dev/log')


		self._tail = gevent.queue.Queue()
		self._worker = None

	def emit(self, record):
		# my syslog is broken and cannot into UTF-8 BOM
		record.msg = str(record.msg)
		self._tail.put(record)
		if self._worker == None:
			# in case queue is empty we will spawn new worker
			# all workers are logged so we can kill them on close()
			self._worker = gevent.spawn(self._writer)

	def _writer(self):
		# here we are locking the queue so we can be sure we are the only one
		print('syslog start')

		while not self._tail.empty():
			logging.handlers.SysLogHandler.emit(self, self._tail.get())

		self._worker = None
		print('syslog end')

	def close(self):
		if self._worker != None:
			gevent.kill(self._worker)
		logging.handlers.SysLogHandler.close(self)

logger = logging.getLogger('squidTag')
logger.setLevel(logging.INFO)
handler = SysLogHandlerQueue()
handler.setFormatter(logging.Formatter(str('squidTag[%(process)s]: %(message)s')))
logger.addHandler(handler)

# tiny wrapper around a file to make reads from it geventable
# or should i move this somewhere?

class FReadlineQueue(gevent.queue.Queue):
	# storing file descriptor, leftover
	__slots__ = frozenset(['_io', '_fileno', '_tail'])

	def __init__(self, fd, closefd = True):
		import io
		# initialising class
		gevent.queue.Queue.__init__(self)
		# storing file descriptor
		self._fileno = fd.fileno()
		self._io = io.FileIO(self._fileno, 'r', closefd)
		# using empty tail
		self._tail = ''
		# setting up event
		self._install_wait()

	def _install_wait(self):

		# putting file to nonblocking mode
		fcntl.fcntl(self._fileno, fcntl.F_SETFL, fcntl.fcntl(self._fileno, fcntl.F_GETFL)  | os.O_NONBLOCK)
		# installing event handler
		gevent.core.read_event(self._fileno, self._wait_helper)

	def _wait_helper(self, ev, evtype):
		# reading one buffer from stream
		buf = self._io.read(4096)
		# splitting stream by line ends
		rows = buf.decode('l1').split('\n')
		# adding tail to the first element if there is some tail
		if len(self._tail) > 0:
			rows[0] = self._tail + rows[0]
		# popping out last (incomplete) element
		self._tail = rows.pop(-1)
		# dropping all complete elements to the queue
		for row in rows:
			self.put_nowait(row)
			logger.info('< ' + row)
		if len(buf) > 0:
			# no EOF, reinstalling event handler
			gevent.core.read_event(self._fileno, self._wait_helper)
		else:
			# EOF found, sending EOF to queue
			self.put_nowait(None)

stdin = FReadlineQueue(sys.stdin, False)

# wrapper against file handler that makes possible to queue some writes without stalling

class FWritelineQueue(gevent.queue.JoinableQueue):
	# storing fileno, io interface, leftover
	__slots__ = frozenset(['_fileno', '_io', '_tail'])
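
The listing breaks off inside FWritelineQueue, but both reworked pieces named in the check-in comment are visible above: SysLogHandlerQueue now keeps at most one drain worker writing to the single socket opened by SysLogHandler, and FReadlineQueue reads through io.FileIO instead of the wrapped stdin object. A self-contained sketch of the first pattern, using only gevent's spawn/queue primitives (class and attribute names here are illustrative, not the project's):

# Sketch only, not part of this check-in: a syslog handler whose emit() never
# blocks the caller. Records are queued, and at most one greenlet at a time
# drains the queue over the handler's single syslog socket.
import logging, logging.handlers
import gevent, gevent.queue

class QueuedSysLogHandler(logging.handlers.SysLogHandler):
	def __init__(self, address = '/dev/log'):
		logging.handlers.SysLogHandler.__init__(self, address)
		self._queue = gevent.queue.Queue()
		self._worker = None

	def emit(self, record):
		self._queue.put(record)
		if self._worker is None:
			# no drain worker running, start one
			self._worker = gevent.spawn(self._drain)

	def _drain(self):
		while not self._queue.empty():
			logging.handlers.SysLogHandler.emit(self, self._queue.get())
		# queue is drained; the next emit() spawns a fresh worker
		self._worker = None

	def close(self):
		if self._worker is not None:
			gevent.kill(self._worker)
		logging.handlers.SysLogHandler.close(self)

Wired up the same way as in the code above (setFormatter, then addHandler), emit() returns as soon as the record is queued, which is what keeps logging from stalling the rest of the redirector.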