Overview
Comment: one more deadlock, some asserts fixed
SHA3-256: 7c13294e9f562461a5bc83064e1ada7e
User & Date: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 on 2010-08-07 21:56:08.000
Context
2010-08-07
22:02 | I even managed to mess things up one more time | check-in: a55552d30f | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk
21:56 | one more deadlock, some asserts fixed | check-in: 7c13294e9f | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk
21:13 | major definition reorder: table creation first then functions | check-in: 7828f877c8 | user: c.kworr@d4daf22a-8aaf-11de-a64d-234b64dd91b4 | tags: master, trunk
Changes
Modified squid-tagger.py from [07733f6b5a] to [d270a30b3e].
︙
Lines 44-57 became lines 44-58; the new revision adds a startup log message in Checker.__init__:

# abstract class with basic checking functionality
class Checker:
	__slots__ = frozenset(['_db', '_log'])

	def __init__(self):
		self._db = tagDB()
		self._log = Logger()
		self._log.info('started\n')

	def process(self, id, site, ip_address, url_path, line = None):
		self._log.info('trying {}\n'.format(site))
		result = self._db.check(site, ip_address)
		#reply = '{}://{}/{}'.format(req[4], req[1], req[3])
		reply = '-'
		for row in result:
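A side note on the construct used here: __slots__ accepts any iterable of attribute names, so the frozenset above behaves the same as the more common tuple and suppresses the per-instance __dict__. A tiny illustration with a hypothetical Point class, not part of squid-tagger:

class Point:
	# Any iterable of strings works for __slots__; a frozenset is fine.
	__slots__ = frozenset(['x', 'y'])

	def __init__(self, x, y):
		self.x = x
		self.y = y

p = Point(1, 2)
print(p.x, p.y)   # 1 2
# p.z = 3 would raise AttributeError because no instance __dict__ is created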
︙
Lines 159-165 became lines 160-228; the new revision reads:

		# importing select module
		import select
		self._select = select

		# kreating kqueue
		self._kq = self._select.kqueue()
		assert self._kq.fileno() != -1, "Fatal error: can't initialise kqueue."

		# watching sys.stdin for data
		self._kq.control([self._select.kevent(sys.stdin, self._select.KQ_FILTER_READ, self._select.KQ_EV_ADD)], 0)

		# creating data queue
		self._queue = []

	def loop(self):
		# Wait for data by default
		timeout = None
		eof = False
		buffer = ''
		while True:
			# checking if there is any data or witing for data to arrive
			kevs = self._kq.control(None, 1, timeout)
			assert len(kevs) > 0, 'Fatal error: we should receive at least one event.'
			# detect end of stream and exit if possible
			if kevs[0].flags >> 15 == 1:
				eof = True
			if kevs[0].filter == self._select.KQ_FILTER_READ and kevs[0].data > 0:
				# reading data in
				new_buffer = sys.stdin.read(kevs[0].data)
				# if no data was sent - we have reached end of file
				if len(new_buffer) == 0:
					eof = True
				else:
					# adding current buffer to old buffer remains
					buffer += new_buffer
					# splitting to lines
					lines = buffer.split('\n')
					# last line that was not terminate by newline returns to buffer
					buffer = lines[-1]
					# an only if there was at least one newline
					if len(lines) > 1:
						for line in lines[:-1]:
							# add data to the queue
							if self.check(line + '\n'):
								# don't wait for more data, start processing
								timeout = 0
			else:
				if len(self._queue) > 0:
					# get one request and process it
					req = self._queue.pop(0)
					Checker.process(self, req[0], req[1], req[2], req[3])
					if len(self._queue) == 0:
						# wait for data - we have nothing to process
						timeout = None
			# if queue is empty and we reached end of stream - we can exit
			if len(self._queue) == 0 and eof:
				break

	def process(self, id, site, ip_address, url_path, line):
		# simply adding data to the queue
		self._queue.append((id, site, ip_address, url_path))
		self._log.info('request {} queued ({})\n'.format(id, line))

# this classes processes config file and substitutes default values
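Two details of the loop above are easy to miss: kevent.data carries the number of bytes waiting on the descriptor, and the flags >> 15 == 1 test checks the top bit of the flags word, which is select.KQ_EV_EOF (0x8000). Below is a minimal, self-contained sketch of the same kqueue-on-stdin pattern; it assumes a BSD-derived system (kqueue is unavailable elsewhere) and that stdin is a pipe, and the echoed output is purely illustrative:

import select
import sys

# Register stdin with a new kqueue and ask to be notified when it is readable.
kq = select.kqueue()
kq.control([select.kevent(sys.stdin, select.KQ_FILTER_READ, select.KQ_EV_ADD)], 0)

buffer = ''
while True:
	# Block until stdin has data or reports end of file.
	ev = kq.control(None, 1, None)[0]
	if ev.data > 0:
		# ev.data is the number of bytes that can be read without blocking.
		buffer += sys.stdin.read(ev.data)
		lines = buffer.split('\n')
		buffer = lines[-1]  # keep the unterminated tail for the next iteration
		for line in lines[:-1]:
			print('line: {}'.format(line))
	if ev.flags & select.KQ_EV_EOF and ev.data == 0:
		# EOF seen and nothing left to read: leave the loop.
		break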
︙
Lines 237-243 became lines 245-259; in the new revision the config file readability check is an assert:

		parser = optparse.OptionParser()
		parser.add_option('-c', '--config', dest = 'config',
			help = 'config file location', metavar = 'FILE',
			default = '/usr/local/etc/squid-tagger.conf')
		(options, args) = parser.parse_args()

		assert os.access(options.config, os.R_OK), "Fatal error: can't read {}".format(options.config)

		self._config = configparser.ConfigParser()
		self._config.readfp(open(options.config))

	# function to select config file section or create one
	def section(self, section):
		if not self._config.has_section(section):
︙
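The check-in comment mentions fixed asserts, and the config hunk shows the pattern: readability of the config file is asserted before parsing. One property of this approach worth remembering is that python -O strips assert statements, so the check silently disappears in optimized mode. A minimal sketch of the same pattern, using the default path from the diff as a stand-in:

import configparser
import os

# Stand-in path taken from the diff's default; adjust for a real setup.
config_path = '/usr/local/etc/squid-tagger.conf'

# Fail fast if the file is unreadable; note that 'python -O' removes asserts,
# so this check is skipped entirely in optimized mode.
assert os.access(config_path, os.R_OK), "Fatal error: can't read {}".format(config_path)

config = configparser.ConfigParser()
with open(config_path) as fp:
	config.read_file(fp)  # readfp(), used in the diff, is the older name for read_file()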