class App(Daemon): def __init__(self, *args, **kwargs): super( App, self ).__init__(*args, **kwargs) self._socket = None self._tServer = None self.running = None self.tagsEnabled = None def initApp(self, opt = None): if opt and opt.interval: LOGGER.info('Custom interval is set ' + opt.interval) self.interval = int(opt.interval) else: self.interval = 60 # in sec # apacheLogFilePath if opt and opt.apacheLogFilePath: LOGGER.info('Custom apache log file is set ' + opt.apacheLogFilePath) files = opt.apacheLogFilePath.split(',') self.apacheLogFilePath = files else: self.apacheLogFilePath = ['/var/log/apache2/access.log'] if opt and opt.outputFilePath: LOGGER.info('Custom output file is set ' + opt.outputFilePath) self.outputFilePath = opt.outputFilePath else: scriptPath = os.path.realpath(__file__) pathname = os.path.dirname(scriptPath) self.outputFilePath = pathname + '/af_apache_visited_urls.log' if opt and opt.tags: self.tagsEnabled = opt.tags # try: self.parser = ApacheLogsParser(apacheLogFilePath = self.apacheLogFilePath) self.urlsCounter = UrlsCounter(self.outputFilePath, tags = self.tagsEnabled, statsdPrefix = 'apache_url_counter') except Exception as e: LOGGER.critical('Serious Error occured: %s', e) def sendStatsD(self, pklData = None): LOGGER.debug('creating thread to send statsD ') if options and options.apacheHostName: apacheHostName = options.apacheHostName LOGGER.debug('apacheHostName is set to: ' + apacheHostName) else: apacheHostName = None statsdSender = StatsdSender(apacheHostName = apacheHostName, pklData = pklData) statsdSender.start() LOGGER.debug('stoping statsD thread...') statsdSender.join() if statsdSender.isAlive(): LOGGER.debug('statsD thread NOT stoped') else: LOGGER.debug('statsD thread stoped') def stop(self): if self._tServer is not None: LOGGER.debug('stopping sockets thread') self._tServer.stop() super( App, self ).stop() def run(self): if self.running is None: self.running = True i = 0 self.urlsLength = 0 if self._tServer is None: 
self._tServer = ProcessThread(host = HOSTNAME, port = PORT) self._tServer.start() self._tServer.setData(0) # @TODO when we trying to send statsD data from here - it fails while True: try: if i >= self.interval: LOGGER.debug (' parsing apache log... ') urls = self.parser.parse() self.urlsLength = len(urls) i = 0 if self.urlsLength > 0: urlsSumm = self.urlsCounter.update(urls) LOGGER.info('serealizing data to pkl file') pklFile = open('/tmp/data.pkl', 'wb') # Pickle dictionary using protocol 0. pickle.dump({'urls': urls, 'urlsSumm': urlsSumm}, pklFile) pklFile.close() self.sendStatsD(pklData = {'urls': urls, 'urlsSumm': urlsSumm}) self.isDataPolled = False LOGGER.debug(' new urls %d ', len(urls)) # if tread is running if self._tServer is not None: self._tServer.setData(self.urlsLength) else: LOGGER.debug('_tServer ProcessThread not running') LOGGER.debug(' --- {i} {n}'.format(i=i,n=self.interval)) time.sleep(1) i = i + 1 except Exception as msg: print 'AF.APACHE.URLS.COUNTER critical - daemon error' LOGGER.debug (msg) else: LOGGER.debug (' application already running') def status(self): LOGGER.debug (self.urlsLength)
class App(Daemon): def __init__(self, *args, **kwargs): super(App, self).__init__(*args, **kwargs) self._socket = None self._tServer = None self.running = None self.tagsEnabled = None def initApp(self, opt=None): if opt and opt.interval: LOGGER.info('Custom interval is set ' + opt.interval) self.interval = int(opt.interval) else: self.interval = 60 # in sec # apacheLogFilePath if opt and opt.apacheLogFilePath: LOGGER.info('Custom apache log file is set ' + opt.apacheLogFilePath) files = opt.apacheLogFilePath.split(',') self.apacheLogFilePath = files else: self.apacheLogFilePath = ['/var/log/apache2/access.log'] if opt and opt.outputFilePath: LOGGER.info('Custom output file is set ' + opt.outputFilePath) self.outputFilePath = opt.outputFilePath else: scriptPath = os.path.realpath(__file__) pathname = os.path.dirname(scriptPath) self.outputFilePath = pathname + '/af_apache_visited_urls.log' if opt and opt.tags: self.tagsEnabled = opt.tags # try: self.parser = ApacheLogsParser( apacheLogFilePath=self.apacheLogFilePath) self.urlsCounter = UrlsCounter(self.outputFilePath, tags=self.tagsEnabled, statsdPrefix='apache_url_counter') except Exception as e: LOGGER.critical('Serious Error occured: %s', e) def sendStatsD(self, pklData=None): LOGGER.debug('creating thread to send statsD ') if options and options.apacheHostName: apacheHostName = options.apacheHostName LOGGER.debug('apacheHostName is set to: ' + apacheHostName) else: apacheHostName = None statsdSender = StatsdSender(apacheHostName=apacheHostName, pklData=pklData) statsdSender.start() LOGGER.debug('stoping statsD thread...') statsdSender.join() if statsdSender.isAlive(): LOGGER.debug('statsD thread NOT stoped') else: LOGGER.debug('statsD thread stoped') def stop(self): if self._tServer is not None: LOGGER.debug('stopping sockets thread') self._tServer.stop() super(App, self).stop() def run(self): if self.running is None: self.running = True i = 0 self.urlsLength = 0 if self._tServer is None: self._tServer = 
ProcessThread(host=HOSTNAME, port=PORT) self._tServer.start() self._tServer.setData(0) # @TODO when we trying to send statsD data from here - it fails while True: try: if i >= self.interval: LOGGER.debug(' parsing apache log... ') urls = self.parser.parse() self.urlsLength = len(urls) i = 0 if self.urlsLength > 0: urlsSumm = self.urlsCounter.update(urls) LOGGER.info('serealizing data to pkl file') pklFile = open('/tmp/data.pkl', 'wb') # Pickle dictionary using protocol 0. pickle.dump({ 'urls': urls, 'urlsSumm': urlsSumm }, pklFile) pklFile.close() self.sendStatsD(pklData={ 'urls': urls, 'urlsSumm': urlsSumm }) self.isDataPolled = False LOGGER.debug(' new urls %d ', len(urls)) # if tread is running if self._tServer is not None: self._tServer.setData(self.urlsLength) else: LOGGER.debug( '_tServer ProcessThread not running') LOGGER.debug(' --- {i} {n}'.format(i=i, n=self.interval)) time.sleep(1) i = i + 1 except Exception as msg: print 'AF.APACHE.URLS.COUNTER critical - daemon error' LOGGER.debug(msg) else: LOGGER.debug(' application already running') def status(self): LOGGER.debug(self.urlsLength)
class App(Daemon):
    """Daemon that re-reads the apache access logs every ``interval``
    seconds, updates the URL counter, and pushes the latest count to a
    socket-server thread."""

    def __init__(self, *args, **kwargs):
        super(App, self).__init__(*args, **kwargs)
        # all state starts empty; initApp() fills in the configuration
        self._socket = None
        self._tServer = None
        self.running = None
        self.tagsEnabled = None

    def initApp(self, opt=None):
        """Take settings from the parsed CLI options where present,
        otherwise fall back to built-in defaults, then build the parser
        and counter helpers."""
        custom_interval = opt and opt.interval
        if custom_interval:
            LOGGER.info('Custom interval is set ' + opt.interval)
            self.interval = int(opt.interval)
        else:
            self.interval = 60  # in sec
        # apacheLogFilePath
        if opt and opt.apacheLogFilePath:
            LOGGER.info('Custom apache log file is set ' + opt.apacheLogFilePath)
            self.apacheLogFilePath = opt.apacheLogFilePath.split(',')
        else:
            self.apacheLogFilePath = ['/var/log/apache2/access.log']
        if opt and opt.outputFilePath:
            self.outputFilePath = opt.outputFilePath
        else:
            # default output file sits next to this script on disk
            here = os.path.dirname(os.path.realpath(__file__))
            self.outputFilePath = here + '/af_apache_visited_urls.log'
        if opt and opt.tags:
            self.tagsEnabled = opt.tags
        self.parser = ApacheLogsParser(apacheLogFilePath=self.apacheLogFilePath)
        self.urlsCounter = UrlsCounter(self.outputFilePath,
                                       tags=self.tagsEnabled)

    def stop(self):
        """Bring down the socket thread first, then the daemon itself."""
        server = self._tServer
        if server is not None:
            LOGGER.debug('stopping sockets thread')
            server.stop()
        super(App, self).stop()

    def run(self):
        """One-second tick loop; every ``self.interval`` ticks the logs are
        parsed and the counter updated."""
        if self.running is not None:
            # already looping -- refuse to start a second loop
            LOGGER.debug(' application already running')
            return
        self.running = True
        tick = 0
        self.urlsLength = 0
        if self._tServer is None:
            self._tServer = ProcessThread(host=HOSTNAME, port=PORT)
            self._tServer.start()
            self._tServer.setData(0)
        while True:
            if tick >= self.interval:
                LOGGER.debug(' parsing apache logs ')
                parsed = self.parser.parse()
                self.urlsLength = len(parsed)
                tick = 0
                if self.urlsLength > 0:
                    self.urlsCounter.update(parsed)
                    self.isDataPolled = False
                    LOGGER.debug(' new urls %d ', len(parsed))
                # hand the fresh count to the socket thread if it exists
                if self._tServer is not None:
                    self._tServer.setData(self.urlsLength)
            # LOGGER.debug(' --- {i} {n}'.format(i=i,n=self.interval))
            time.sleep(1)
            tick = tick + 1

    def status(self):
        LOGGER.debug(self.urlsLength)