Example #1
0
import json
import logging
import sys

# mdb (MongoDB handle helper), cf (tag/geo matching helpers) and RedisQueue
# (Redis-backed queue wrapper) are assumed to be project-local modules imported
# elsewhere in the original file.


def main(p):

    # The mongo bits
    try:
        c, dbh = mdb.getHandle(host=p.dbHost,
                               port=p.dbPort,
                               db=p.db,
                               user=p.dbUser,
                               password=p.dbPassword)
        evCollHandle = dbh[p.eventsCollection]
    except Exception:
        logging.critical('Failed to connect and authenticate', exc_info=True)
        sys.exit()

    # Get the current tags
    tags = cf.getCurrentTags(evCollHandle)
    # Get the current bounding boxes
    queryBBoxes = cf.getQueryBBox(evCollHandle)

    while True:

        # Here's the redis queue for managing the tweets as they come in
        try:
            q = RedisQueue(p.redisName,
                           host=p.redisHost,
                           password=p.redisPassword,
                           port=p.redisPort,
                           db=0)
        except Exception:
            logging.error('Failed to connect to REDIS db.', exc_info=True)
            sys.exit()

        # This call blocks, so execution waits here until a tweet arrives
        tweetStr = q.get()
        tweet = json.loads(tweetStr)

        # Work out which object/event this tweet is associated with
        if tags:
            tweetTags = cf.whichTags(tags, tweet)
            for tweetTag in tweetTags:
                success = dispatchTweet(p, tweet, tweetTag)
                logging.debug("Tag-based message dispatched: %s" % (success))

        if queryBBoxes:
            tweetGeos = cf.matchesCurrentGeos(queryBBoxes, tweet)
            for tweetGeo in tweetGeos:
                success = dispatchTweet(p, tweet, tweetGeo)
                logging.debug("Geo-based message dispatched: %s" % (success))
Example #2
0
# RedisQueue is assumed to be the same project-local Redis queue wrapper used
# in the other examples.

class AlertHelper(object):
    """Thin helper that posts alert messages onto a Redis-backed queue."""

    q = None

    def __init__(self, config, rhost=None, rport=None, rdb=None, chatq=None, prefix=None):
        # Values read from the config file override any keyword arguments.
        if config:
            rhost = config.get("redis", "redis.server").strip('"').strip("'")
            rport = config.get("redis", "redis.port")
            rdb = config.get("redis", "redis.db")
            chatq = config.get("alert_bot", "msg_bot.redis_mq").strip('"').strip("'")
            prefix = config.get("alert_bot", "msg_bot.redis_prefix").strip('"').strip("'")

        self.q = RedisQueue(chatq, prefix, rhost, rport, rdb)
        
    def post_msg(self, msg):
        """Push a single message onto the alert queue."""
        self.q.put(msg)
        
    def flush_all(self):
        """Drain the queue and return the number of messages discarded."""
        i = 0
        while not self.q.empty():
            self.q.get()
            i += 1
        return i
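
# Hypothetical usage of AlertHelper -- a sketch under assumptions: the config
# file name 'alert_bot.cfg' and the alert text are placeholders, and the file
# is expected to contain the [redis] and [alert_bot] options read above.
try:
    import configparser                     # Python 3
except ImportError:
    import ConfigParser as configparser     # Python 2

config = configparser.ConfigParser()
config.read('alert_bot.cfg')                # assumed config file name

helper = AlertHelper(config)
print('discarded %d stale message(s)' % helper.flush_all())
helper.post_msg('disk usage above 90% on host-42')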
Example #4
0
import logging
import threading
from ast import literal_eval
from threading import Lock
from time import sleep

import sleekxmpp

# RedisQueue is assumed to be the same project-local Redis queue wrapper used
# in the other examples.


class AlertMsgBot(sleekxmpp.ClientXMPP):

    """
    A basic SleekXMPP bot that will log in, send a message,
    and then log out.
    """
    config = None
    rsq = None
    recipients = None
    quit = False
    tlock = None

    def __init__(self, config):
        self.config = config
        host = config.get("redis", "redis.server").strip('"').strip("'")
        port = config.get("redis", "redis.port")
        db = config.get("redis", "redis.db")
        qname = config.get("alert_bot", "msg_bot.redis_mq").strip('"').strip("'")
        qprefix = config.get("alert_bot", "msg_bot.redis_prefix").strip('"').strip("'")
        
        self.rsq = RedisQueue(qname, qprefix, host, port, db)
        logging.info('Connect to redis on server->%s:%s db->%s  qname->%s:%s' % (host, port, db, qprefix, qname))
        jid = config.get("alert_bot", "msg_bot.jid").strip('"').strip("'")
        password = config.get("alert_bot", "msg_bot.pass").strip('"').strip("'")

        sleekxmpp.ClientXMPP.__init__(self, jid, password)
        
        # Parse the recipients list from the config. literal_eval is used in
        # place of eval so that only a plain Python literal (e.g. a list of
        # JIDs) is accepted.
        self.recipients = literal_eval(config.get("alert_bot", "msg_bot.recipients").strip('"').strip("'"))
        self.tlock = Lock()

        # The session_start event will be triggered when
        # the bot establishes its connection with the server
        # and the XML streams are ready for use. We want to
        # listen for this event so that we can initialize
        # our roster.
        self.add_event_handler("session_start", self.start, threaded=True)

    def start(self, event):
        """
        Process the session_start event.
        Typical actions for the session_start event are
        requesting the roster and broadcasting an initial
        presence stanza.
        Arguments:
            event -- An empty dictionary. The session_start
                     event does not provide any additional
                     data.
        """
        self.send_presence()
        self.get_roster()

        self.rsq.put("Greetings! The alert bot has just started!")
        # Pass the bound method itself as the thread target; calling it here
        # would run the loop inline and block start() instead of spawning it.
        t = threading.Thread(target=self.process_alerts)
        t.start()
#         self.send_message(mto=self.recipient,
#                           mbody=self.msg,
#                           mtype='chat')

    def process_alerts(self):
        """Poll the Redis queue and relay any queued messages until self.quit is set."""
        self.quit = False
        while not self.quit:
            if not self.rsq.empty():
                msg = self.rsq.get()
                logging.debug('process_alerts: received msg: {%s}' % msg)
                self.send_msg(msg)
            sleep(1)

        # Using wait=True ensures that the send queue will be
        # emptied before ending the session.
        self.disconnect(wait=True)


    def send_msg(self, msg):
        """Send msg to every configured recipient, serialising sends with a lock."""
        self.tlock.acquire()
        try:
            for r in self.recipients:
                self.send_message(r, msg, mtype='chat')
        finally:
            self.tlock.release()
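
# Hypothetical runner for AlertMsgBot -- a sketch, not part of the original
# example. The config file name is an assumption; connect()/process() is the
# standard sleekxmpp client loop.
if __name__ == '__main__':
    try:
        import configparser                     # Python 3
    except ImportError:
        import ConfigParser as configparser     # Python 2

    logging.basicConfig(level=logging.INFO)

    config = configparser.ConfigParser()
    config.read('alert_bot.cfg')                # assumed config file name

    xmpp = AlertMsgBot(config)
    if xmpp.connect():
        xmpp.process(block=True)                # blocks until disconnect()
    else:
        logging.error('Unable to connect to the XMPP server.')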
Example #5
0
import os

from elasticsearch import Elasticsearch, helpers

# RedisQueue plus the debug()/warning() logging helpers are assumed to be
# project-local utilities imported elsewhere in the original module.


def findProjectFiles(dConfig):

    qRedis = RedisQueue(dConfig['redis-queue-name'],
                        namespace='queue',
                        host=dConfig['redis-loc'])
    oES = Elasticsearch(dConfig['es-instance-locs'])

    lIgnoreDirs = ['.git', '.svn']

    dProject = {}
    dSource = {}

    dProject['_index'] = dConfig['es-index-name']
    dProject['_type'] = dConfig['es-index-type']

    dSource['crawl-time'] = dConfig['time-stamp']

    dSource['project-path'] = qRedis.get(block=True)

    lProjectFiles = []

    # '**done**' is a sentinel value signalling that no more projects remain
    while dSource['project-path'] != '**done**':

        dSource['project-name'] = os.path.basename(dSource['project-path'])

        if dConfig['debug']:
            debug('func: findProjectFiles()', 'project-path:',
                  dSource['project-path'], dSource['project-name'])

        for sRoot, lDirs, lFiles in os.walk(dSource['project-path']):

            if len(lProjectFiles) > dConfig['es-bulk-chunk-size']:

                # ingest chunk into elasticsearch
                helpers.bulk(oES, lProjectFiles)

                if dConfig['debug']:
                    debug('func: findProjectFiles()', str(len(lProjectFiles)),
                          'files loaded into elasticsearch')

                lProjectFiles = []

            for sFile in lFiles:

                sFilePath = os.path.join(sRoot, sFile)

                sRelPath = os.path.relpath(sFilePath, dSource['project-path'])

                dFile = {}

                try:

                    sRelPath.decode('utf-8')

                except (ValueError, UnicodeDecodeError) as e:

                    try:

                        sRelPath.decode('latin-1')

                    except (ValueError, UnicodeDecodeError) as e:

                        try:

                            sRelPath.decode('utf-16')

                        except (ValueError, UnicodeDecodeError) as e:

                            warning(
                                'func findProjectFiles():', 'sProjectPath:',
                                dSource['project-path'], 'sProjectName:',
                                dSource['project-name'], 'sFile:', sFile,
                                'sRelPath:', sRelPath,
                                'utf-8, latin-1, and utf-16 decoding failed',
                                'exception:', e)

                        else:

                            dSource['file'] = sFile.decode('utf-16')
                            dSource['path'] = sRelPath.decode('utf-16')
                            dProject['_source'] = dict(dSource)

                            lProjectFiles.append(dict(dProject))

                    else:

                        dSource['file'] = sFile.decode('latin-1')
                        dSource['path'] = sRelPath.decode('latin-1')
                        dProject['_source'] = dict(dSource)

                        lProjectFiles.append(dict(dProject))

                else:

                    # Copy both dicts before queueing: dSource and dProject are
                    # reused for every file, so appending them directly would
                    # leave the bulk list full of references to one object.
                    dSource['file'] = sFile
                    dSource['path'] = sRelPath
                    dProject['_source'] = dict(dSource)

                    lProjectFiles.append(dict(dProject))

            lDirs[:] = [sDir for sDir in lDirs if sDir not in lIgnoreDirs]

        # get next project to process
        dSource['project-path'] = qRedis.get(block=True)

    # index any remaining projects
    if lProjectFiles:

        # ingest chunk into elasticsearch
        helpers.bulk(oES, lProjectFiles)

        if dConfig['debug']:
            debug('func: findProjectFiles()', str(len(lProjectFiles)),
                  'files loaded into elasticsearch')
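
# Hypothetical producer-side sketch showing how findProjectFiles() might be
# fed. The dConfig keys mirror the ones read above; the host names, index
# names, chunk size and project paths are all placeholders.
import time

dConfig = {
    'redis-queue-name': 'project-paths',
    'redis-loc': 'localhost',
    'es-instance-locs': ['http://localhost:9200'],
    'es-index-name': 'source-files',
    'es-index-type': 'file',
    'es-bulk-chunk-size': 500,
    'time-stamp': time.strftime('%Y-%m-%dT%H:%M:%S'),
    'debug': True,
}

qRedis = RedisQueue(dConfig['redis-queue-name'],
                    namespace='queue',
                    host=dConfig['redis-loc'])

# Queue a couple of placeholder project directories, then the '**done**'
# sentinel so the worker loop can exit.
for sPath in ['/data/projects/alpha', '/data/projects/beta']:
    qRedis.put(sPath)
qRedis.put('**done**')

findProjectFiles(dConfig)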