Example #1
def main():
    '''
    Get aggregated statistics on incoming events
    to use in alerting/notices/queries about event patterns over time
    '''
    logger.debug('starting')
    logger.debug(options)
    es = ElasticsearchClient([str(s) for s in options.esservers])
    index = options.index
    stats = esSearch(es)
    logger.debug(json.dumps(stats))
    sleepcycles = 0
    try:
        while not es.index_exists(index):
            sleep(3)
            if sleepcycles == 3:
                logger.debug("The index is not created. Terminating eventStats.py cron job.")
                exit(1)
            sleepcycles += 1
        if es.index_exists(index):
            # post to elastic search servers directly without going through
            # message queues in case there is an availability issue
            es.save_event(index=index, body=json.dumps(stats))

    except Exception as e:
        logger.error("Exception %r when gathering statistics " % e)

    logger.debug('finished')
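
A note on esSearch: it is called above but not shown in this example. Below is a minimal sketch of what such a helper might look like, assuming the SearchQuery/ExistsMatch/Aggregation helpers from mozdef_util; the import paths, the aggregation calls, and the aggregated 'category' field are illustrative assumptions, not the project's confirmed implementation:

from datetime import datetime
from mozdef_util.utilities.toUTC import toUTC  # assumed import path
from mozdef_util.query_models import SearchQuery, ExistsMatch, Aggregation  # assumed import path

def esSearch(es):
    '''
    Sketch: aggregate the last hour of events by category and
    return a JSON-serializable stats dict.
    '''
    search_query = SearchQuery(minutes=60)
    search_query.add_must(ExistsMatch('category'))
    search_query.add_aggregation(Aggregation('category'))
    results = search_query.execute(es)
    return {
        'utctimestamp': toUTC(datetime.now()).isoformat(),
        'categories': results['aggregations']['category']['terms'],
    }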
Example #2
if alert_index_name not in all_indices:
    print "Creating " + alert_index_name
    client.create_index(alert_index_name, index_config=index_settings)
client.create_alias('alerts', alert_index_name)

if weekly_index_alias not in all_indices:
    print "Creating " + weekly_index_alias
    client.create_alias_multiple_indices(weekly_index_alias, [event_index_name, previous_event_index_name])

if kibana_index_name not in all_indices:
    print "Creating " + kibana_index_name
    client.create_index(kibana_index_name, index_config={"settings": index_options})

# Wait for .kibana index to be ready
num_times = 0
while not client.index_exists(kibana_index_name):
    if num_times < 3:
        print("Waiting for .kibana index to be ready")
        time.sleep(1)
        num_times += 1
    else:
        print(".kibana index not created...exiting")
        sys.exit(1)

# Check to see if index patterns exist in .kibana
query = SearchQuery()
query.add_must(TermMatch('_type', 'index-pattern'))
results = query.execute(client, indices=[kibana_index_name])
if len(results['hits']) == 0:
    # Create index patterns and assign default index mapping
    index_mappings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
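
The example ends right after computing index_mappings_path. A plausible continuation (a sketch under assumptions, not the project's confirmed code) walks that directory and saves each JSON file into .kibana as an index-pattern document; client.save_object and its doc_type parameter are assumptions about the ElasticsearchClient API:

    for mapping_filename in os.listdir(index_mappings_path):
        with open(os.path.join(index_mappings_path, mapping_filename), 'r') as f:
            mapping = json.loads(f.read())
        # save_object/doc_type are assumed ElasticsearchClient API
        client.save_object(
            index=kibana_index_name,
            doc_type='index-pattern',
            body=json.dumps(mapping))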
Example #3
    print "Creating " + alert_index_name
    client.create_index(alert_index_name)
client.create_alias('alerts', alert_index_name)

if weekly_index_alias not in all_indices:
    print "Creating " + weekly_index_alias
    client.create_alias_multiple_indices(
        weekly_index_alias, [event_index_name, previous_event_index_name])

if kibana_index_name not in all_indices:
    print "Creating " + kibana_index_name
    client.create_index(kibana_index_name)

# Wait for .kibana index to be ready
num_times = 0
while not client.index_exists('.kibana'):
    if num_times < 3:
        print("Waiting for .kibana index to be ready")
        time.sleep(1)
        num_times += 1
    else:
        print(".kibana index not created...exiting")
        sys.exit(1)

# Check to see if index patterns exist in .kibana
query = SearchQuery()
query.add_must(TermMatch('_type', 'index-pattern'))
results = query.execute(client, indices=['.kibana'])
if len(results['hits']) == 0:
    # Create index patterns and assign default index mapping
    index_mappings_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
Example #4
def main():
    '''
    Get health and status stats and post to ES
    Post both as a historical reference (for charts)
    and as a static docid (for realtime current health/EPS displays)
    '''
    logger.debug('starting')
    logger.debug(options)
    es = ElasticsearchClient([str(s) for s in options.esservers])
    index = options.index

    with open(options.default_mapping_file, 'r') as mapping_file:
        default_mapping_contents = json.load(mapping_file)

    if not es.index_exists(index):
        try:
            logger.debug('Creating %s index' % index)
            es.create_index(index, default_mapping_contents)
        except Exception as e:
            logger.error("Unhandled exception, terminating: %r" % e)

    auth = HTTPBasicAuth(options.mquser, options.mqpassword)

    for server in options.mqservers:
        logger.debug('checking message queues on {0}'.format(server))
        r = requests.get(
            'http://{0}:{1}/api/queues'.format(server, options.mqapiport),
            auth=auth)

        mq = r.json()
        # setup a log entry for health/status.
        healthlog = dict(
            utctimestamp=toUTC(datetime.now()).isoformat(),
            hostname=server,
            processid=os.getpid(),
            processname=sys.argv[0],
            severity='INFO',
            summary='mozdef health/status',
            category='mozdef',
            type='mozdefhealth',
            source='mozdef',
            tags=[],
            details=[])

        healthlog['details'] = dict(username='******')
        healthlog['details']['loadaverage'] = list(os.getloadavg())
        healthlog['details']['queues'] = list()
        healthlog['details']['total_deliver_eps'] = 0
        healthlog['details']['total_publish_eps'] = 0
        healthlog['details']['total_messages_ready'] = 0
        healthlog['tags'] = ['mozdef', 'status']
        for m in mq:
            if 'message_stats' in m and isinstance(m['message_stats'], dict):
                mready = m.get('messages_ready', 0)
                munack = m.get('messages_unacknowledged', 0)
                healthlog['details']['total_messages_ready'] += mready
                queueinfo = dict(
                    queue=m['name'],
                    vhost=m['vhost'],
                    messages_ready=mready,
                    messages_unacknowledged=munack)

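                # note: when both deliver_details and deliver_no_ack_details are
                # present, the second block below overwrites queueinfo['deliver_eps']
                # with the no-ack rate, while total_deliver_eps accumulates both rates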
                if 'deliver_details' in m['message_stats']:
                    queueinfo['deliver_eps'] = round(m['message_stats']['deliver_details']['rate'], 2)
                    healthlog['details']['total_deliver_eps'] += round(m['message_stats']['deliver_details']['rate'], 2)
                if 'deliver_no_ack_details' in m['message_stats']:
                    queueinfo['deliver_eps'] = round(m['message_stats']['deliver_no_ack_details']['rate'], 2)
                    healthlog['details']['total_deliver_eps'] += round(m['message_stats']['deliver_no_ack_details']['rate'], 2)
                if 'publish_details' in m['message_stats']:
                    queueinfo['publish_eps'] = round(m['message_stats']['publish_details']['rate'], 2)
                    healthlog['details']['total_publish_eps'] += round(m['message_stats']['publish_details']['rate'], 2)
                healthlog['details']['queues'].append(queueinfo)

        # post to elastic search servers directly without going through
        # message queues in case there is an availability issue
        es.save_event(index=index, body=json.dumps(healthlog))
        # post another doc with a static docid and tag
        # for use when querying for the latest status
        healthlog['tags'] = ['mozdef', 'status', 'latest']
        es.save_event(index=index, doc_id=getDocID(server), body=json.dumps(healthlog))
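
getDocID is referenced above but not shown. A plausible sketch, assuming the doc id is simply a stable hash derived from the server name so each run overwrites that server's 'latest' document in place; the exact id scheme is an assumption:

import hashlib

def getDocID(servername):
    # stable per-server id so repeated saves update the same 'latest' doc
    return hashlib.md5(
        '{0}.mozdefhealth.latest'.format(servername).encode('utf-8')
    ).hexdigest()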