Example no. 1
0
        sys.exit(1)

# Check whether any index patterns already exist in the Kibana index.
query = SearchQuery()
query.add_must(TermMatch('_type', 'index-pattern'))
results = query.execute(client, indices=[kibana_index_name])
if len(results['hits']) == 0:
    # No patterns yet: create one index pattern per JSON file shipped in the
    # 'index_mappings' directory next to this script.  Each mapping's 'title'
    # field doubles as the document id, so re-running stays idempotent.
    index_mappings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
    listing = os.listdir(index_mappings_path)
    for infile in listing:
        json_file_path = os.path.join(index_mappings_path, infile)
        with open(json_file_path) as json_data:
            mapping_data = json.load(json_data)
            # Python 2 print statement -- this script targets Python 2.
            print "Creating {0} index mapping".format(mapping_data['title'])
            client.save_object(body=mapping_data, index=kibana_index_name, doc_type='index-pattern', doc_id=mapping_data['title'])

    # Make 'events' the default index pattern via Kibana's settings API.
    # The 'kbn-xsrf' header is required by Kibana on state-changing requests.
    print "Assigning events as default index mapping"
    index_name = 'events'
    url = '{}/api/kibana/settings/defaultIndex'.format(kibana_url)
    data = {'value': index_name}
    r = requests.post(url, json=data, headers={'kbn-xsrf': "true"})
    if not r.ok:
        # Best-effort: report the failure but continue with the bootstrap.
        print("Failed to set defaultIndex to events : {} {}".format(r.status_code, r.content))


# Check whether dashboards already exist in the Kibana index
# (the corresponding 'if' lives below this span).
query = SearchQuery()
query.add_must(TermMatch('_type', 'dashboard'))
results = query.execute(client, indices=[kibana_index_name])
Example no. 2
0
# Check whether any index patterns already exist in the '.kibana' index.
query = SearchQuery()
query.add_must(TermMatch('_type', 'index-pattern'))
results = query.execute(client, indices=['.kibana'])
if len(results['hits']) == 0:
    # No patterns yet: create one index pattern per JSON file found in the
    # 'index_mappings' directory next to this script, using each mapping's
    # 'title' as the document id (idempotent on re-run).
    index_mappings_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'index_mappings')
    listing = os.listdir(index_mappings_path)
    for infile in listing:
        json_file_path = os.path.join(index_mappings_path, infile)
        with open(json_file_path) as json_data:
            mapping_data = json.load(json_data)
            # Python 2 print statement -- this script targets Python 2.
            print "Creating {0} index mapping".format(mapping_data['title'])
            client.save_object(body=mapping_data,
                               index='.kibana',
                               doc_type='index-pattern',
                               doc_id=mapping_data['title'])

    # Make 'events' the default index by writing Kibana's 'config' document
    # directly into '.kibana'.  Flush first so the patterns saved above are
    # visible before the config doc references one of them.
    client.flush('.kibana')
    default_mapping_data = {"defaultIndex": 'events'}
    print "Assigning events as default index mapping"
    # Kibana keys its config doc by version, hence doc id = kibana_version.
    client.save_object(default_mapping_data, '.kibana', 'config',
                       kibana_version)

# Check whether dashboards already exist in the '.kibana' index.
query = SearchQuery()
query.add_must(TermMatch('_type', 'dashboard'))
results = query.execute(client, indices=['.kibana'])
if len(results['hits']) == 0:
    dashboards_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
Example no. 3
0
def main():
    '''
    Get health and status stats and post to ES.

    For each configured RabbitMQ server, query the management API for queue
    stats, assemble a 'mozdefhealth' event, and index it twice:
      - once with an auto-generated id (historical record, for charts), and
      - once under a static per-server doc id (realtime current health/EPS
        displays query the latest copy).
    '''
    logger.debug('starting')
    logger.debug(options)
    # Build the ES client from the configured server list.
    es = ElasticsearchClient(
        (list('{0}'.format(s) for s in options.esservers)))
    index = options.index

    # Load the index mapping used when the target index must be created.
    with open(options.default_mapping_file, 'r') as mapping_file:
        default_mapping_contents = json.loads(mapping_file.read())

    if not es.index_exists(index):
        try:
            logger.debug('Creating %s index' % index)
            es.create_index(index, default_mapping_contents)
        except Exception as e:
            # Log and fall through; the save_object calls below will surface
            # any persistent index problem.
            logger.error("Unhandled exception, terminating: %r" % e)

    # RabbitMQ management API credentials (HTTP basic auth).
    auth = HTTPBasicAuth(options.mquser, options.mqpassword)

    for server in options.mqservers:
        logger.debug('checking message queues on {0}'.format(server))
        # Management API endpoint listing all queues with their stats.
        r = requests.get('http://{0}:{1}/api/queues'.format(
            server, options.mqapiport),
                         auth=auth)

        mq = r.json()
        # Skeleton log entry for this server's health/status event.
        healthlog = dict(utctimestamp=toUTC(datetime.now()).isoformat(),
                         hostname=server,
                         processid=os.getpid(),
                         processname=sys.argv[0],
                         severity='INFO',
                         summary='mozdef health/status',
                         category='mozdef',
                         type='mozdefhealth',
                         source='mozdef',
                         tags=[],
                         details=[])

        # 'details' is replaced wholesale with a dict here (the [] above is
        # only a placeholder).  NOTE(review): the username value appears
        # scrubbed to '******' in this copy -- confirm the intended value.
        healthlog['details'] = dict(username='******')
        healthlog['details']['loadaverage'] = list(os.getloadavg())
        healthlog['details']['queues'] = list()
        # Running totals across all queues on this server.
        healthlog['details']['total_deliver_eps'] = 0
        healthlog['details']['total_publish_eps'] = 0
        healthlog['details']['total_messages_ready'] = 0
        healthlog['tags'] = ['mozdef', 'status']
        for m in mq:
            # Only queues that actually report message_stats are summarized.
            if 'message_stats' in m and isinstance(m['message_stats'], dict):
                if 'messages_ready' in m:
                    mready = m['messages_ready']
                    healthlog['details']['total_messages_ready'] += m[
                        'messages_ready']
                else:
                    mready = 0
                if 'messages_unacknowledged' in m:
                    munack = m['messages_unacknowledged']
                else:
                    munack = 0
                queueinfo = dict(queue=m['name'],
                                 vhost=m['vhost'],
                                 messages_ready=mready,
                                 messages_unacknowledged=munack)

                # Per-queue and aggregate delivery rates (events/sec).
                if 'deliver_details' in m['message_stats']:
                    queueinfo['deliver_eps'] = round(
                        m['message_stats']['deliver_details']['rate'], 2)
                    healthlog['details']['total_deliver_eps'] += round(
                        m['message_stats']['deliver_details']['rate'], 2)
                # NOTE(review): when both deliver_details and
                # deliver_no_ack_details are present, this OVERWRITES the
                # per-queue 'deliver_eps' set above while the total still
                # sums both -- confirm whether that is intended.
                if 'deliver_no_ack_details' in m['message_stats']:
                    queueinfo['deliver_eps'] = round(
                        m['message_stats']['deliver_no_ack_details']['rate'],
                        2)
                    healthlog['details']['total_deliver_eps'] += round(
                        m['message_stats']['deliver_no_ack_details']['rate'],
                        2)
                if 'publish_details' in m['message_stats']:
                    queueinfo['publish_eps'] = round(
                        m['message_stats']['publish_details']['rate'], 2)
                    healthlog['details']['total_publish_eps'] += round(
                        m['message_stats']['publish_details']['rate'], 2)
                healthlog['details']['queues'].append(queueinfo)

        # post to elastic search servers directly without going through
        # message queues in case there is an availability issue
        es.save_object(index=index, body=json.dumps(healthlog))
        # Post a second copy with a static per-server doc id and a 'latest'
        # tag so dashboards can fetch the most recent status cheaply.
        healthlog['tags'] = ['mozdef', 'status', 'latest']
        es.save_object(index=index,
                       doc_id=getDocID(server),
                       body=json.dumps(healthlog))