import sys

# Note: get_data_validity_range() and generic_api_payload() are helper
# functions defined elsewhere in this plugin.


def custom_api_payload(plugin_hostname, data_validity):
    '''
    Build a custom Payload for ElasticSearch here: HTTP Request Body
    for getting Disk space values for a specified hostname
    '''
    try:
        # ElasticSearch Custom Variables:
        beat_name = plugin_hostname
        field_name = "system.filesystem.device_name"
        metricset_module = "system"
        metricset_name = "filesystem"

        # Get Data Validity Epoch Timestamp:
        newest_valid_timestamp, oldest_valid_timestamp = get_data_validity_range(data_validity)

        # Build the generic part of the API Request Body:
        generic_payload = generic_api_payload(100)
        custom_payload = {}
        custom_payload.update(generic_payload)

        # Add the Query structure with ElasticSearch Variables:
        custom_payload.update(
            {"query": {"bool": {"must": [], "filter": [], "should": [], "must_not": []}}})
        custom_payload["query"]["bool"]["must"].append({"match_all": {}})
        custom_payload["query"]["bool"]["must"].append({"exists": {"field": field_name}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"event.module": {"query": metricset_module}}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"metricset.name": {"query": metricset_name}}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"host.name": {"query": beat_name}}})
        custom_payload["query"]["bool"]["must"].append(
            {"range": {"@timestamp": {
                "gte": str(oldest_valid_timestamp),
                "lte": str(newest_valid_timestamp),
                "format": "epoch_millis"
            }}})

        return custom_payload

    except Exception as e:
        print("Error calling \"custom_api_payload\"... Exception {}".format(e))
        sys.exit(3)
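
# --- Usage sketch (not part of the original plugin) --------------------------
# A minimal example of how the payload built by the custom_api_payload()
# variant above could be submitted to the Elasticsearch "_search" endpoint
# with the "requests" library, mirroring the commented-out requests.get()
# pattern shown further below in get_interfaces(). The URL, index pattern,
# hostname and data-validity window are hypothetical; the real plugin derives
# the address and headers from its own generic_api_call() helper.
import requests


def example_disk_search(es_url="http://localhost:9200", hostname="srv01", data_validity=5):
    payload = custom_api_payload(hostname, data_validity)
    response = requests.get(
        url="{}/metricbeat-*/_search".format(es_url),      # hypothetical index pattern
        headers={"Content-Type": "application/json"},
        json=payload,
        verify=False,
    )
    # Each hit holds one filesystem document; in Metricbeat's schema the disk
    # usage values live under "_source.system.filesystem".
    return response.json()["hits"]["hits"]
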
def custom_api_payload(hostname, windows_service, data_validity):
    '''
    Build a custom Payload for ElasticSearch here: HTTP Request Body
    for getting events for a specified Windows service on a specified hostname
    '''
    try:
        # ElasticSearch Custom Variables:
        beat_name = hostname
        field_name = "windows.service.name"
        event_module = "windows"
        metricset_name = "service"

        # Get Data Validity Epoch Timestamp:
        newest_valid_timestamp, oldest_valid_timestamp = get_data_validity_range(data_validity)

        # Build the generic part of the API Request Body:
        generic_payload = generic_api_payload(1)
        custom_payload = {}
        custom_payload.update(generic_payload)

        # Add the Query structure with ElasticSearch Variables:
        custom_payload.update(
            {"query": {"bool": {"must": [], "filter": [], "should": [], "must_not": []}}})
        custom_payload["query"]["bool"]["must"].append({"match_all": {}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {field_name: {"query": windows_service}}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"event.module": {"query": event_module}}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"metricset.name": {"query": metricset_name}}})
        custom_payload["query"]["bool"]["must"].append(
            {"match_phrase": {"host.name": {"query": beat_name}}})
        custom_payload["query"]["bool"]["must"].append(
            {"range": {"@timestamp": {
                "gte": str(oldest_valid_timestamp),
                "lte": str(newest_valid_timestamp),
                "format": "epoch_millis"
            }}})

        return custom_payload

    except Exception as e:
        print("Error calling \"custom_api_payload\"... Exception {}".format(e))
        sys.exit(3)
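
# --- Usage sketch (not part of the original plugin) --------------------------
# How the single document requested by the payload above (generic_api_payload(1))
# might be reduced to a service state. The field "windows.service.state" is an
# assumption about Metricbeat's windows/service metricset, and the connection
# values and index pattern are illustrative only.
import requests


def example_windows_service_state(es_url="http://localhost:9200", hostname="win-srv01",
                                  service="W32Time", data_validity=5):
    payload = custom_api_payload(hostname, service, data_validity)
    response = requests.get(
        url="{}/metricbeat-*/_search".format(es_url),      # hypothetical index pattern
        headers={"Content-Type": "application/json"},
        json=payload,
        verify=False,
    )
    hits = response.json()["hits"]["hits"]
    if not hits:
        return None
    # Assumed Metricbeat field layout for the windows/service metricset.
    return hits[0]["_source"]["windows"]["service"].get("state")
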
def custom_api_payload(hostname, data_validity):
    '''
    Build a custom Payload for ElasticSearch here: HTTP Request Body
    for getting Network interface values for a specified hostname
    '''
    try:
        # ElasticSearch Custom Variables:
        metricset_module = "system"
        metricset_name = "network"

        # Get Data Validity Epoch Timestamp:
        newest_valid_timestamp, oldest_valid_timestamp = get_data_validity_range(data_validity)

        # Build the generic part of the API Request Body:
        custom_payload = generic_api_payload(40)

        # Add the Query structure with ElasticSearch Variables:
        custom_payload.update(
            {'query': {'bool': {'must': [], 'filter': [], 'should': [], 'must_not': []}}})
        custom_payload['query']['bool']['must'].append({'match_all': {}})
        custom_payload['query']['bool']['must'].append(
            {'match_phrase': {'event.module': {'query': metricset_module}}})
        custom_payload['query']['bool']['must'].append(
            {'match_phrase': {'metricset.name': {'query': metricset_name}}})
        custom_payload['query']['bool']['must'].append(
            {'match_phrase': {'host.name': {'query': hostname}}})
        custom_payload['query']['bool']['must'].append(
            {'exists': {'field': 'system.network'}})
        # Exclude the loopback interface:
        custom_payload['query']['bool']['must_not'].append(
            {'match': {'system.network.name': {'query': 'lo'}}})
        custom_payload['query']['bool']['must'].append(
            {'range': {'@timestamp': {
                'gte': str(oldest_valid_timestamp),
                'lte': str(newest_valid_timestamp),
                'format': 'epoch_millis'
            }}})

        return custom_payload

    except Exception as e:
        print("Error calling \"custom_api_payload\"... Exception {}".format(e))
        sys.exit(3)
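
# --- Usage sketch (not part of the original plugin) --------------------------
# The network payload built above can also be passed to the official
# elasticsearch-py client, as get_interfaces() further below does with its own
# query. The connection defaults (localhost:9200) and the index pattern are
# assumptions.
from elasticsearch import Elasticsearch


def example_list_interface_names(hostname="srv01", data_validity=5):
    es = Elasticsearch()                                   # assumes a local cluster
    payload = custom_api_payload(hostname, data_validity)
    results = es.search(index="metricbeat*", body=payload)
    # Collect the distinct interface names seen in the returned documents.
    return sorted({hit["_source"]["system"]["network"]["name"]
                   for hit in results["hits"]["hits"]})
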
def custom_api_payload_get_process(hostname, process_name, data_validity, timestamp):
    '''
    Build a custom Payload for ElasticSearch here: HTTP Request Body
    for getting Process values for a specified hostname at a given timestamp
    '''
    try:
        if timestamp != "TBD":
            # ElasticSearch Custom Variables:
            beat_name = hostname
            field_name = "process.name"
            event_module = "system"
            metricset_name = "process"

            # Get Data Validity Epoch Timestamp:
            newest_valid_timestamp, oldest_valid_timestamp = get_data_validity_range(data_validity)

            # Build the generic part of the API Request Body:
            generic_payload = generic_api_payload(50)
            payload_get_process = {}
            payload_get_process.update(generic_payload)

            # Add the Query structure with ElasticSearch Variables:
            payload_get_process.update(
                {"query": {"bool": {"must": [], "filter": [], "should": [], "must_not": []}}})
            payload_get_process["query"]["bool"]["must"].append({"match_all": {}})
            payload_get_process["query"]["bool"]["must"].append(
                {"match_phrase": {field_name: {"query": process_name}}})
            payload_get_process["query"]["bool"]["must"].append(
                {"match_phrase": {"@timestamp": {"query": timestamp}}})
            payload_get_process["query"]["bool"]["must"].append(
                {"match_phrase": {"event.module": {"query": event_module}}})
            payload_get_process["query"]["bool"]["must"].append(
                {"match_phrase": {"metricset.name": {"query": metricset_name}}})
            payload_get_process["query"]["bool"]["must"].append(
                {"match_phrase": {"host.name": {"query": beat_name}}})
            payload_get_process["query"]["bool"]["must"].append(
                {"range": {"@timestamp": {
                    "gte": str(oldest_valid_timestamp),
                    "lte": str(newest_valid_timestamp),
                    "format": "epoch_millis"
                }}})
        else:
            payload_get_process = "No_Payload"
            print("No Event found for Process \"{}\"".format(process_name))

        return payload_get_process

    except Exception as e:
        print("Error calling \"custom_api_payload_get_process\"... Exception {}".format(e))
        sys.exit(3)
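
# --- Usage sketch (not part of the original plugin) --------------------------
# Caller-side handling of the "No_Payload" sentinel returned above when no
# event timestamp is available for the process. The connection values and
# index pattern are assumptions.
import requests


def example_process_documents(hostname, process_name, timestamp,
                              data_validity=5, es_url="http://localhost:9200"):
    payload = custom_api_payload_get_process(hostname, process_name, data_validity, timestamp)
    if payload == "No_Payload":
        # No event was found for the process, so there is nothing to query.
        return []
    response = requests.get(
        url="{}/metricbeat-*/_search".format(es_url),      # hypothetical index pattern
        headers={"Content-Type": "application/json"},
        json=payload,
        verify=False,
    )
    return response.json()["hits"]["hits"]
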
from datetime import datetime
import pprint

from elasticsearch import Elasticsearch


def get_interfaces(elastichost, hostname, data_validity, verbose):
    '''
    Query ElasticSearch for the latest Metricbeat network documents of a
    specified hostname and return one entry per network interface.
    '''
    es = Elasticsearch()

    # ElasticSearch Custom Variables:
    metricset_module = "system"
    metricset_name = "network"

    # Get Data Validity Epoch Timestamp:
    newest_valid_timestamp, oldest_valid_timestamp = get_data_validity_range(data_validity)

    esquery = {
        'query': {
            'bool': {
                'must': [
                    {'match_all': {}},
                    {'match_phrase': {'event.module': {'query': metricset_module}}},
                    {'match_phrase': {'metricset.name': {'query': metricset_name}}},
                    {'match_phrase': {'host.name': {'query': hostname}}},
                    {'exists': {'field': 'system.network'}},
                    {'range': {'@timestamp': {
                        'gte': str(oldest_valid_timestamp),
                        'lte': str(newest_valid_timestamp),
                        'format': 'epoch_millis'
                    }}},
                ],
                'should': [],
                'must_not': [
                    # Exclude the loopback interface:
                    {'match': {'system.network.name': {'query': 'lo'}}},
                ],
                'filter': [
                    # {'term': {'hostname': hostname}},
                    # {'term': {'_type': '_doc'}},
                    # {'range': {'timestamp': {'gte': str(oldest_valid_timestamp), 'format': 'epoch_millis'}}}
                ],
            },
        },
        'sort': [
            {'@timestamp': {'order': 'desc', 'unmapped_type': 'boolean'}},
        ],
        '_source': {'excludes': []},
        'size': 40,
        'version': True,
    }

    try:
        # Get prerequisites for ElasticSearch API:
        # addr, header = generic_api_call(elastichost)
        # payload = custom_api_payload(hostname, data_validity)
        # Request the ElasticSearch API:
        # results = requests.get(url=addr, headers=header, json=payload, verify=False)
        # results_json = results.json()
        now = datetime.utcnow()
        elindexname = 'metricbeat'
        curindex = "{}-*-{:04d}-{:02d}-{:02d}".format(elindexname, now.year, now.month, now.day)
        print(curindex)
        results_json = es.search(index='metricbeat*', body=esquery)

        if verbose:
            pp = pprint.PrettyPrinter(indent=4)
            print("### VERBOSE MODE - API REST HTTP RESPONSE: ##########################################")
            print("### request payload:")
            pp.pprint(esquery)
            print("### JSON output:")
            print(results_json)
            print("####################################################################################")

        if not bool(results_json['timed_out']) and int(results_json["hits"]["total"]['value']) > 0:
            niclst = []
            # Get the list of returned documents, then keep only the latest
            # document for each network interface:
            allfslist = [i['_source'] for i in results_json['hits']['hits']]
            for nicname in set([i['system']['network']['name'] for i in allfslist]):
                item = max([i for i in allfslist if i['system']['network']['name'] == nicname],
                           key=lambda doc: doc['@timestamp'])
                niclst.append(item['system']['network'])

            # Return the interfaces sorted by the length of their name:
            def sort_list(element):
                return len(element['name'])

            return sorted(niclst, key=sort_list)
        else:
            # No network document returned:
            return False

    except Exception as e:
        print("Error calling \"get_interfaces\"... Exception {}".format(e))
        sys.exit(3)
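
# --- Usage sketch (not part of the original plugin) --------------------------
# Calling get_interfaces() and printing one line per NIC. The "in.bytes" /
# "out.bytes" field names follow Metricbeat's system/network schema, and the
# argument values are assumptions; note that elastichost is not used by
# get_interfaces() in its current Elasticsearch() form.
def example_print_interfaces(hostname="srv01", data_validity=5):
    interfaces = get_interfaces("localhost", hostname, data_validity, verbose=False)
    if interfaces is False:
        print("No network documents found for host {}".format(hostname))
        return
    for nic in interfaces:
        print("{name}: rx={rx} bytes, tx={tx} bytes".format(
            name=nic.get("name"),
            rx=nic.get("in", {}).get("bytes"),
            tx=nic.get("out", {}).get("bytes"),
        ))
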