def eventCreator():
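    """Consume LHCOPN netflow messages from the queue and bulk-index link utilization docs."""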
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {
            '_type': 'netflow_lhcopn'
        }
        if 'data' not in m:
            print(threading.current_thread().name, 'no data in this message!')
            q.task_done()
            continue

        source = m['data']['src_site']
        destination = m['data']['dst_site']
        data['MA'] = 'capc.cern'
        data['srcInterface'] = source
        data['dstInterface'] = destination
        ts = m['data']['timestamp']
        th = m['data']['throughput']
        dati = datetime.utcfromtimestamp(float(ts))
        data['_index'] = "network_weather-" + \
            str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
        data['timestamp'] = int(float(ts) * 1000)
        data['utilization'] = int(th)
        # print(data)
        aLotOfData.append(copy.copy(data))

        q.task_done()
        if len(aLotOfData) > 10:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
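    """Consume perfSONAR service-check results (ntp, esmond/OSG freshness, pscheduler) and index them as ps_perf docs."""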
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {
            '_type': 'ps_perf'
        }

        metrics = ['perfSONAR services: ntp', 'perfSONAR esmond freshness', 'OSG datastore freshness',
                   'perfSONAR services: pscheduler']
        if not any(pattern in m['metric'] for pattern in metrics):
            q.task_done()
            continue
        if 'perf_metrics' not in m or not m['perf_metrics']:
            q.task_done()
            continue
        data['host'] = m['host']
        prefix = m['metric'].replace("perfSONAR", "ps").replace(":", "").replace(" ", "_").lower()
        for k, v in m['perf_metrics'].items():
            data[prefix + "_" + k] = v
        dati = datetime.utcfromtimestamp(float(m['timestamp']))
        data['_index'] = "network_weather-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
        data['timestamp'] = int(float(m['timestamp']) * 1000)
        #print(data)
        aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
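    """Consume throughput measurement messages and index one doc per datapoint (monthly index)."""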
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'doc'}
        # print(m)
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(
            destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(
                dati.month)  # + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['throughput'] = th
            # print(data)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
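    """Consume packet-loss measurement messages and index one packet_loss_rate doc per datapoint."""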
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'packet_loss_rate'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        su = m['datapoints']
        # print(su)
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + str(dati.year) + "." + str(
                dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['packet_loss'] = th
            # print(data)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
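    """Consume retransmit measurement messages and index one retransmits doc per datapoint."""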
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {
            '_type': 'retransmits'
        }
        # print(m)
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(
            destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['retransmits'] = th
            # print(data)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
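    """Consume link-utilization summaries and index the 60-second 'statistics' datapoints as sim_util docs."""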
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no summaries found in the message")
            continue
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(
                        dati.month) + "." + str(dati.day)
                    data['timestamp'] = int(float(r[0]) * 1000)
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
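    """Consume link-utilization summaries and index the 60-second 'statistics' datapoints as link_utilization docs."""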
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {
            '_type': 'link_utilization'
        }

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            q.task_done()
            print(threading.current_thread().name, "no summaries found in the message")
            continue
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = "network_weather-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
                    data['timestamp'] = int(float(r[0]) * 1000)
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
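    """Consume perfSONAR service-check results and index them as ps_perf docs."""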
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'ps_perf'}

        metrics = [
            'perfSONAR services: ntp', 'perfSONAR esmond freshness',
            'OSG datastore freshness', 'perfSONAR services: pscheduler'
        ]
        if not any(pattern in m['metric'] for pattern in metrics):
            q.task_done()
            continue
        if 'perf_metrics' not in m or not m['perf_metrics']:
            q.task_done()
            continue
        data['host'] = m['host']
        prefix = m['metric'].replace("perfSONAR",
                                     "ps").replace(":",
                                                   "").replace(" ",
                                                               "_").lower()
        for k, v in m['perf_metrics'].items():
            data[prefix + "_" + k] = v
        dati = datetime.utcfromtimestamp(float(m['timestamp']))
        data['_index'] = "network_weather-" + str(dati.year) + "." + str(
            dati.month) + "." + str(dati.day)
        data['timestamp'] = int(float(m['timestamp']) * 1000)
        #print(data)
        aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
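    """Consume one-way delay histograms and index mean, standard deviation, and median delay per datapoint."""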
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'latency'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + str(dati.year) + "." + str(
                dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            # th is a histogram: measured delay value -> number of samples
            th_fl = {float(k): v for k, v in th.items()}

            # mean
            samples = sum(th_fl.values())
            th_mean = sum(k * v for k, v in th_fl.items()) / samples
            data['delay_mean'] = th_mean
            # std dev
            data['delay_sd'] = math.sqrt(
                sum((k - th_mean)**2 * v for k, v in th_fl.items()) / samples)
            # median
            csum = 0
            ordered_th = sorted(th_fl.items())
            midpoint = samples // 2
            if samples % 2 == 0:  # even number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum > midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint:
                        data['delay_median'] = (entry[0] + ordered_th[index + 1][0]) / 2
                        break
                    elif csum == midpoint + 1 and index == 0:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint + 1 and index > 0:
                        data['delay_median'] = (entry[0] + ordered_th[index - 1][0]) / 2
                        break
            else:  # odd number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum >= midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
            aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
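    """Consume perfSONAR host metadata and index one cleaned-up doc per message (services, geolocation, ntp, cpu info)."""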
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'doc'}

        dati = datetime.utcfromtimestamp(float(m['timestamp']))
        data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(
            dati.month)  # + "." + str(dati.day)
        data.update(m)
        data.pop('interfaces', None)
        data['timestamp'] = int(float(m['timestamp']) * 1000)
        data['host'] = data.get('external_address', {}).get('dns_name')

        if "services" in data:
            sers = copy.deepcopy(data["services"])
            data["services"] = {}
            for s in sers:
                if "name" in s:
                    service_name = s["name"]
                    del s["name"]
                    tps = {}
                    if "testing_ports" in s:
                        for tp in s["testing_ports"]:
                            if 'type' not in tp:
                                continue
                            tps[tp['type']] = {
                                "min_port": tp["min_port"],
                                "max_port": tp["max_port"]
                            }
                        s['testing_ports'] = tps
                    data["services"][service_name] = s
                else:
                    continue

        clean(data)

        if 'location' in data.keys():
            lat = data['location'].get('latitude', 0)
            lgt = data['location'].get('longitude', 0)
            if lat and lgt:
                data['geolocation'] = "%s,%s" % (lat, lgt)
            del data['location']

        if 'ntp' in data.keys():
            n = data['ntp']
            convert_to_float(n, ['delay', 'dispersion', 'offset'])
            convert_to_int(
                n, ['synchronized', 'stratum', 'reach', 'polling_interval'])

        if 'external_address' in data.keys():
            ea = data['external_address']
            if 'counters' in ea.keys():
                convert_to_int(ea['counters'], ea['counters'].keys())

        convert_to_int(data, ['cpu_cores', 'cpus'])
        convert_to_float(data, ['cpu_speed'])

        # print('-----------')
        # print(data)

        aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 10:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
            else:
                print(aLotOfData)
def eventCreator():
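    """Consume traceroute messages and index per-timestamp hop/RTT/TTL lists plus a hash of the path."""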
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {
            '_type': 'traceroute'
        }
        # print(m)
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(
            destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        dp = m['datapoints']
        # print(dp)
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            hops = dp[ts]
            for hop in hops:
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                nq = int(hop['query'])
                if nq != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
                # print(data)
            hs = ''
            for h in data['hops']:
                if h is None:
                    hs += "None"
                else:
                    hs += h
            data['n_hops'] = len(data['hops'])
            if len(data['rtts']):
                data['max_rtt'] = max(data['rtts'])
            # note: Python 3's built-in hash() is salted per process, so this
            # path hash is not stable across runs
            data['hash'] = hash(hs)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
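    """Consume FTS transfer-completion messages (already-parsed dicts) and index them into daily fts-* indices."""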
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        m = q.get()
        # print(m)
        dati = datetime.utcfromtimestamp(float(m['tr_timestamp_start']) / 1000)
        data = {
            '_type': 'docs',
            '_id': m['tr_id'],
            '_index': 'fts-' + str(dati.year) + "-" + str(dati.month).zfill(2) + "-" + str(dati.day).zfill(2),
            'endpnt': m['endpnt'],
            'vo': m['vo'],
            "src_hostname":  m['src_hostname'],
            "dst_hostname":  m['dst_hostname'],
            "f_size":  m['f_size'],
            "retry": m['retry'],
            "processing_start": m['timestamp_tr_st'],
            "processing_stop": m['timestamp_tr_comp'],
            "transfer_start": m['tr_timestamp_start'],
            "transfer_stop": m['tr_timestamp_complete'],
            "final_transfer_state": m['t_final_transfer_state']
        }
        if m['timestamp_chk_src_st'] > 0:
            data['timestamp_chk_src_st'] = m['timestamp_chk_src_st']
            data['timestamp_chk_src_ended'] = m['timestamp_chk_src_ended']

        if m['timestamp_checksum_dest_st'] > 0:
            data['timestamp_chk_dst_st'] = m['timestamp_checksum_dest_st']
            data['timestamp_chk_dst_ended'] = m['timestamp_checksum_dest_ended']

        if m['t_error_code']:
            data['error_code'] = m['t_error_code']

        if m['t_failure_phase']:
            data['failure_phase'] = m['t_failure_phase']

        if m['tr_error_category']:
            data['error_category'] = m['tr_error_category']

        if m['t__error_message']:
            data['error_message'] = m['t__error_message']

        if 'file_metadata' in m and m['file_metadata'] is not None and not isinstance(m['file_metadata'], int):
            md = m['file_metadata']
            data['metadata'] = {}

            if 'name' in md and md['name'] is not None:
                data['metadata']['name'] = md['name']

            if 'scope' in md and md['scope'] is not None:
                data['metadata']['scope'] = md['scope']

            if 'src_type' in md and md['src_type'] is not None:
                data['metadata']['src_type'] = md['src_type']
            if 'dst_type' in md and md['dst_type'] is not None:
                data['metadata']['dst_type'] = md['dst_type']
            if 'src_rse' in md and md['src_rse'] is not None:
                data['metadata']['src_rse'] = md['src_rse']
                so = siteMapping.get_site_from_ddm(md['src_rse'])
                if so is not None:
                    data['metadata']['src_site'] = so
            if 'dst_rse' in md and md['dst_rse'] is not None:
                data['metadata']['dst_rse'] = md['dst_rse']
                de = siteMapping.get_site_from_ddm(md['dst_rse'])
                if de is not None:
                    data['metadata']['dst_site'] = de
            if 'request_id' in md:
                data['metadata']['request_id'] = md['request_id']
            if 'activity' in md:
                data['metadata']['activity'] = md['activity']
        aLotOfData.append(copy.copy(data))

        q.task_done()

        if len(aLotOfData) > 500:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
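    """Consume perfSONAR host metadata and index it as a 'meta' doc (services, geolocation, ntp, cpu info)."""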
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'meta'}

        dati = datetime.utcfromtimestamp(float(m['timestamp']))
        data['_index'] = "network_weather-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
        data.update(m)
        data.pop('interfaces', None)
        data['timestamp'] = int(float(m['timestamp']) * 1000)
        data['host'] = data.get('external_address', {}).get('dns_name')

        if "services" in data:
            sers = copy.deepcopy(data["services"])
            data["services"] = {}
            for s in sers:
                if "name" in s:
                    service_name = s["name"]
                    del s["name"]
                    tps = {}
                    if "testing_ports" in s:
                        for tp in s["testing_ports"]:
                            if 'type' not in tp:
                                continue
                            tps[tp['type']] = {"min_port": tp["min_port"], "max_port": tp["max_port"]}
                        s['testing_ports'] = tps
                    data["services"][service_name] = s
                else:
                    continue

        clean(data)

        if 'location' in data.keys():
            lat = data['location'].get('latitude', 0)
            lgt = data['location'].get('longitude', 0)
            if lat and lgt:
                data['geolocation'] = "%s,%s" % (lat, lgt)
            del data['location']

        if 'ntp' in data.keys():
            n = data['ntp']
            convert_to_float(n, ['delay', 'dispersion', 'offset'])
            convert_to_int(n, ['synchronized', 'stratum', 'reach', 'polling_interval'])

        if 'external_address' in data.keys():
            ea = data['external_address']
            if 'counters' in ea.keys():
                convert_to_int(ea['counters'], ea['counters'].keys())

        convert_to_int(data, ['cpu_cores', 'cpus'])
        convert_to_float(data, ['cpu_speed'])

        # print('-----------')
        # print(data)

        aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 10:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
            else:
                print(aLotOfData)
def eventCreator():
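    """Consume traceroute messages and index per-timestamp hop/RTT/TTL lists plus a hash of the path."""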
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {
            '_type': 'traceroute'
        }
        # print(m)
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(
            destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        dp = m['datapoints']
        # print(dp)
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            hops = dp[ts]
            for hop in hops:
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                nq = int(hop['query'])
                if nq != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
                # print(data)
            hs = ''
            for h in data['hops']:
                if h is None:
                    hs += "None"
                else:
                    hs += h
            data['n_hops'] = len(data['hops'])
            if len(data['rtts']):
                data['max_rtt'] = max(data['rtts'])
            data['hash'] = hash(hs)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
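
# All of the eventCreator() variants above share one worker pattern: block on a
# shared queue, turn one message into one or more Elasticsearch docs, and flush
# them in batches via tools.bulk_index(). The sketch below shows how such
# workers are typically wired up. It is an assumption inferred from the code
# above, not part of the original source: the queue name `q`, the thread count,
# and the raw-JSON message format are all hypothetical.
import threading
from queue import Queue

q = Queue()

def start_workers(num_threads=4):
    # Daemon threads exit together with the main program; each q.get() in
    # eventCreator() is paired with a q.task_done(), so q.join() can be used
    # to wait until every queued message has been processed.
    for i in range(num_threads):
        t = threading.Thread(target=eventCreator, name="worker-%d" % i)
        t.daemon = True
        t.start()

# Hypothetical usage:
# start_workers()
# q.put('{"meta": {...}, "datapoints": {...}}')  # raw JSON string, as consumed above
# q.join()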