Code example #1
def eventCreator():
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        su = m['datapoints']
        # print(su)
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(
                dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['packet_loss'] = th
            # print(data)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
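Note on examples #1 and #2: documents are buffered in aLotOfData and flushed through tools.bulk_index only once more than 500 are queued, and each document carries its own _index derived from the datapoint timestamp. A minimal sketch of that index-name derivation follows (the INDEX_PREFIX value here is an assumption for illustration; the real constant is defined outside the snippet):

# Sketch (not from the original source): daily index name from an
# epoch-seconds timestamp, as in the examples above.
from datetime import datetime

INDEX_PREFIX = "ps_packet_loss-"  # assumed value, for illustration only

ts = "1514764800"  # example key from m['datapoints']
dati = datetime.utcfromtimestamp(float(ts))
index = INDEX_PREFIX + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
print(index)  # ps_packet_loss-2018.1.1 -- note month/day are not zero-padded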
Code example #2
def eventCreator():
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no summaries found in the message")
            continue
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(
                        dati.month) + "." + str(dati.day)
                    data['timestamp'] = r[0] * 1000
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    # append inside the inner loop so every result is kept,
                    # not just the last one per summary
                    aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
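Only summaries with a 60-second window and type 'statistics' are indexed; note that summary_window is compared as the string '60', which is how it arrives in the JSON. A small self-contained filter sketch under that assumption (the sample payload is invented):

# Sketch: selecting 60-second 'statistics' summaries, mirroring the
# comparison in example #2. The payload below is illustrative only.
summaries = [
    {'summary_window': '60', 'summary_type': 'statistics',
     'summary_data': [[1514764800, {'ml': 0.93}]]},
    {'summary_window': '300', 'summary_type': 'statistics',
     'summary_data': []},
]
for s in summaries:
    if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
        for ts, values in s['summary_data']:
            print(ts, values['ml'])  # 1514764800 0.93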
Code example #3
def eventCreator():
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {
            '_type': 'packet_loss_rate'
        }

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name, "no datapoints found in the message")
            continue
        su = m['datapoints']
        # print(su)
        for ts, th in su.items():  # .iteritems() is Python 2 only
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['packet_loss'] = th
            # print(data)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 500:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
Code example #4
def eventCreator():
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {
            '_type': 'link_utilization'
        }

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            q.task_done()
            print(threading.current_thread().name, "no summaries found in the message")
            continue
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = "network_weather-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
                    data['timestamp'] = r[0] * 1000
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    # append inside the inner loop so every result is kept
                    aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn, thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
Code example #5
    def eventCreator(self, message):
        m = json.loads(message)

        data = {}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            return
        su = m['datapoints']
        # print(su)
        for ts, th in su.items():
            data['_index'] = self.INDEX
            data['timestamp'] = int(float(ts) * 1000)
            sha1_hash = hashlib.sha1()
            sha1_hash.update(m['meta']['org_metadata_key'].encode())
            sha1_hash.update(str(data['timestamp']).encode())
            data['_id'] = sha1_hash.hexdigest()
            data['packet_loss'] = th
            # print(data)
            self.aLotOfData.append(copy.copy(data))
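Example #5 derives a deterministic _id by hashing org_metadata_key together with the millisecond timestamp, so re-processing the same datapoint updates the existing document instead of creating a duplicate. A standalone sketch of the scheme (the key value is invented):

# Sketch: stable document id from metadata key + timestamp.
# hashlib.sha1 yields the same digest on every run and on every host.
import hashlib

org_metadata_key = "example-metadata-key"  # illustrative value
timestamp = 1514764800000

sha1_hash = hashlib.sha1()
sha1_hash.update(org_metadata_key.encode())
sha1_hash.update(str(timestamp).encode())
print(sha1_hash.hexdigest())  # stable 40-character hex id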
Code example #6
    def eventCreator(self, message):
        m = json.loads(message)

        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            return
        su = m['datapoints']
        # print(su)
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = self.es_index_prefix + self.INDEX_PREFIX + str(
                dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['_id'] = hash(
                (m['meta']['org_metadata_key'], data['timestamp']))
            data['packet_loss'] = th
            # print(data)
            self.aLotOfData.append(copy.copy(data))
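Unlike the sha1 variant in example #5, this one builds _id with Python's built-in hash() on a tuple. Since str hashing is randomized per process in Python 3 (PYTHONHASHSEED), the same datapoint can get a different _id after a restart, which weakens the deduplication. A two-line demonstration:

# Run this script twice: the value differs between processes unless
# PYTHONHASHSEED is pinned, because str hashing is salted per process.
print(hash(("example-metadata-key", 1514764800000)))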
Code example #7
    def eventCreator(self, message):

        m = json.loads(message)
        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            print(threading.current_thread().name,
                  "no summaries found in the message")
            return
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = self.es_index_prefix + INDEX_PREFIX + str(
                        dati.year) + "." + str(dati.month) + "." + str(
                            dati.day)
                    data['timestamp'] = r[0] * 1000
                    data['_id'] = hash(
                        (m['meta']['org_metadata_key'], data['timestamp']))
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    # append inside the inner loop so every result is kept
                    self.aLotOfData.append(copy.copy(data))
Code example #8
    def eventCreator(self, message):

        m = json.loads(message)
        data = {}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            print(threading.current_thread().name,
                  "no summaries found in the message")
            return
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '60' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    data['_index'] = self.INDEX
                    data['timestamp'] = r[0] * 1000
                    sha1_hash = hashlib.sha1()
                    sha1_hash.update(m['meta']['org_metadata_key'].encode())
                    sha1_hash.update(str(data['timestamp']).encode())
                    data['_id'] = sha1_hash.hexdigest()
                    data['sim_util'] = r[1]['ml']
                    # print(data)
                    # append inside the inner loop so every result is kept
                    self.aLotOfData.append(copy.copy(data))
Code example #9
def eventCreator():
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)
        data = {'_type': 'latency'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        su = m['datapoints']
        for ts, th in su.items():  # .iteritems() is Python 2 only
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + str(dati.year) + "." + str(
                dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            th_fl = dict((float(k), v) for (k, v) in th.items())

            # mean
            samples = sum([v for k, v in th_fl.items()])
            th_mean = sum(k * v for k, v in th_fl.items()) / samples
            data['delay_mean'] = th_mean
            # std dev
            data['delay_sd'] = math.sqrt(
                sum((k - th_mean)**2 * v for k, v in th_fl.items()) / samples)
            # median
            csum = 0
            ordered_th = [(k, v) for k, v in sorted(th_fl.items())]
            midpoint = samples // 2
            if samples % 2 == 0:  # even number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum > midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint:
                        # parenthesize so the two middle bins are averaged,
                        # not just the second term halved
                        data['delay_median'] = (entry[0] +
                                                ordered_th[index + 1][0]) / 2
                        break
                    elif csum == midpoint + 1 and index == 0:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint + 1 and index > 0:
                        data['delay_median'] = (entry[0] +
                                                ordered_th[index - 1][0]) / 2
                        break
            else:  # odd number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum >= midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
            aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
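Examples #9 and #10 reduce a delay histogram of the form {delay: sample_count} to a weighted mean, standard deviation, and median. A compact worked sketch of the same arithmetic on toy data (the simplified median here takes the bin containing the upper middle sample rather than interpolating between bins as the full code does):

# Sketch: histogram statistics on toy data.
# 2 samples at 10 ms, 5 at 12 ms, 3 at 20 ms -> 10 samples total.
import math

th_fl = {10.0: 2, 12.0: 5, 20.0: 3}

samples = sum(th_fl.values())                              # 10
mean = sum(k * v for k, v in th_fl.items()) / samples      # 14.0
sd = math.sqrt(sum((k - mean) ** 2 * v
                   for k, v in th_fl.items()) / samples)   # 4.0

csum = 0
for k, v in sorted(th_fl.items()):
    csum += v
    if csum >= (samples + 1) / 2.0:  # bin holding the middle sample
        median = k
        break

print(samples, mean, sd, median)  # 10 14.0 4.0 12.0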
Code example #10
    def eventCreator(self, message):

        m = json.loads(message)
        data = {'_type': 'doc'}

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        data['ipv6'] = False
        if ':' in source or ':' in destination:
            data['ipv6'] = True
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionLatency(source)
        data['dest_production'] = siteMapping.isProductionLatency(destination)
        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = self.es_index_prefix + self.INDEX_PREFIX + str(
                dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['_id'] = hash((m['meta']['org_metadata_key'], ts))

            th_fl = dict((float(k), v) for (k, v) in th.items())

            # mean
            samples = sum([v for k, v in th_fl.items()])
            th_mean = sum(k * v for k, v in th_fl.items()) / samples
            data['delay_mean'] = th_mean
            # std dev
            data['delay_sd'] = math.sqrt(
                sum((k - th_mean)**2 * v for k, v in th_fl.items()) / samples)
            # median
            csum = 0
            ordered_th = [(k, v) for k, v in sorted(th_fl.items())]
            midpoint = samples // 2
            if samples % 2 == 0:  # even number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum > midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint:
                        # parenthesize so the two middle bins are averaged,
                        # not just the second term halved
                        data['delay_median'] = (entry[0] +
                                                ordered_th[index + 1][0]) / 2
                        break
                    elif csum == midpoint + 1 and index == 0:
                        data['delay_median'] = entry[0]
                        break
                    elif csum == midpoint + 1 and index > 0:
                        data['delay_median'] = (entry[0] +
                                                ordered_th[index - 1][0]) / 2
                        break
            else:  # odd number of samples
                for index, entry in enumerate(ordered_th):
                    csum += entry[1]
                    if csum >= midpoint + 1:
                        data['delay_median'] = entry[0]
                        break
            self.aLotOfData.append(copy.copy(data))
Code example #11
def eventCreator():
    global es  # rebound below when a reconnect is needed
    aLotOfData = []
    while True:
        d = q.get()
        m = json.loads(d)

        data = {
            '_type': 'latency'
        }

        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionLatency(source)
        data['destProduction'] = siteMapping.isProductionLatency(destination)
        if 'summaries' not in m:
            q.task_done()
            print(threading.current_thread().name, "no summaries found in the message")
            continue
        su = m['summaries']
        for s in su:
            if s['summary_window'] == '300' and s['summary_type'] == 'statistics':
                results = s['summary_data']
                # print(results)
                for r in results:
                    dati = datetime.utcfromtimestamp(float(r[0]))
                    data['_index'] = "network_weather_2-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
                    data['timestamp'] = r[0] * 1000
                    data['delay_mean'] = r[1]['mean']
                    data['delay_median'] = r[1]['median']
                    data['delay_sd'] = r[1]['standard-deviation']
                    # print(data)
                    aLotOfData.append(copy.copy(data))
        q.task_done()
        if len(aLotOfData) > 500:
            reconnect = True
            # print('writing out data...')
            try:
                res = helpers.bulk(es, aLotOfData, raise_on_exception=True, request_timeout=60)
                print(threading.current_thread().name, "\t inserted:", res[0], '\tErrors:', res[1])
                aLotOfData = []
                reconnect = False
            except es_exceptions.ConnectionError as e:
                print('ConnectionError ', e)
            except es_exceptions.TransportError as e:
                print('TransportError ', e)
            except helpers.BulkIndexError as e:
                print(e)  # subscripting the exception (e[0]) is Python 2 only
                # for i in e.errors:
                #     print(i)
            except Exception:
                print('Something seriously wrong happened.')
            if reconnect:
                es = GetESConnection()
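This last variant predates the tools.bulk_index wrapper and talks to Elasticsearch directly through helpers.bulk, reconnecting on failure. A minimal, self-contained sketch of that flush-with-retry pattern (host and connection details are placeholders):

# Sketch: flush a buffer with elasticsearch.helpers.bulk and rebuild
# the client on connection trouble. Placeholder host, not the real one.
from elasticsearch import Elasticsearch, helpers
from elasticsearch import exceptions as es_exceptions

def flush(es, docs):
    """Attempt one bulk insert; return (client, docs still pending)."""
    try:
        inserted, errors = helpers.bulk(
            es, docs, raise_on_exception=True, request_timeout=60)
        print("inserted:", inserted, "errors:", errors)
        return es, []
    except (es_exceptions.ConnectionError,
            es_exceptions.TransportError) as e:
        print("bulk failed, reconnecting:", e)
        return Elasticsearch(["http://localhost:9200"]), docs

# usage: es, aLotOfData = flush(es, aLotOfData)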