def eventCreator():
    """Consume throughput messages from the queue and bulk-index them into ES.

    Runs forever: each queue item is a JSON-encoded perfSONAR throughput
    message; one document is buffered per datapoint and the buffer is
    flushed with helpers.bulk once it holds more than 100 documents,
    reconnecting to Elasticsearch on failure.
    """
    # `es` is reassigned on reconnect below; without the global declaration
    # that assignment would make `es` local and the first helpers.bulk()
    # call would raise UnboundLocalError.
    global es
    aLotOfData = []
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'throughput'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name, 'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        # .items() instead of the Python-2-only iteritems().
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather_2-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['throughput'] = th
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            reconnect = True
            try:
                res = helpers.bulk(es, aLotOfData, raise_on_exception=False, request_timeout=60)
                print(threading.current_thread().name, "\t inserted:", res[0], '\tErrors:', res[1])
                aLotOfData = []
                reconnect = False
            except es_exceptions.ConnectionError as e:
                print('ConnectionError ', e)
            except es_exceptions.TransportError as e:
                print('TransportError ', e)
            except helpers.BulkIndexError as e:
                # Exceptions are not subscriptable in Python 3 — go through
                # .args: (summary message, list of per-document errors).
                print(e.args[0])
                print(e.args[1][0])
            except Exception as e:
                print('Something seriously wrong happened. ', e)
            if reconnect:
                es = GetESConnection()
def eventCreator():
    """Consume throughput messages from the queue and bulk-index them into ES.

    Runs forever. Each queue item is a JSON-encoded perfSONAR throughput
    record; one document per datapoint is buffered and flushed via
    tools.bulk_index once more than 100 documents have accumulated.
    """
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'doc'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            # Monthly index (day component intentionally dropped).
            data['_index'] = INDEX_PREFIX + str(dati.year) + "." + str(dati.month)
            data['timestamp'] = int(float(ts) * 1000)
            data['throughput'] = th
            aLotOfData.append(copy.copy(data))
        q.task_done()

        # Flush in bulk; the buffer is kept on failure so documents retry.
        if len(aLotOfData) > 100:
            succ = tools.bulk_index(
                aLotOfData,
                es_conn=es_conn,
                thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
    """Consume retransmit messages from the queue and bulk-index them into ES.

    Runs forever. One document per datapoint; flushed via tools.bulk_index
    once more than 100 documents are buffered.
    """
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'retransmits'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            q.task_done()
            continue
        su = m['datapoints']
        # .items() instead of the Python-2-only iteritems(); the sibling
        # workers in this file already use .items().
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['retransmits'] = th
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn,
                                    thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
    def eventCreator(self, message):
        """Buffer one ES throughput document per datapoint of *message*.

        The document ``_id`` is a SHA1 over the message's
        ``org_metadata_key`` and the datapoint timestamp, so re-processing
        the same datapoint overwrites rather than duplicates. Documents are
        appended to ``self.aLotOfData``; bulk indexing happens elsewhere.
        """
        m = json.loads(message)

        data = {}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            return

        su = m['datapoints']
        for ts, th in su.items():
            data['_index'] = self.INDEX
            data['timestamp'] = int(float(ts) * 1000)
            # Stable, process-independent document id.
            sha1_hash = hashlib.sha1()
            sha1_hash.update(m['meta']['org_metadata_key'].encode())
            sha1_hash.update(str(data['timestamp']).encode())
            data['_id'] = sha1_hash.hexdigest()
            data['throughput'] = th
            self.aLotOfData.append(copy.copy(data))
    def eventCreator(self, message):
        """Buffer one ES throughput document per datapoint of *message*.

        Documents go to a monthly index and get a deterministic SHA1-based
        ``_id`` so re-processing the same datapoint overwrites instead of
        duplicating. Appends to ``self.aLotOfData``; bulk indexing happens
        elsewhere.
        """
        m = json.loads(message)

        data = {'_type': 'doc'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  'no datapoints in this message!')
            return

        su = m['datapoints']
        for ts, th in su.items():
            dati = datetime.utcfromtimestamp(float(ts))
            # Monthly index (day component intentionally dropped).
            data['_index'] = self.es_index_prefix + self.INDEX_PREFIX + str(
                dati.year) + "." + str(dati.month)
            data['timestamp'] = int(float(ts) * 1000)
            # Built-in hash() is salted per process (PYTHONHASHSEED), so the
            # previous hash((key, ts)) _id changed on every restart and broke
            # overwrite/dedup. Use the stable SHA1 scheme the sibling
            # workers in this file use.
            sha1_hash = hashlib.sha1()
            sha1_hash.update(m['meta']['org_metadata_key'].encode())
            sha1_hash.update(str(data['timestamp']).encode())
            data['_id'] = sha1_hash.hexdigest()
            data['throughput'] = th
            self.aLotOfData.append(copy.copy(data))
    def eventCreator(self, message):
        """Buffer one ES traceroute document per datapoint of *message*.

        For each datapoint the hop/RTT/TTL lists are extracted (first query
        only) and summary fields (n_hops, max_rtt, hash) are added. The
        ``_id`` is a deterministic SHA1 so re-processing the same datapoint
        overwrites instead of duplicating. Appends to ``self.aLotOfData``.
        """
        m = json.loads(message)

        data = {'_type': 'doc'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            return
        dp = m['datapoints']
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = self.es_index_prefix + self.INDEX_PREFIX + str(
                dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            # Built-in hash() is salted per process (PYTHONHASHSEED), so the
            # previous hash((key, ts)) _id changed on every restart and broke
            # overwrite/dedup. Use the stable SHA1 scheme the sibling
            # workers in this file use.
            sha1_hash = hashlib.sha1()
            sha1_hash.update(m['meta']['org_metadata_key'].encode())
            sha1_hash.update(str(data['timestamp']).encode())
            data['_id'] = sha1_hash.hexdigest()
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            for hop in dp[ts]:
                # Keep only complete first-query hop records.
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                if int(hop['query']) != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
            hs = ''.join("None" if h is None else h for h in data['hops'])
            data['n_hops'] = len(data['hops'])
            if data['rtts']:
                data['max_rtt'] = max(data['rtts'])
            # NOTE(review): data['hash'] still uses the salted built-in
            # hash() and is therefore not stable across processes; left
            # unchanged because downstream consumers may expect an int here.
            data['hash'] = hash(hs)
            self.aLotOfData.append(copy.copy(data))
Example #7
0
def eventCreator():
    """Consume traceroute messages from the queue and bulk-index them into ES.

    Runs forever. For each datapoint the hop/RTT/TTL lists are extracted
    (first query only) and summary fields (n_hops, max_rtt, hash) are
    added; documents are flushed via tools.bulk_index once more than 100
    are buffered.
    """
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'traceroute'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        dp = m['datapoints']
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            for hop in dp[ts]:
                # Keep only complete first-query hop records.
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                if int(hop['query']) != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
            hs = ''.join("None" if h is None else h for h in data['hops'])
            data['n_hops'] = len(data['hops'])
            if data['rtts']:
                data['max_rtt'] = max(data['rtts'])
            # NOTE(review): built-in hash() is salted per process, so this
            # value is not stable across runs — confirm downstream use.
            data['hash'] = hash(hs)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn,
                                    thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
def eventCreator():
    """Consume traceroute messages from the queue and bulk-index them into ES.

    Legacy worker: indexes into the daily network_weather_2-* indices and
    flushes with helpers.bulk once more than 100 documents are buffered,
    reconnecting to Elasticsearch on failure.
    """
    # `es` is reassigned on reconnect below; without the global declaration
    # that assignment would make `es` local and the first helpers.bulk()
    # call would raise UnboundLocalError.
    global es
    aLotOfData = []
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'traceroute'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name, "no datapoints found in the message")
            continue
        dp = m['datapoints']
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather_2-" + str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            for hop in dp[ts]:
                # Keep only complete first-query hop records.
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                if int(hop['query']) != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
            # Rewritten with consistent spaces: the original mixed tabs and
            # spaces in this loop, which is a TabError under Python 3.
            hs = ''.join("None" if h is None else h for h in data['hops'])
            # NOTE(review): built-in hash() is salted per process, so this
            # value is not stable across runs — confirm downstream use.
            data['hash'] = hash(hs)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            reconnect = True
            try:
                res = helpers.bulk(es, aLotOfData, raise_on_exception=False, request_timeout=60)
                print(threading.current_thread().name, "\t inserted:", res[0], '\tErrors:', res[1])
                aLotOfData = []
                reconnect = False
            except es_exceptions.ConnectionError as e:
                print('ConnectionError ', e)
            except es_exceptions.TransportError as e:
                print('TransportError ', e)
            except helpers.BulkIndexError as e:
                # Exceptions are not subscriptable in Python 3 — go through
                # .args: (summary message, list of per-document errors).
                print(e.args[0])
                print(e.args[1][0])
            except Exception as e:
                print('Something seriously wrong happened.', e)
            if reconnect:
                es = GetESConnection()
def eventCreator():
    """Consume traceroute messages from the queue and bulk-index them into ES.

    Runs forever. For each datapoint the hop/RTT/TTL lists are extracted
    (first query only) and summary fields (n_hops, max_rtt, hash) are
    added; documents are flushed via tools.bulk_index once more than 100
    are buffered.
    """
    aLotOfData = []
    es_conn = tools.get_es_connection()
    while True:
        d = q.get()
        m = json.loads(d)

        data = {'_type': 'traceroute'}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['srcSite'] = so[0]
            data['srcVO'] = so[1]
        if de is not None:
            data['destSite'] = de[0]
            data['destVO'] = de[1]
        data['srcProduction'] = siteMapping.isProductionThroughput(source)
        data['destProduction'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            q.task_done()
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            continue
        dp = m['datapoints']
        for ts in dp:
            dati = datetime.utcfromtimestamp(float(ts))
            data['_index'] = "network_weather-" + \
                str(dati.year) + "." + str(dati.month) + "." + str(dati.day)
            data['timestamp'] = int(float(ts) * 1000)
            data['hops'] = []
            data['rtts'] = []
            data['ttls'] = []
            for hop in dp[ts]:
                # Keep only complete first-query hop records.
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                if int(hop['query']) != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
            hs = ''.join("None" if h is None else h for h in data['hops'])
            data['n_hops'] = len(data['hops'])
            # Guard the empty case: max([]) raises ValueError when every hop
            # was filtered out (the sibling workers already guard this).
            if data['rtts']:
                data['max_rtt'] = max(data['rtts'])
            # NOTE(review): built-in hash() is salted per process, so this
            # value is not stable across runs — confirm downstream use.
            data['hash'] = hash(hs)
            aLotOfData.append(copy.copy(data))
        q.task_done()

        if len(aLotOfData) > 100:
            succ = tools.bulk_index(aLotOfData, es_conn=es_conn,
                                    thread_name=threading.current_thread().name)
            if succ is True:
                aLotOfData = []
Example #10
0
    def eventCreator(self, message):
        """Parse a traceroute message into ES documents with path analytics.

        For each datapoint the hop/ASN/RTT/TTL lists are extracted (first
        query only), then derived fields are computed: ``n_hops``,
        ``max_rtt``, ``destination_reached``, ``route-sha1`` (stable hash
        of the intermediate path), ``path_complete`` and ``looping``.
        Documents get a deterministic SHA1 ``_id`` and are appended to
        ``self.aLotOfData``.
        """
        m = json.loads(message)

        data = {}
        source = m['meta']['source']
        destination = m['meta']['destination']
        data['MA'] = m['meta']['measurement_agent']
        data['src'] = source
        data['dest'] = destination
        data['src_host'] = m['meta']['input_source']
        data['dest_host'] = m['meta']['input_destination']
        # A colon in either endpoint address means IPv6.
        data['ipv6'] = ':' in source or ':' in destination
        so = siteMapping.getPS(source)
        de = siteMapping.getPS(destination)
        if so is not None:
            data['src_site'] = so[0]
            data['src_VO'] = so[1]
        if de is not None:
            data['dest_site'] = de[0]
            data['dest_VO'] = de[1]
        data['src_production'] = siteMapping.isProductionThroughput(source)
        data['dest_production'] = siteMapping.isProductionThroughput(destination)
        if 'datapoints' not in m:
            print(threading.current_thread().name,
                  "no datapoints found in the message")
            return
        dp = m['datapoints']
        for ts in dp:
            data['_index'] = self.INDEX
            data['timestamp'] = int(float(ts) * 1000)
            # Stable, process-independent document id.
            sha1_hash = hashlib.sha1()
            sha1_hash.update(m['meta']['org_metadata_key'].encode())
            sha1_hash.update(str(data['timestamp']).encode())
            data['_id'] = sha1_hash.hexdigest()
            data['hops'] = []
            data['asns'] = []
            data['rtts'] = []
            data['ttls'] = []
            for hop in dp[ts]:
                # Keep only complete first-query hop records.
                if 'ttl' not in hop or 'ip' not in hop or 'query' not in hop:
                    continue
                if int(hop['query']) != 1:
                    continue
                data['hops'].append(hop['ip'])
                data['ttls'].append(int(hop['ttl']))
                if 'rtt' in hop and hop['rtt'] is not None:
                    data['rtts'].append(float(hop['rtt']))
                else:
                    data['rtts'].append(0.0)
                if 'as' in hop:
                    data['asns'].append(hop['as']['number'])
                else:
                    data['asns'].append(0)
            data['n_hops'] = len(data['hops'])
            if data['rtts']:
                data['max_rtt'] = max(data['rtts'])

            if len(data['hops']) == 0:
                print('ERROR: we should have no data without any hops.')
                self.aLotOfData.append(copy.copy(data))
                continue

            data['destination_reached'] = False
            core_path = copy.copy(data['hops'])
            if core_path[-1] == data['dest']:
                core_path.remove(data['dest'])
                # Destination reached: the last hop equals the destination.
                data['destination_reached'] = True

            route_hash = hashlib.sha1()
            route_hash.update(";".join(core_path).encode())
            data['route-sha1'] = route_hash.hexdigest()

            # Complete path: the number of recorded TTLs equals the last TTL
            # (ttls is non-empty here because hops is non-empty).
            data['path_complete'] = len(data['ttls']) == data['ttls'][-1]

            # Looping path: at least one repeated IP among src, hops, dest.
            core_path.append(data['src'])
            core_path.append(data['dest'])
            data['looping'] = len(set(core_path)) != len(core_path)

            self.aLotOfData.append(copy.copy(data))