Code Example #1
File: DataSet.py Project: tartaruszen/codaspy19
def getTrace(visit_file, direction=None):
    with open(visit_file, 'r') as f:
        sample = json.load(f)

    Config.hostname.update(sample[u'ip_to_name'])
    webId, traceId = sample['visit_log'][u'current_url'], sample['visit_log']['visit_id']
    
    trace = Trace(traceId, webId)
    for tcp_conn in sample['tcp_connections']:
        
        connection_id = tcp_conn['connection_id']
        
        TCP = TCPConnection(connection_id, webId, hostip=sample['visit_log'][u'host_ip'])

        for pkt in tcp_conn['packets']:
            pkt_time, pkt_size = pkt[0], abs(pkt[1])
            
            if pkt_size == 0:
                continue

            pkt_dir = Packet.Outgoing if pkt[1] < 0 else Packet.Incoming

            if direction is None or direction == pkt_dir:
                TCP.addPacket(Packet(pkt_time, pkt_size, pkt_dir))
        TCP._packets.sort(key=lambda x: x.getTime())
        trace.addTcpCon(TCP)
    
    return trace
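
A minimal call sketch for the loader above (not part of the project; the JSON path is hypothetical and Packet.Incoming is the direction constant referenced in the code):

full_trace = getTrace('captures/visit_0001.json')                   # every non-empty packet, both directions
incoming = getTrace('captures/visit_0001.json', Packet.Incoming)    # incoming packets only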
Code Example #2
    def getTraceHerrmann(webpageId, traceIndex):
        if config.DATA_SOURCE == 1:
            datasourceId = 4
        elif config.DATA_SOURCE == 2:
            datasourceId = 5

        key = '.'.join([
            'Webpage', 'H',
            str(datasourceId),
            str(webpageId),
            str(traceIndex)
        ])

        trace = mc.get(key)
        if ENABLE_CACHE and trace:
            trace = cPickle.loads(trace)
        else:
            connection = MySQLdb.connect(host=config.MYSQL_HOST,
                                         user=config.MYSQL_USER,
                                         passwd=config.MYSQL_PASSWD,
                                         db=config.MYSQL_DB)

            cursor = connection.cursor()
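            # fetch (trace_id, size, time in ms) for every packet of one trace: the trace at
            # offset traceIndex belonging to the site at offset webpageId in the chosen dataset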
            command = """SELECT packets.trace_id,
                                      packets.size,
                                      ROUND(packets.abstime*1000)
                                 FROM (SELECT id
                                         FROM traces
                                        WHERE site_id = (SELECT id
                                                           FROM sites
                                                          WHERE dataset_id = """ + str(
                datasourceId) + """
                                                          ORDER BY id
                                                          LIMIT """ + str(
                    webpageId) + """,1)
                                        ORDER BY id
                                        LIMIT """ + str(
                        traceIndex) + """,1) traces,
                                      packets
                                WHERE traces.id = packets.trace_id
                                ORDER BY packets.trace_id, packets.abstime"""
            cursor.execute(command)

            data = cursor.fetchall()
            trace = Trace(webpageId)
            for item in data:
                trace.setId(int(item[0]))
                direction = Packet.UP
                if int(item[1]) > 0:
                    direction = Packet.DOWN
                time = item[2]
                length = int(math.fabs(item[1]))

                trace.addPacket(Packet(direction, time, length))
            connection.close()

            mc.set(key, cPickle.dumps(trace,
                                      protocol=cPickle.HIGHEST_PROTOCOL))

        return trace
Code Example #3
    def applyCountermeasure( trace,  metadata ):
        [targetDistributionBi,
         targetDistributionUp,
         targetDistributionDown] = metadata

        newTrace = Trace(trace.getId())

        # primary sampling
        timeCursor = 0
        for packet in trace.getPackets():
            timeCursor = packet.getTime()
            targetDistribution = targetDistributionDown
            if packet.getDirection()==Packet.UP:
                targetDistribution = targetDistributionUp

            packets = DirectTargetSampling.morphPacket( packet, targetDistribution )
            for newPacket in packets:
                newTrace.addPacket( newPacket )

        # secondary sampling
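        # append extra packets along the most skewed dimension, advancing timeCursor by 10 per packet,
        # until the trace's L1 distance to targetDistributionBi falls below the threshold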
        while True:
            l1Distance = newTrace.calcL1Distance( targetDistributionBi )
            if l1Distance <= DirectTargetSampling.L1_THRESHHOLD:
                break

            timeCursor += 10
            newDirection, newLen = newTrace.getMostSkewedDimension( targetDistributionBi )
            packet = Packet( newDirection, timeCursor, newLen )
            newTrace.addPacket( packet )

        return newTrace
Code Example #4
 def loadTraces(self):
     tracesPaths = os.listdir(self.directoryPath)
     for tracePath in tracesPaths:
         path = os.path.join(self.directoryPath, tracePath)
         t = Trace(path)
         t.loadTrace()
         self.traces.append(t)
Code Example #5
 def Trace(self):
     self.tracepop = Trace(df=self.model._df)
     try:
         self.tracepop.Trace()
         self.tracepop.show()
     except (KeyError):
         self.ErrorEvent()
Code Example #6
def readfile( month, day, hour, webpageId ):
    strId = '.'.join([str(month), str(day), str(hour), str(webpageId)])

    trace = Trace(webpageId)
    start = 0

    absPath    = __constructAbsolutePath( month, day, hour, webpageId )

    if absPath:
        pcapReader = dpkt.pcap.Reader( file( absPath, "rb") )

        for ts, buf in pcapReader:
            eth = dpkt.ethernet.Ethernet(buf)
            ip  = eth.data
            tcp = ip.data
            
            if start==0: start = ts
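            # source port 22 is the SSH server side of the tunnel, so those packets count as downstream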
            direction = Packet.UP
            if (tcp.sport==22):
                direction = Packet.DOWN
            delta     = int(round(((ts - start) * 1000),0))
            length    = ip.len + Packet.HEADER_ETHERNET

            trace.addPacket( Packet(direction, delta, length ) )
            
    return trace
Code Example #7
File: tamaraw.py Project: shibz-islam/BiMorphing
    def applyCountermeasure(trace):
        packets = []
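        # flatten the trace into [time, direction] pairs, with +1 for outgoing and -1 for incoming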
        for packet in trace.getPackets():
            if packet.getDirection() == Packet.UP:
                x = 1 # outgoing
            else:
                x = -1 # incoming
            packets.append([packet.getTime(), x])

        list2 = []
        parameters = [""]

        Tamaraw.Anoa(packets, list2, parameters)
        list2 = sorted(list2, key=lambda list2: list2[0])

        list3 = []

        Tamaraw.AnoaPad(list2, list3, 100, 0)

        newTrace = Trace(trace.getId())
        for item in list3:
            pDirection = Packet.UP
            if (item[1] < 0):
                pDirection = Packet.DOWN
            newTrace.addPacket(Packet(direction=pDirection, time=item[0], length=abs(item[1])))

        return newTrace
Code Example #8
    def traces(self):
        "yields all traceroutes of query"
        if self.queryResult is None:
            return None

        for raw_trace in self.queryResult['queries']:
            yield Trace(raw_trace)
Code Example #9
File: Folklore.py Project: ruur/deepdig
    def doCountermeasure(trace):
        # Median trace length in the Herrmann dataset is 3500 ms
        # Median throughput is 62000 bytes/second
        # 40*1500 = 60000 bytes/second

        newTrace = Trace(trace.getId())

        latency = []
        timer = 0
        bufferUP = Folklore.Buffer()
        bufferDOWN = Folklore.Buffer()
        packetCursor = 0

        # Terminate only if (1) our clock is up, (2) we have no more packets from the source
        # and (3) our buffers are empty
        while timer <= Folklore.MILLISECONDS_TO_RUN \
           or packetCursor < trace.getPacketCount() \
           or bufferUP.hasPackets() \
           or bufferDOWN.hasPackets():

            # calculate max latency
            if bufferUP.hasPackets():
                earliestPacket = bufferUP.queue()[0]
                latency.append(timer - earliestPacket.getTime())
            if bufferDOWN.hasPackets():
                earliestPacket = bufferDOWN.queue()[0]
                latency.append(timer - earliestPacket.getTime())

            # add to buffer: all packets that appeared since last clock
            while packetCursor < trace.getPacketCount()\
              and trace.getPackets()[packetCursor].getTime()<=timer:
                packet = trace.getPackets()[packetCursor]

                if packet.getDirection() == Packet.UP:
                    bufferUP.add(packet)
                elif packet.getDirection() == Packet.DOWN:
                    bufferDOWN.add(packet)

                # increment position in source buffer
                packetCursor += 1

            # check buffer UP: purge at most Packet.MTU bytes
            Folklore.packFromBuffer(
                Folklore.FIXED_PACKET_LEN - Packet.HEADER_LENGTH, bufferUP)

            # check buffer DOWN: purge at most Packet.MTU bytes
            Folklore.packFromBuffer(
                Folklore.FIXED_PACKET_LEN - Packet.HEADER_LENGTH, bufferDOWN)

            # send a byte in both directions
            newTrace.addPacket(
                Packet(Packet.DOWN, timer, Folklore.FIXED_PACKET_LEN))
            newTrace.addPacket(
                Packet(Packet.UP, timer, Folklore.FIXED_PACKET_LEN))

            # go to the next clock cycle
            timer += Folklore.TIMER_CLOCK_SPEED

        return [newTrace, latency]
Code Example #10
File: PadRoundLinear.py Project: ruur/deepdig
    def applyCountermeasure(trace):
        newTrace = Trace(trace.getId())
        for packet in trace.getPackets():
            newPacket = Packet(packet.getDirection(), packet.getTime(),
                               PadRoundLinear.calcLength(packet.getLength()))
            newTrace.addPacket(newPacket)

        return newTrace
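
A hedged usage sketch for the padding defenses in this listing (PadRoundLinear is the class referenced in the code above; `trace` stands for any Trace produced by the readers in the other examples):

padded = PadRoundLinear.applyCountermeasure(trace)
print(padded.getId(), len(padded.getPackets()))  # same id and packet count; only lengths are changed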
Code Example #11
def readfile(month, day, hour, webpageId):
    strId = '.'.join([str(month), str(day), str(hour), str(webpageId)])

    trace = Trace(webpageId)
    start = 0

    absPath = __constructAbsolutePath(month, day, hour, webpageId)

    # testing
    #print absPath

    if absPath:
        pcapReader = dpkt.pcap.Reader(file(absPath, "rb"))

        for ts, buf in pcapReader:
            eth = dpkt.ethernet.Ethernet(buf)
            ip = eth.data
            tcp = ip.data

            #dpkt.Packet.__flags__

            if start == 0: start = ts
            direction = Packet.UP
            if (tcp.sport == 22):
                direction = Packet.DOWN
            if (config.DATA_SOURCE == 3):
                if (tcp.sport == 9001 or tcp.sport == 443):
                    direction = Packet.DOWN
            if (config.DATA_SOURCE == 4 or config.DATA_SOURCE == 41
                    or config.DATA_SOURCE == 42):
                if (tcp.sport == 8080 or tcp.sport == 443):
                    direction = Packet.DOWN

            #testing
            #origTimeDiff = ts - start
            #origTimeDiff = (ts - start) * 1000
            #origTimeDiff = round(((ts - start) * 1000),0)
            #origTimeDiff = int(round(((ts - start) * 1000),0))
            #print origTimeDiff

            delta = int(round(((ts - start) * 1000), 0))
            length = ip.len + Packet.HEADER_ETHERNET
            '''
            if (config.DATA_SOURCE==3): # overcoming the packet size greater than 1500
                while True:
                    if length > 1500: # MTU
                        excludedLength = random.randint(595, 1500)
                        trace.addPacket( Packet(direction, delta, excludedLength ) )
                        length = length - excludedLength
                    else:
                        trace.addPacket( Packet(direction, delta, length ) )
                        break
            else:
                trace.addPacket( Packet(direction, delta, length ) )
            '''
            trace.addPacket(Packet(direction, delta, length))

    return trace
Code Example #12
File: Datastore.py Project: shibz-islam/BiMorphing
    def readWangTorFileOld( webpageId, traceIndex ):

        if webpageId < 100:
            file = os.path.join(config.PCAP_ROOT, str(webpageId)+"-"+str(traceIndex))

        else:
            file = os.path.join(config.PCAP_ROOT, str(webpageId-100)) # as the nonMon id starts from 100 and the file names are 0, 1, 2, 3, ...

        fileList = Utils.readFile(file)

        trace = Trace(webpageId)

        '''
        0.0	1 cell
        0.0	1
        0.116133928299	1
        0.499715805054	-1
        0.499715805054	-1
        ...
        '''

        prevcTime = currcTime = fileList[0].split("\t")[0] # previous and current cell time (0.0 in the example above)
        prevcDirection = currcDirection = int(fileList[0].split("\t")[1]) # previous and current cell direction

        #cLength = 512 # cell length is always 512 bytes in Tor
        cellCtr = 1

        for i in range(1,len(fileList)):
            cellArray = fileList[i].split("\t")
            cTime = cellArray[0]
            cDirection = cellArray[1]

            currcTime = cTime
            currcDirection = int(cDirection)

            if currcTime != prevcTime:
                #pLength = cellCtr * cLength
                Datastore.addPacketsFromCells( trace, prevcDirection, prevcTime, cellCtr )
                prevcDirection = currcDirection
                prevcTime = currcTime
                cellCtr = 1
                continue
            elif currcDirection != prevcDirection:
                #pLength = cellCtr * cLength
                Datastore.addPacketsFromCells( trace, prevcDirection, prevcTime, cellCtr )
                prevcDirection = currcDirection
                prevcTime = currcTime
                cellCtr = 1
                continue
            else: # same time, same direction
                cellCtr = cellCtr + 1
                prevcDirection = currcDirection
                prevcTime = currcTime

        # for the last cell
        Datastore.addPacketsFromCells( trace, prevcDirection, prevcTime, cellCtr )

        return trace
Code Example #13
 def __init__(self, foldername):
     """
     Initialize Driver with a foldername.
     """
     self._id = int(os.path.basename(foldername))
     self._traces = []
     files = [f for f in os.listdir(foldername) if f.endswith(".csv")]
     for filename in files:
         self._traces.append(Trace(os.path.join(foldername, filename)))
Code Example #14
def Join_Particles(particles=[],
                   start_ID=1,
                   folder='./',
                   overlap_thr=0.5,
                   autotrigger=True,
                   eccentricity_thr=10,
                   verbose=True):

    ID = start_ID

    for filename in tqdm(os.listdir(folder)):

        if not filename.startswith('mean_'):
            continue
        # e.g filename = mean_280519-video7_000-007_opened_cc02.png
        prefix, dot, name = str(filename).partition(
            '-')  # = mean_280519, -, video7_000-007_opened_cc02.png
        segment, dot, suffix1 = name.partition(
            '-')  # = video7_000, -, 007_opened_cc02.png
        frame, dot, suffix2 = suffix1.partition(
            '_')  # = 007, _, opened_cc02.png
        frameID = int(frame)
        previous_name = segment + ('-%03d' % (frameID - 1))

        t = Trace(folder, filename)
        if autotrigger:
            if not Trigger(
                    t, eccentricity_thr=eccentricity_thr, verbose=verbose):
                continue

        found = False
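        # try to attach this trace to a particle whose most recent trace comes from the previous frame and overlaps enough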
        for p in particles:
            if p.traces[-1].frame_name == previous_name:
                diff_matrix = np.abs(p.traces[-1].matrix - t.matrix)

                # compute white_number for diff_matrix
                n_whites = 0
                for x in diff_matrix:
                    for y in x:
                        if y > 0:
                            n_whites += 1

                overlap = 1.0 - n_whites / (p.traces[-1].white_number +
                                            t.white_number)
                if verbose:
                    print('\n' + previous_name + '-' + t.frame_name +
                          ': overlap = ' + str(overlap))
                if overlap >= overlap_thr:
                    found = True
                    p.traces.append(t)
                    break

        if not found:
            particles.append(Particle(folder, ID, [t]))
            ID += 1

    return particles, ID - start_ID
Code Example #15
File: PadRFCRand.py Project: ruur/deepdig
    def applyCountermeasure(trace):

        newTrace = Trace(trace.getId())
        for packet in trace.getPackets():
            rand = random.choice(range(8, 256, 8))
            length = min(packet.getLength() + rand, Packet.MTU)
            newPacket = Packet(packet.getDirection(), packet.getTime(), length)
            newTrace.addPacket(newPacket)

        return newTrace
Code Example #16
 def __init__(self, foldername):
     """
     Initialize Driver with a foldername.
     """
     self._id = int(os.path.basename(foldername))
     self._traces = []
     file_num = len(os.listdir(foldername))
     files = ['%d.csv' % (i) for i in range(1, file_num+1)]
     for filename in files:
         self._traces.append(Trace(os.path.join(foldername, filename)))
Code Example #17
    def applyCountermeasure(trace):
        newTrace = Trace(trace.getId())
        # pad all packets to the MTU
        for packet in trace.getPackets():
            newPacket = Packet( packet.getDirection(),
                                packet.getTime(),
                                Packet.MTU )
            newTrace.addPacket( newPacket )

        return newTrace
Code Example #18
    def good_traces(self):
        "yields completed traceroutes of query"
        if self.queryResult is None:
            return None

        for raw_trace in self.queryResult['queries']:
            trace = Trace(raw_trace)

            if trace.completed:
                yield trace
Code Example #19
 def __init__(self, foldername):
     """
     Reference Driver with a foldername.
     """
     self._id = int(os.path.basename(foldername))
     self._traces = []
     file_num = len(os.listdir(foldername))
     files = ['%d.csv' % (i) for i in range(1, file_num + 1)]
     filename = files[sample(range(1, file_num + 1), 1)[0] - 1]
     self._traces.append(Trace(os.path.join(foldername, filename)))
Code Example #20
    def applyCountermeasure(trace, metadata):
        [
            targetDistributionBi, targetDistributionUp, targetDistributionDown,
            srcDistributionUp, srcDistributionDown, morphingMatrixUp,
            morphingMatrixDown
        ] = metadata

        newTrace = Trace(trace.getId())

        # primary sampling
        timeCursor = 0
        for packet in trace.getPackets():
            timeCursor = packet.getTime()
            index = (packet.getLength() - Packet.HEADER_LENGTH) / 8
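            # bucket index of this packet's payload length (8-byte bins above the header), used to pick a morphing-matrix column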

            targetDistribution = None
            morphingColumn = None
            if packet.getDirection() == Packet.UP:
                if morphingMatrixUp is not None:
                    morphingColumn = morphingMatrixUp[:, index]
                else:
                    targetDistribution = targetDistributionUp
                targetDistributionSecondary = targetDistributionUp
            else:
                if morphingMatrixDown is not None:
                    morphingColumn = morphingMatrixDown[:, index]
                else:
                    targetDistribution = targetDistributionDown
                targetDistributionSecondary = targetDistributionDown

            if morphingColumn is not None:
                targetDistribution = {}
                for i in range(len(morphingColumn)):
                    key = str(packet.getDirection()) + '-' + str(
                        i * 8 + Packet.HEADER_LENGTH)
                    targetDistribution[key] = morphingColumn[i]

            packets = WrightStyleMorphing.morphPacket(
                packet, targetDistribution, targetDistributionSecondary)
            for newPacket in packets:
                newTrace.addPacket(newPacket)

        # secondary sampling
        while True:
            l1Distance = newTrace.calcL1Distance(targetDistributionBi)
            if l1Distance <= WrightStyleMorphing.L1_THRESHHOLD:
                break

            timeCursor += 10
            newDirection, newLen = newTrace.getMostSkewedDimension(
                targetDistributionBi)
            packet = Packet(newDirection, timeCursor, newLen)
            newTrace.addPacket(packet)

        return newTrace
Code Example #21
File: Datastore.py Project: shibz-islam/BiMorphing
    def readEsorics16TorFile( webpageId, traceIndex ):

        if webpageId < 100: # 100 and more is nonMon
            file = os.path.join(config.PCAP_ROOT, str(webpageId)+"-"+str(traceIndex))

        else:
            file = os.path.join(config.PCAP_ROOT, str(webpageId-100)) # as the nonMon id starts from 100 and the file names are 0, 1, 2, 3, ...

        fileList = Utils.readFile(file)

        trace = Trace(webpageId)

        '''
        <time-stamp>\t<directed-packet-length>
        1392354075.204044	-565
        1392354075.20464	565
        1392354075.709575	-565
        1392354075.956634	-565
        1392354075.981538	565
        1392354076.192185	565
        1392354076.36483	-565
        1392354076.647188	-565
        1392354076.685665	-1448
        1392354076.685685	-1448
        ...
        '''
        startTime = 0
        for i in range(0,len(fileList)):
            pArray = fileList[i].split("\t")
            cTime = pArray[0]

            if startTime == 0:
                startTime = float(cTime) # first packet's time as a reference

            cDirection = int(pArray[1])

            pDirection = Packet.UP
            if (cDirection < 0):
                pDirection = Packet.DOWN

            # as in the pcapparser.py
            # delta     = int(round(((ts - start) * 1000),0))

            #pTime = int(round(((float(cTime) - startTime) * 1000),0))
            pTime = int(round(((float(cTime) - startTime) ), 0))

            pLength = abs(int(pArray[1]))

            pkt = Packet(pDirection, pTime, pLength)
            pkt.timeStr = cTime

            trace.addPacket(pkt)


        return trace
Code Example #22
File: PadRand.py Project: ruur/deepdig
    def applyCountermeasure(trace):
        newTrace = Trace(trace.getId())
        for packet in trace.getPackets():
            length = Packet.MTU
            if Packet.MTU - packet.getLength() > 0:
                length = packet.getLength() + random.choice(
                    range(0, Packet.MTU - packet.getLength(), 8))
            newPacket = Packet(packet.getDirection(), packet.getTime(), length)
            newTrace.addPacket(newPacket)

        return newTrace
Code Example #23
    def test_readfile(self):
        actualTrace = pcapparser.readfile( month=3, day=14, hour=22, webpageId=8 )

        expectedTrace = Trace(8)
        expectedTrace.addPacket( Packet( Packet.UP  , 0  , 148 ) )
        expectedTrace.addPacket( Packet( Packet.DOWN, 0  , 100 ) )
        expectedTrace.addPacket( Packet( Packet.UP  , 0  , 52  ) )
        expectedTrace.addPacket( Packet( Packet.UP  , 3  , 500 ) )
        expectedTrace.addPacket( Packet( Packet.DOWN, 18 , 244 ) )
        expectedTrace.addPacket( Packet( Packet.UP  , 35 , 436 ) )
        expectedTrace.addPacket( Packet( Packet.DOWN, 75 , 52  ) )
        expectedTrace.addPacket( Packet( Packet.DOWN, 118, 292 ) )
        expectedTrace.addPacket( Packet( Packet.UP  , 158, 52  ) )
Code Example #24
    def createTrace(self):
        '''Fit for the position and width of the trace.'''

        self.speak("populating the trace parameters")
        tracefilename = self.directory + 'trace_{0}.npy'.format(self.name)
        skyfilename = self.directory + 'skyMask_{0}.npy'.format(self.name)

        # define the trace object (will either load saved, or remake)
        self.trace = Trace(self)

        self.images['RoughLSF'] = np.exp(
            -0.5 * ((self.s - self.trace.traceCenter(self.w)) /
                    self.trace.tracefitwidth)**2)
Code Example #25
    def is_good(self, filename):
        t = Trace(self.raw_path, filename)

        if t.white_perc > self.white_perc_thr:
            print('too crowded: ', t.white_perc)
            return False
        t.compute_inertia()
        if t.eccentricity_from_inertia < self.eccentricity_thr:
            print('too spherical ', t.eccentricity_from_inertia)
            return False

        self.interesting_count += 1
        return True
Code Example #26
def get_records():
    trace = request.args.get('trace') == "true"
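    # '?trace=true' bypasses the cache and adds the full resolution trace to the response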

    domain = request.args.get('domain')
    if domain is None:
        return 'Domain is missing', 400

    domain = domain.split("/")[0]

    # create the resolution trace up front so the error path below can always reference it
    data_trace = Trace()

    try:
        get_cache()

        response: tp.Optional[IpRecord] = None
        if not trace:
            response = domain_cache[domain]
            if response is not None and response.expires_at < time.time():
                response = None

        if response is not None:
            print(f"Using cache for {domain}")
            return jsonify(
                domain=domain,
                ips=response.ips,
                ttl=max(response.expires_at - time.time(), 0),
            )

        response = Dns.find_recursive(domain, Dns.ROOT_SERVERS_DNS, data_trace)
    except Exception as e:
        print(e)
        response = None

    if response is None:
        return jsonify(error='Domain is not resolved',
                       trace=data_trace.compose()), 404

    domain_cache[domain] = response

    if trace:
        return jsonify(domain=domain,
                       ips=response.ips,
                       ttl=max(response.expires_at - time.time(), 0),
                       trace=data_trace.compose(),
                       exact_match=any(
                           [d.startswith(domain) for d, _ in response.ips]))
    else:
        return jsonify(domain=domain,
                       ips=response.ips,
                       ttl=max(response.expires_at - time.time(), 0),
                       exact_match=any(
                           [d.startswith(domain) for d, _ in response.ips]))
Code Example #27
    def encrypt(self, plaintext):
        # input plaintext -> np.array
        self.trace = Trace(self.b, self.noise)
        self.plain_state = plaintext.reshape(4, 4)
        self.__add_round_key(self.plain_state, self.round_keys[:4])

        for i in range(1, 10):
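            # rounds 1-9 apply the full round function; the final round after this loop omits MixColumns, as in standard AES-128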
            self.__round_encrypt(self.plain_state,
                                 self.round_keys[4 * i:4 * (i + 1)])

        self.__sub_bytes(self.plain_state)
        self.__shift_rows(self.plain_state)
        self.__add_round_key(self.plain_state, self.round_keys[40:])
        return self.plain_state.flatten(), self.trace
Code Example #28
File: Datastore.py Project: shibz-islam/BiMorphing
    def readWangTorFile( webpageId, traceIndex ):

        if webpageId < 100: # 100 and more is nonMon
            if config.DATA_SOURCE == 5:
                file = os.path.join(config.PCAP_ROOT, str(webpageId)+"-"+str(traceIndex))
            elif config.DATA_SOURCE == 9:
                file = os.path.join(config.PCAP_ROOT, str(webpageId) + "-" + str(traceIndex)) + ".cell"

        else:
            if config.DATA_SOURCE == 5:
                file = os.path.join(config.PCAP_ROOT, str(webpageId-100)) # as the nonMon id starts from 100 and the file names are 0, 1, 2, 3, ...
            elif config.DATA_SOURCE == 9:
                file = os.path.join(config.PCAP_ROOT, str(webpageId-100)) + ".cell"  # as the nonMon id starts from 100 and the file names are 0, 1, 2, 3, ...
        fileList = Utils.readFile(file)

        trace = Trace(webpageId)

        '''
        0.0	1 cell
        0.0	1
        0.116133928299	1
        0.499715805054	-1
        0.499715805054	-1
        ...
        '''

        for i in range(1,len(fileList)):
            cellArray = fileList[i].split("\t")
            cTime = cellArray[0]
            cDirection = int(cellArray[1])

            pDirection = Packet.UP
            if (cDirection==-1):
                pDirection = Packet.DOWN

            # as in the pcapparser.py
            # delta     = int(round(((ts - start) * 1000),0))
            pTime = int(round((float(cTime) * 1000),0))
            #pTime = int(round((float(cTime) * 10000), 0))
            #pTime = int(round((float(cTime) * 1000000), 0))

            pLength = abs(int(cellArray[1])) # sizes are only 1 and -1

            trace.addPacket(Packet(pDirection, pTime, pLength))


        return trace
Code Example #29
    def get_trace(cls,
                  trace_id=None,
                  site_id=None,
                  dataset=2,
                  limit=1,
                  multi=False):
        if cls.conn is None:
            cls.conn = MySQLdb.connect(host=config.MYSQL_HOST,
                                       user=config.MYSQL_USER,
                                       passwd=config.MYSQL_PASSWD,
                                       db=config.MYSQL_DB)
        cur = cls.conn.cursor()
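        # without an explicit trace_id, sample `limit` random trace ids for the requested site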
        if trace_id is None:
            cur.execute(
                'SELECT id FROM traces where site_id=%s ORDER BY RAND() LIMIT {}'
                .format(limit), [site_id])
            if limit == 1:
                trace_ids = [cur.fetchone()[0]]
            else:
                trace_ids = [r[0] for r in cur.fetchall()]
        else:
            trace_ids = [trace_id]
        # print('SEL-TRACE', trace_ids)

        traces = []
        for trace_id in trace_ids:
            cur.execute(
                'SELECT size, ROUND(abstime*1000) FROM packets WHERE trace_id=%s ORDER BY abstime',
                [trace_id])
            data = cur.fetchall()
            trace = Trace(trace_id, webpage=site_id)
            for item in data:
                direction = Packet.UP
                if int(item[0]) > 0:
                    direction = Packet.DOWN
                time = item[1]
                length = int(math.fabs(item[0]))
                trace.addPacket(Packet(direction, time, length))
            traces.append(trace)

        if limit == 1 and not multi:
            return traces[0] if traces else None
        return traces
Code Example #30
    def consume_packet(self, header_bstr, packet_time):
        """
        Accept new packet and associate it with appropriate trace

        Keyword arguments:
        header_bstr -- binary string of the packet header
        packet_time -- time tuple (sec, ms) since epoch of header
        """
        # Check if packet isn't complete
        new_fragment = Fragment(header_bstr, packet_time)
        if new_fragment.id in self.incomplete_packets:
            packet = self.incomplete_packets[new_fragment.id]
            packet.add_frag(new_fragment)
        else:
            packet = Packet(new_fragment)
            self.incomplete_packets[packet.id] = packet

        # Set start time if very first packet
        if self.ref_time is None:
            self.ref_time = packet.time

        if packet.is_complete():
            del self.incomplete_packets[packet.id]
            if (packet.protocol == Protocol.UDP
                    or (packet.protocol == Protocol.ICMP
                        and packet.type == Type.ECHO)):
                self.traces[packet.get_trace_sig()] = Trace(
                    packet, self.ref_time)
                self.trace_order.append(packet.get_trace_sig())

            elif packet.protocol == Protocol.ICMP and packet.type == Type.TIME_EXCEEDED:
                if packet.get_trace_sig() in [
                        trace.get_sig() for trace in self.traces.values()
                ]:
                    self.traces[packet.get_trace_sig()].add_resp(packet)
                else:
                    if DEV_ENV:
                        print("Error: ICMP received for nonexistent probe")
                        print(packet.get_trace_sig())
            else:
                if DEV_ENV:
                    print("Protocol not savable in traces")