Example #1
def _MaybeCache(update_cache, blob_info, pcapdata):
    """Update cache when asked to do so. Cache when no cache found."""

    if update_cache:
        pcapdata.js_packets = None
        pcapdata.js_streams = None

    if pcapdata.js_packets is not None:
        print "We just used cache, didn't we"
        return

    reader = blob_info.open()
    begin = time.time()

    j = []
    pairs = set()
    for i, (p, unused_frame) in enumerate(wifipacket.Packetize(reader)):
        if IS_DEBUG and i > SAMPLE_SIZE:
            print 'Done', i
            break
        j.append(p)
        pairs.add((p.get('ta', 'no_ta'), p.get('ra', 'no_ra')))

    pairs_dict = [{'ta': t[0], 'ra': t[1]} for t in pairs]

    pcapdata.js_packets = json.dumps(j)
    pcapdata.js_streams = json.dumps(pairs_dict)

    end = time.time()
    print 'Spent on caching', (end - begin), 'sec'
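
A hedged usage sketch: the caller below is illustrative only; the pcapdata entity and its put() method are assumptions, not part of the example above.

def _GetPacketsJson(blob_info, pcapdata, update_cache=False):
    """Illustrative caller: rebuild the cache if needed, then return the JSON."""
    _MaybeCache(update_cache, blob_info, pcapdata)
    pcapdata.put()  # assumption: pcapdata is a datastore entity with a put()
    return pcapdata.js_packets, pcapdata.js_streams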
Example #2
def post(self):
    upload_files = self.get_uploads()
    sys.stderr.write('upload: %r\n' % upload_files)
    blob_info = upload_files[0]
    reader = blob_info.open()
    try:
        wifipacket.Packetize(reader).next()  # just check basic file header
    except wifipacket.Error:
        blob_info.delete()
        raise
    self.redirect('/view/%s' % blob_info.key())
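
A minimal wiring sketch, assuming webapp2 and the App Engine blobstore API; the class and route names are illustrative, not taken from the real module.

import webapp2
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers

class Upload(blobstore_handlers.BlobstoreUploadHandler):
    pass  # the post() method shown above would live here

class UploadForm(webapp2.RequestHandler):
    def get(self):
        url = blobstore.create_upload_url('/upload')
        self.response.write(
            '<form action="%s" method="POST" enctype="multipart/form-data">'
            '<input type="file" name="file"><input type="submit"></form>' % url)

app = webapp2.WSGIApplication([('/', UploadForm), ('/upload', Upload)])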
Example #3
def _Boxes(blob_info):
    """Re-/store from/to memcache number of packets per mac address."""

    boxes = memcache.get(str(blob_info.key()), namespace='boxes')
    if not boxes:
        reader = blob_info.open()
        boxes = collections.defaultdict(lambda: 0)
        # TODO(katepek): use cache here instead if available
        for p, unused_frame in wifipacket.Packetize(reader):
            if 'flags' in p and p.flags & wifipacket.Flags.BAD_FCS: continue
            if 'ta' in p and 'ra' in p:
                boxes[p.ta] += 1
                boxes[p.ra] += 1
        memcache.add(key=str(blob_info.key()),
                     value=dict(boxes),
                     namespace='boxes')
    return boxes
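
For illustration only, a small consumer of _Boxes() that ranks MAC addresses by packet count (the function name and the limit parameter are made up here):

def _TopTalkers(blob_info, limit=10):
    """Return the `limit` busiest MAC addresses as (mac, packet_count) pairs."""
    boxes = _Boxes(blob_info)
    return sorted(boxes.items(), key=lambda kv: kv[1], reverse=True)[:limit]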
Example #4
def _MaybeCache(blob_info, pcapdata, start_time, end_time):
    """Update cache when asked to do so. Cache when no cache found."""

    prefix = str(blob_info.key())

    jscache = memcache.get(prefix, namespace='jsindex')
    if jscache:
        try:

            def Getter(i):
                return json.loads(
                    memcache.get('%s_%d' % (prefix, i), namespace='jsdata'))

            packets = _ReadCache(jscache['start_times'], start_time, end_time,
                                 Getter)
        except _CacheMissing:
            pass  # Fall through
        else:
            jscache['js_packets'] = packets
            return jscache

    # Need to process the actual data to fill the cache
    reader = blob_info.open()
    begin = time.time()

    start_times = []
    groups = []
    pairs = set()
    for i, (p, unused_frame) in enumerate(wifipacket.Packetize(reader)):
        if IS_DEBUG and i > SAMPLE_SIZE:
            print 'Done', i
            break
        if not (i % 2000):
            print 'parsing packet %d' % i
            groups.append([])
            start_times.append(p.pcap_secs)
        pairs.add((p.get('ta', 'no_ta'), p.get('ra', 'no_ra')))
        # TODO(apenwarr): use lists instead of dicts.
        #  Repeating the key for every single element is extremely wasteful and
        #  makes generating/parsing slower than needed.
        pdata = dict((key, p.get(key)) for key in ALL_FIELDS)
        groups[-1].append(pdata)

    for i, g in enumerate(groups):
        gstr = json.dumps(g)
        print 'saving %d with %d elements (%d bytes)' % (i, len(g), len(gstr))
        memcache.set(key='%s_%d' % (prefix, i), value=gstr, namespace='jsdata')

    # TODO(apenwarr): don't need streams or start_times, so don't precalculate
    #   these above.

    # jscache = dict(js_streams=pairs_dict,
    #               start_times=start_times)

    jscache = dict()
    memcache.set(key=prefix, value=jscache, namespace='jsindex')

    packets = _ReadCache(start_times, start_time, end_time,
                         lambda i: groups[i])
    jscache['js_packets'] = packets

    end = time.time()
    print 'Spent on caching', (end - begin), 'sec'
    return jscache
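
The _ReadCache() helper and _CacheMissing exception used above belong to the surrounding module and are not shown here. A plausible sketch, assuming groups whose time range overlaps [start_time, end_time] are fetched and concatenated, and that an evicted memcache chunk surfaces as _CacheMissing:

class _CacheMissing(Exception):
    """Raised when a cached chunk has been evicted and must be rebuilt."""

def _ReadCache(start_times, start_time, end_time, getter):
    packets = []
    for i, group_start in enumerate(start_times):
        next_start = (start_times[i + 1]
                      if i + 1 < len(start_times) else float('inf'))
        if start_time is not None and next_start < start_time:
            continue  # group ends before the requested window
        if end_time is not None and group_start > end_time:
            break  # group starts after the requested window
        try:
            group = getter(i)
        except (TypeError, ValueError):
            # memcache.get() returned None or corrupt JSON for this chunk
            raise _CacheMissing()
        packets.extend(group)
    return packets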
Example #5
def _main(aliases):
    """Main program."""
    last_usec = row_start_usec = col_start_usec = rownum = colnum = 0
    row_airtime = col_airtime = time_init = 0
    col_packets = []
    row_macs = set()
    real_macs = set()
    abbrevs = {}
    abbrev_queue = list(reversed(string.ascii_uppercase))

    for opt, unused_frame in wifipacket.Packetize(sys.stdin):
        # TODO(apenwarr): handle control frame timing more carefully
        if opt.type & 0xf0 == 0x10:
            continue

        col_packets.append(opt)
        col_airtime += opt.get('airtime_usec', 0)
        row_airtime += opt.get('airtime_usec', 0)
        ta = opt.get('ta', '???')
        ts = opt.pcap_secs
        ts = '%.09f' % ts
        mac_usecs = opt.get('mac_usecs', last_usec)
        assert mac_usecs
        tsdiff = mac_usecs - last_usec
        bad_fcs = opt.get('flags', 0) & wifipacket.Flags.BAD_FCS
        if not bad_fcs:
            row_macs.add(ta)
            real_macs.add(ta)

        if not bad_fcs and opt.type == 0x08:  # beacon
            ssid = opt.get('ssid')
            if ssid:
                ssid = re.sub(r'[^\w]', '.', ssid)
                aliases.BetterGuess(ta, ssid)

        if not time_init:
            col_start_usec = row_start_usec = mac_usecs
            time_init = 1

        # TODO(apenwarr): deal with mac_usecs wraparound
        while mac_usecs - col_start_usec >= USEC_PER_COL:
            if col_start_usec - row_start_usec >= USEC_PER_ROW:
                print ' %2d%%' % (row_airtime * 100 / USEC_PER_ROW)  # end of row
                if (rownum % 20) == 0:
                    print
                    print '--- .=Beacon ',
                    for mac in row_macs:
                        nice_mac = aliases.Get(mac)
                        abbrev = abbrevs.get(mac, '')
                        print '%s=%s' % (abbrev, nice_mac),
                    print
                    row_macs.clear()
                rownum += 1
                colnum = 0
                row_start_usec += USEC_PER_ROW
                row_airtime = 0

            most_airtime = 0, None, 0
            for p in col_packets:
                ta = p.get('ta', '???')
                airtime = p.get('airtime_usec', 0)
                if ta in real_macs and airtime > most_airtime[0]:
                    most_airtime = airtime, ta, p.type
            if not most_airtime[1]:
                c = ' '
            elif most_airtime[1] in abbrevs:
                c = abbrevs[most_airtime[1]]
            else:
                mac = most_airtime[1]
                try:
                    nice_mac = aliases.Get(mac)
                except KeyError:
                    aliases.Invent(mac)
                    nice_mac = aliases.Get(mac)
                c = nice_mac[0].upper()  # try first letter of vendor or SSID
                if c in abbrevs.values():
                    # Fallback in case that's already taken
                    c = abbrev_queue.pop(0)
                    abbrev_queue.append(c)
                abbrevs[most_airtime[1]] = c
            if most_airtime[2] == 0x08:  # override if beacon was the biggest thing
                c = '.'
            if col_airtime < USEC_PER_COL / 2:
                c = c.lower()
            sys.stdout.write(c)
            col_start_usec += USEC_PER_COL
            col_airtime = 0
            col_packets = []
            colnum += 1

        if 0:  # pylint: disable=using-constant-test
            print '%-20s %7dM %9db %11s +%-9.3f  %s' % (
                nice_mac,
                opt.rate,
                opt.orig_len,
                '%.3fms' % (tsdiff / 1e3) if tsdiff else '',
                opt.airtime_usec / 1e3,
                opt.typestr,
            )
        sys.stdout.flush()
        last_usec = mac_usecs
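
A hedged driver sketch: the Aliases object passed to _main() is defined elsewhere in the real module, so the stub below only mimics the three methods the loop calls (Get, Invent, BetterGuess).

class _StubAliases(object):
    """Illustrative stand-in for the real alias tracker."""

    def __init__(self):
        self._names = {}

    def Get(self, mac):
        return self._names[mac]  # KeyError if unknown, as _main() expects

    def Invent(self, mac):
        self._names.setdefault(mac, mac.replace(':', '')[-6:])

    def BetterGuess(self, mac, name):
        self._names[mac] = name

if __name__ == '__main__':
    # e.g.  tcpdump -r capture.pcap -w - | python thisscript.py
    _main(_StubAliases())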