def test_measurement_crud(self):
    """Round-trip a measurement through insert, select, and bulk delete."""
    first = Measurement.new(1.0, 2.0, 0.9, 1, [], extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as txn:
        first.measurement_id = insert_measurement(txn, first)
    # The insert must hand back a usable integer primary key.
    assert_that(first.measurement_id).is_not_none()
    assert_that(first.measurement_id).is_instance_of(int)
    fetched = select_measurement_by_id(self.connection, first.measurement_id)
    self.assert_measurements_equal(first, fetched)
    listing = select_all_measurements(self.connection, limit=500, offset=0)
    assert_that(listing).is_length(1)
    self.assert_measurements_equal(first, listing[0])
    # A second row should show up alongside the first.
    second = Measurement.new(3.0, 4.0, 0.8, 2, [], extra_data={"baz": "bar"})
    with transaction_wrapper(self.connection) as txn:
        second.measurement_id = insert_measurement(txn, second)
    listing = select_all_measurements(self.connection, limit=500, offset=0)
    assert_that(listing).is_length(2)
    self.assert_measurements_equal(
        second,
        select_measurement_by_id(self.connection, second.measurement_id))
    assert_that(second.to_api_response()).is_instance_of(dict)
    # Age cutoff of 0 should sweep everything away.
    with transaction_wrapper(self.connection) as txn:
        deleted = delete_old_measurements(txn, 0)
    assert_that(deleted).is_equal_to(2)
    assert_that(
        select_all_measurements(self.connection, limit=500,
                                offset=0)).is_empty()
def select_measurement_by_id(connection, measurement_id):
    """Load a single measurement row by its primary key."""
    query = """
    SELECT * FROM measurement WHERE measurementID=?
    """
    with cursor_manager(connection) as cur:
        cur.execute(query, (measurement_id,))
        row = cur.fetchone()
    return Measurement.from_row(row)
def select_measurements_that_need_upload(connection, limit):
    """Return up to ``limit`` not-yet-uploaded measurements, oldest first."""
    clause, params = limit_offset_helper(limit, None,
                                         order_by="measurementStartTime")
    query = """
    SELECT m.* FROM measurement AS m WHERE hasBeenUploaded=0
    """ + clause
    with cursor_manager(connection) as cur:
        cur.execute(query, params)
        rows = cur.fetchall()
    return [Measurement.from_row(row) for row in rows]
def select_latest_channel_measurements(connection,
                                       channel_num,
                                       limit=None,
                                       offset=None):
    """Return measurements for one channel, newest first, with paging."""
    clause, params = limit_offset_helper(
        limit,
        offset,
        order_by="measurementStartTime DESC",
        extra_params={"channelNum": channel_num})
    query = """
    SELECT * FROM measurement WHERE channel=:channelNum
    """ + clause
    with cursor_manager(connection) as cur:
        cur.execute(query, params)
        rows = cur.fetchall()
    return [Measurement.from_row(row) for row in rows]
def test_measurement_station_map(self):
    """Link two stations to one measurement and read back their counters."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [], {"foo": "bar"})
    with transaction_wrapper(self.connection) as txn:
        measurement.measurement_id = insert_measurement(txn, measurement)
    station_a = Station.new("01:02:03:04:05:06", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    station_b = Station.new("01:02:03:04:05:07", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    with transaction_wrapper(self.connection) as txn:
        station_a.station_id = insert_station(txn, station_a)
        station_b.station_id = insert_station(txn, station_b)
    counters = DataCounters(9, 2, 3, 4, 9, 3, 3, 3, 10, 2000, 1500, 1,
                            power_measurements=[1.0, 2.0, 3.0],
                            rate_measurements=[1, 2, 1, 4])
    # One station gets zeroed counters, the other the populated set.
    with transaction_wrapper(self.connection) as txn:
        insert_measurement_station(txn, measurement.measurement_id,
                                   station_a.station_id, DataCounters.zero())
        insert_measurement_station(txn, measurement.measurement_id,
                                   station_b.station_id, counters)
    linked = select_stations_for_measurement(self.connection,
                                             measurement.measurement_id)
    assert_that(linked).is_length(2)
    self.assert_stations_equal(station_a, linked[0])
    self.assert_data_counters_equal(DataCounters.zero(),
                                    linked[0].data_counters)
    self.assert_stations_equal(station_b, linked[1])
    self.assert_data_counters_equal(counters, linked[1].data_counters)
def test_measurement_service_set(self):
    """Attach two service sets to a measurement and verify both come back."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [],
                                  extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as txn:
        measurement.measurement_id = insert_measurement(txn, measurement)
    ss_a = ServiceSet.new("00:01:00:00:01:00", "CU Boulder Wireless",
                          {"baz": ["foo", "bar"]})
    ss_b = ServiceSet.new("00:01:00:00:01:01", "CU Boulder Wireless",
                          {"baz": ["foo", "bar"]})
    station_a = Station.new("00:02:00:00:02:00", {})
    station_b = Station.new("00:02:00:00:02:01", {})
    with transaction_wrapper(self.connection) as txn:
        ss_a.service_set_id = insert_service_set(txn, ss_a)
        ss_b.service_set_id = insert_service_set(txn, ss_b)
        station_a.station_id = insert_station(txn, station_a)
        station_b.station_id = insert_station(txn, station_b)
    with transaction_wrapper(self.connection) as txn:
        insert_service_set_associated_station(txn,
                                              measurement.measurement_id,
                                              ss_a.bssid,
                                              station_a.mac_address)
        insert_service_set_associated_station(txn,
                                              measurement.measurement_id,
                                              ss_b.bssid,
                                              station_b.mac_address)
    found = select_service_sets_for_measurement(self.connection,
                                                measurement.measurement_id)
    assert_that(found).is_length(2)
    # Ordering is unspecified, so match each result to its fixture by id.
    for candidate in found:
        if candidate.service_set_id == ss_a.service_set_id:
            self.assert_service_sets_equal(candidate, ss_a)
        elif candidate.service_set_id == ss_b.service_set_id:
            self.assert_service_sets_equal(candidate, ss_b)
        else:
            # A row with an unknown id means the query returned junk.
            assert False
def test_upload_related_queries(self):
    """Cover upload-status tracking plus infra/associated MAC lookups."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [],
                                  extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as txn:
        measurement.measurement_id = insert_measurement(txn, measurement)
    pending = select_measurements_that_need_upload(self.connection, 100)
    assert_that(pending).is_length(1)
    self.assert_measurements_equal(pending[0], measurement)
    # Marking the row uploaded should empty the pending queue.
    with transaction_wrapper(self.connection) as txn:
        update_measurements_upload_status(txn, [measurement.measurement_id],
                                          True)
    assert_that(select_measurements_that_need_upload(self.connection,
                                                     100)).is_empty()
    with transaction_wrapper(self.connection) as txn:
        service_set_id = insert_service_set(
            txn, ServiceSet.new("00:00:00:01:01:01", "test"))
        infra_station_id = insert_station(txn,
                                          Station.new("01:02:03:04:05:06"))
        assoc_station_id = insert_station(txn,
                                          Station.new("01:02:03:04:05:07"))
        insert_service_set_infrastructure_station(
            txn, measurement.measurement_id, "00:00:00:01:01:01",
            "01:02:03:04:05:06")
        insert_service_set_associated_station(
            txn, measurement.measurement_id, "00:00:00:01:01:01",
            "01:02:03:04:05:07")
    assert_that(
        select_infrastructure_mac_addresses_for_measurement_service_set(
            self.connection, measurement.measurement_id,
            service_set_id)).is_length(1).contains("01:02:03:04:05:06")
    assert_that(
        select_associated_mac_addresses_for_measurement_service_set(
            self.connection, measurement.measurement_id,
            service_set_id)).is_length(1).contains("01:02:03:04:05:07")
def test_station_service_set_linking(self):
    """Verify infrastructure vs. associated station links resolve per service set.

    Inserts three service sets and four stations, but links only stations
    1 and 3 (infrastructure) and station 2 (associated) to the first
    service set.  The unlinked service sets and station 4 act as decoy
    rows that the SELECT helpers must not pick up.
    """
    new_service_set = ServiceSet.new("00:A0:C9:00:00:00",
                                     "CU Boulder Wireless",
                                     {"baz": ["foo", "bar"]})
    new_service_set2 = ServiceSet.new("00:A1:C9:01:02:03",
                                      "CU Guest Wireless", {})
    new_service_set3 = ServiceSet.new("00:A0:C9:00:00:01",
                                      "CU Boulder Wireless", {"foo": "bar"})
    new_station = Station.new("01:02:03:04:05:06", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    new_station_2 = Station.new("01:02:03:04:05:07", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    new_station_3 = Station.new("01:02:03:04:05:08", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    new_station_4 = Station.new("01:02:03:04:05:09", {
        "foo": [1, 2, 3],
        "bar": [4, 5, 6]
    })
    with transaction_wrapper(self.connection) as t:
        # NOTE(review): the sixth positional argument `False` fills a
        # parameter that other tests in this file pass by keyword
        # (`extra_data=`) or not at all — confirm it lines up with
        # Measurement.new's signature.
        measurement_id = insert_measurement(
            t,
            Measurement.new(
                0,
                0,
                0,
                1,
                [],
                False,
            ))
        new_service_set.service_set_id = insert_service_set(
            t, new_service_set)
        new_service_set2.service_set_id = insert_service_set(
            t, new_service_set2)
        new_service_set3.service_set_id = insert_service_set(
            t, new_service_set3)
        new_station.station_id = insert_station(t, new_station)
        new_station_2.station_id = insert_station(t, new_station_2)
        new_station_3.station_id = insert_station(t, new_station_3)
        new_station_4.station_id = insert_station(t, new_station_4)
        # Stations 1 and 3 are infrastructure nodes for the first BSSID...
        insert_service_set_infrastructure_station(t, measurement_id,
                                                  new_service_set.bssid,
                                                  new_station.mac_address)
        insert_service_set_infrastructure_station(
            t, measurement_id, new_service_set.bssid,
            new_station_3.mac_address)
        # ...while station 2 is merely associated with it.
        insert_service_set_associated_station(t, measurement_id,
                                              new_service_set.bssid,
                                              new_station_2.mac_address)
    infra_stations = select_infrastructure_stations_for_service_set(
        self.connection, new_service_set.service_set_id)
    assert_that(infra_stations).is_length(2)
    # Result ordering is unspecified, so match by station_id first.
    self.assert_stations_equal(new_station, [
        i for i in infra_stations
        if i.station_id == new_station.station_id
    ][0])
    associated_stations = select_associated_stations_for_service_set(
        self.connection, new_service_set.service_set_id)
    assert_that(associated_stations).is_length(1)
    self.assert_stations_equal(new_station_2, associated_stations[0])
def select_all_measurements(connection, limit=None, offset=None):
    """Return every measurement row, optionally paginated."""
    clause, params = limit_offset_helper(limit, offset)
    with cursor_manager(connection) as cur:
        cur.execute("SELECT * FROM measurement " + clause, params)
        rows = cur.fetchall()
    return [Measurement.from_row(row) for row in rows]
def run_offline_analysis(capture_file, start_time, end_time, sample_seconds,
                         channel):
    """Decode a pcap capture and aggregate per-station / per-BSSID statistics.

    Walks every frame in ``capture_file``, classifying management, control,
    and data frames into per-MAC ``DataCounters`` plus per-BSSID maps
    (SSIDs, infrastructure/associated MACs, beacon timing, signal power),
    then logs a summary and builds the resulting model objects.

    :param capture_file: path to a pcap file readable by pcapy.
    :param start_time: measurement start, stored on the Measurement.
    :param end_time: measurement end, stored on the Measurement.
    :param sample_seconds: sample duration, stored on the Measurement.
    :param channel: channel the capture was taken on; off-channel beacons
        are logged and excluded from beacon-timing/power aggregation.
    :return: dict holding the built ``measurement``, ``stations`` and
        ``service_sets`` objects plus the raw aggregation maps keyed by
        MAC/BSSID.
    """
    weird_frame_count = 0
    bssid_to_ssid_map = {}
    bssid_infra_macs = defaultdict(set)
    bssid_associated_macs = defaultdict(set)
    bssid_beacon_timing_payloads = defaultdict(list)
    bssid_beacon_data = {}
    bssid_to_jitter_map = {}
    bssid_to_power_map = defaultdict(list)
    noise_measurements = []
    action_counter = 0
    probe_req_counter = 0
    station_counters = defaultdict(DataCounters.zero)
    pcap_offline_dev = pcapy.open_offline(capture_file)
    header, payload = pcap_offline_dev.next()
    while header:
        try:
            # Use Scapy for the RadioTap decoding as dpkt's Radiotap
            # decoder is totally broken.
            radiotap_frame = dot11.RadioTap(payload)
            if radiotap_frame.dBm_AntNoise is not None:
                noise_measurements.append(radiotap_frame.dBm_AntNoise)
            frame = dpkt.radiotap.Radiotap(payload).data
            frame_type = frame.type
            frame_subtype = frame.subtype
            if frame_type == dpkt.ieee80211.MGMT_TYPE:
                mac = binary_to_mac(frame.mgmt.src)
                current_counter = station_counters[mac]
                current_counter.management_frame_count += 1
                if frame_subtype == dpkt.ieee80211.M_BEACON:
                    beacon = dot11.Dot11Beacon(frame.beacon.pack())
                    bssid = binary_to_mac(frame.mgmt.bssid)
                    try:
                        beacon_data = bssid_beacon_data[
                            bssid] = patched_network_stats(beacon)
                        target_channel = beacon_data.get("channel")
                    # Fix: narrowed from a bare `except:` so that
                    # KeyboardInterrupt/SystemExit are not swallowed here.
                    except Exception:
                        procedure_logger.exception(
                            "Failed to decode network stats...")
                        target_channel = None
                    if hasattr(frame, 'ssid'):
                        bssid_to_ssid_map[bssid] = frame.ssid.data
                        bssid_infra_macs[bssid].add(mac)
                    # Only on-channel (or channel-unknown) beacons feed the
                    # jitter/power aggregation.
                    if target_channel is None or target_channel == channel:
                        bssid_beacon_timing_payloads[bssid].append(
                            (beacon.timestamp, beacon.beacon_interval))
                        if radiotap_frame.dBm_AntSignal is not None:
                            bssid_to_power_map[bssid].append(
                                radiotap_frame.dBm_AntSignal)
                    else:
                        procedure_logger.warning(
                            "Off channel beacon ({0} vs {1}) seen for BSSID {2}"
                            "".format(target_channel, channel, bssid))
                if frame_subtype == dpkt.ieee80211.M_PROBE_RESP:
                    if hasattr(frame, 'ssid'):
                        bssid = binary_to_mac(frame.mgmt.bssid)
                        bssid_to_ssid_map[bssid] = frame.ssid.data
                        bssid_infra_macs[bssid].add(mac)
                if frame_subtype in (dpkt.ieee80211.M_ASSOC_REQ,
                                     dpkt.ieee80211.M_ASSOC_RESP):
                    current_counter.association_frame_count += 1
                if frame_subtype in (dpkt.ieee80211.M_REASSOC_REQ,
                                     dpkt.ieee80211.M_REASSOC_RESP):
                    current_counter.reassociation_frame_count += 1
                if frame_subtype == dpkt.ieee80211.M_DISASSOC:
                    current_counter.disassociation_frame_count += 1
                if frame_subtype == dpkt.ieee80211.M_ACTION:
                    action_counter += 1
                if frame_subtype == dpkt.ieee80211.M_PROBE_REQ:
                    probe_req_counter += 1
                if frame.retry:
                    current_counter.retry_frame_count += 1
                if radiotap_frame.dBm_AntSignal is not None:
                    current_counter.power_measurements.append(
                        radiotap_frame.dBm_AntSignal)
                if radiotap_frame.Rate is not None:
                    current_counter.rate_measurements.append(
                        radiotap_frame.Rate)
                if radiotap_frame.Flags.badFCS is not None:
                    current_counter.failed_fcs_count += (
                        1 if radiotap_frame.Flags.badFCS else 0)
            elif frame_type == dpkt.ieee80211.CTL_TYPE:
                # CTS/ACK frames only carry the *destination* MAC, so the
                # radio-level extras (power/rate/FCS) would be attributed
                # to the wrong station; those subtypes clear this flag.
                include_in_extra_measurements = True
                # NOTE(review): RTS frames bump `cts_frame_count` and CTS
                # frames bump `rts_frame_count` — this looks swapped, but
                # is preserved as-is; confirm against DataCounters' intent
                # before changing.
                if frame_subtype == dpkt.ieee80211.C_RTS:
                    mac = binary_to_mac(frame.rts.src)
                    current_counter = station_counters[mac]
                    current_counter.cts_frame_count += 1
                elif frame_subtype == dpkt.ieee80211.C_CTS:
                    mac = binary_to_mac(frame.cts.dst)
                    include_in_extra_measurements = False
                    current_counter = station_counters[mac]
                    current_counter.rts_frame_count += 1
                elif frame_subtype == dpkt.ieee80211.C_ACK:
                    mac = binary_to_mac(frame.ack.dst)
                    include_in_extra_measurements = False
                    current_counter = station_counters[mac]
                    current_counter.ack_frame_count += 1
                elif frame_subtype == dpkt.ieee80211.C_BLOCK_ACK:
                    mac = binary_to_mac(frame.back.src)
                    current_counter = station_counters[mac]
                elif frame_subtype == dpkt.ieee80211.C_BLOCK_ACK_REQ:
                    mac = binary_to_mac(frame.bar.src)
                    current_counter = station_counters[mac]
                elif frame_subtype == dpkt.ieee80211.C_CF_END:
                    mac = binary_to_mac(frame.cf_end.src)
                    current_counter = station_counters[mac]
                else:
                    # Unknown control subtype: skip per-station accounting.
                    # (The frame advance lives in the `finally:` below, so
                    # this `continue` can no longer spin forever on the
                    # same frame.)
                    continue
                if frame.retry:
                    current_counter.retry_frame_count += 1
                if include_in_extra_measurements:
                    current_counter.control_frame_count += 1
                    if radiotap_frame.dBm_AntSignal is not None:
                        current_counter.power_measurements.append(
                            radiotap_frame.dBm_AntSignal)
                    if radiotap_frame.Rate is not None:
                        current_counter.rate_measurements.append(
                            radiotap_frame.Rate)
                    if radiotap_frame.Flags.badFCS is not None:
                        current_counter.failed_fcs_count += (
                            1 if radiotap_frame.Flags.badFCS else 0)
            elif frame_type == dpkt.ieee80211.DATA_TYPE:
                src_mac = binary_to_mac(frame.data_frame.src)
                dst_mac = binary_to_mac(frame.data_frame.dst)
                if hasattr(frame.data_frame, 'bssid'):
                    bssid = binary_to_mac(frame.data_frame.bssid)
                else:
                    bssid = None
                current_counter = station_counters[src_mac]
                dst_current_counter = station_counters[dst_mac]
                # to_ds: client -> AP, so dst is infrastructure;
                # from_ds: AP -> client, so src is infrastructure.
                if frame.to_ds and bssid:
                    bssid_infra_macs[bssid].add(dst_mac)
                    bssid_associated_macs[bssid].add(src_mac)
                elif frame.from_ds and bssid:
                    bssid_infra_macs[bssid].add(src_mac)
                    bssid_associated_macs[bssid].add(dst_mac)
                current_counter.data_throughput_out += len(
                    frame.data_frame.data)
                dst_current_counter.data_throughput_in += len(
                    frame.data_frame.data)
                current_counter.data_frame_count += 1
                if frame.retry:
                    current_counter.retry_frame_count += 1
                if radiotap_frame.dBm_AntSignal is not None:
                    current_counter.power_measurements.append(
                        radiotap_frame.dBm_AntSignal)
                if radiotap_frame.Rate is not None:
                    current_counter.rate_measurements.append(
                        radiotap_frame.Rate)
                if radiotap_frame.Flags.badFCS is not None:
                    current_counter.failed_fcs_count += (
                        1 if radiotap_frame.Flags.badFCS else 0)
            else:
                pass
        except dpkt.dpkt.UnpackError:
            logging.warning(
                "dpkt lacks support for some IE80211 features. "
                "This could be causing spurious decode problems.",
                exc_info=True)
            weird_frame_count += 1
        finally:
            # Fix: advancing inside `finally` guarantees progress on every
            # path — normal fall-through, the UnpackError handler, and the
            # `continue` for unknown control subtypes (which previously
            # skipped the advance and looped forever on one frame).
            header, payload = pcap_offline_dev.next()
    pcap_offline_dev.close()
    measurement = Measurement.new(
        start_time, end_time, sample_seconds, channel, noise_measurements,
        data_counters=sum_data_counters(station_counters.values()),
        extra_data={'weird_frame_count': weird_frame_count})
    stations = [Station.new(mac_addr) for mac_addr in station_counters.keys()]
    service_sets = [
        ServiceSet.new(bssid,
                       network_name=bssid_to_ssid_map.get(bssid),
                       extra_data=bssid_beacon_data.get(bssid, {}))
        for bssid in set(bssid_infra_macs.keys()).union(
            set(bssid_associated_macs.keys()))
    ]
    # Beacon jitter is only recorded for BSSIDs that produced usable timing.
    for service_set in service_sets:
        jitter, bad_intervals, intervals = calculate_beacon_jitter(
            bssid_beacon_timing_payloads.get(service_set.bssid),
            service_set.bssid)
        if jitter is not None:
            bssid_to_jitter_map[service_set.bssid] = (jitter, bad_intervals,
                                                      intervals)
    procedure_logger.info("-----------------")
    procedure_logger.info("Analysis performed on channel: {0}".format(channel))
    procedure_logger.info("Noise Level: {0} +/- {1} dBm".format(
        measurement.average_noise, measurement.std_dev_noise))
    procedure_logger.info("Top level result:\n{0}".format(
        repr(measurement.data_counters)))
    procedure_logger.info("Action Frames: {0}".format(action_counter))
    procedure_logger.info(
        "Probe Request Frames: {0}".format(probe_req_counter))
    if service_sets:
        procedure_logger.info("Service Sets seen:")
        for service_set in service_sets:
            jitter, bad_intervals, intervals = bssid_to_jitter_map.get(
                service_set.bssid, (None, None, None))
            procedure_logger.info("-- {0} ({1})".format(
                service_set.bssid, service_set.network_name))
            if bad_intervals:
                procedure_logger.info("---- Changing intervals detected!!!")
                procedure_logger.info(
                    "---- Intervals Seen: {0}".format(intervals))
            if jitter:
                # Jitter values are tracked in microseconds; report in ms.
                procedure_logger.info(
                    "---- Avg +/- StdDev Beacon Jitter: {0} +/- {1} (ms)".
                    format(
                        altered_mean(jitter) / 1000.0,
                        altered_stddev(jitter) / 1000.0))
                procedure_logger.info(
                    "---- Min/Max Beacon Jitter: {0}/{1} (ms)".format(
                        min(jitter) / 1000.0,
                        max(jitter) / 1000.0))
                procedure_logger.info("---- Jitter Count: {0}".format(
                    len(jitter)))
    procedure_logger.info("{0} unique stations seen.".format(len(stations)))
    procedure_logger.info("-----------------")
    return {
        'measurement': measurement,
        'stations': stations,
        'service_sets': service_sets,
        'station_counters': station_counters,
        'bssid_associated_macs': bssid_associated_macs,
        'bssid_infra_macs': bssid_infra_macs,
        'bssid_to_ssid_map': bssid_to_ssid_map,
        'bssid_to_jitter_map': bssid_to_jitter_map,
        'bssid_to_power_map': bssid_to_power_map
    }