def test_service_set_crud(self):
    """Round-trip a service set through insert, both select paths, and rename."""
    ss = ServiceSet.new("00:A0:C9:00:00:00", "CU Boulder Wireless",
                        {"baz": ["foo", "bar"]})
    with transaction_wrapper(self.connection) as t:
        ss.service_set_id = insert_service_set(t, ss)
    # Lookup by id and by bssid must return the same row.
    self.assert_service_sets_equal(
        ss, select_service_set_by_id(self.connection, ss.service_set_id))
    self.assert_service_sets_equal(
        ss, select_service_set_by_bssid(self.connection, ss.bssid))
    all_rows = select_all_service_sets(self.connection)
    assert_that(all_rows).is_length(1)
    self.assert_service_sets_equal(ss, all_rows[0])
    assert_that(ss.to_api_response()).is_instance_of(dict)
    # Renaming by bssid is reflected on the stored row.
    with transaction_wrapper(self.connection) as t:
        update_service_set_network_name(t, "00:A0:C9:00:00:00", "other wireless")
    renamed = select_service_set_by_id(self.connection, ss.service_set_id)
    assert_that(renamed.network_name).is_equal_to("other wireless")
def test_kv_functionality(self):
    """Exercise the kv store: set, get, get-all, prefix scan, default, delete."""
    assert_that(kv_store_get_all(self.connection)).is_empty()
    assert_that(kv_store_get_prefix(self.connection, "")).is_empty()
    with transaction_wrapper(self.connection) as t:
        kv_store_set(t, "foo/foo", 1)
        kv_store_set(t, "foo/bar", 2)
        kv_store_set(t, "bar/bar", 3)
    everything = kv_store_get_all(self.connection)
    assert_that(everything).is_length(3).contains(
        ("foo/foo", 1), ("foo/bar", 2), ("bar/bar", 3))
    # The empty prefix matches every key.
    assert_that(kv_store_get_prefix(self.connection, "")).is_length(3).contains(
        ("foo/foo", 1), ("foo/bar", 2), ("bar/bar", 3))
    assert_that(kv_store_get_prefix(self.connection, "foo")).is_length(2).does_not_contain(
        ("bar/bar", 3))
    assert_that(kv_store_get(self.connection, "foo/foo")).is_equal_to(1)
    # A missing key falls back to the supplied default.
    assert_that(kv_store_get(self.connection, "wat/wat", "default")).is_equal_to("default")
    with transaction_wrapper(self.connection) as t:
        kv_store_del(t, "foo/foo")
    assert_that(kv_store_get_all(self.connection)).is_length(2)
    assert_that(kv_store_get(self.connection, "foo/foo")).is_none()
def test_measurement_crud(self):
    """Insert two measurements, select them back, then bulk-delete by age."""
    first = Measurement.new(1.0, 2.0, 0.9, 1, [], extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as t:
        first.measurement_id = insert_measurement(t, first)
    assert_that(first.measurement_id).is_not_none().is_instance_of(int)
    self.assert_measurements_equal(
        first, select_measurement_by_id(self.connection, first.measurement_id))
    rows = select_all_measurements(self.connection, limit=500, offset=0)
    assert_that(rows).is_length(1)
    self.assert_measurements_equal(first, rows[0])
    second = Measurement.new(3.0, 4.0, 0.8, 2, [], extra_data={"baz": "bar"})
    with transaction_wrapper(self.connection) as t:
        second.measurement_id = insert_measurement(t, second)
    rows = select_all_measurements(self.connection, limit=500, offset=0)
    assert_that(rows).is_length(2)
    self.assert_measurements_equal(
        second, select_measurement_by_id(self.connection, second.measurement_id))
    assert_that(second.to_api_response()).is_instance_of(dict)
    # A max age of 0 days expires every stored measurement.
    with transaction_wrapper(self.connection) as t:
        count = delete_old_measurements(t, 0)
    assert_that(count).is_equal_to(2)
    assert_that(
        select_all_measurements(self.connection, limit=500, offset=0)).is_empty()
def run_janitor(database_location, log_file, verbose, db_timeout_seconds=60,
                measurement_max_age_days=14, do_vacuum=False, do_optimize=False):
    """Entry point for the janitor script.

    Sets up logging, records the script's start time and pid in the kv
    store, then delegates to clean_db to prune old measurements and
    optionally optimize/vacuum the database.

    Any exception is logged with a stack trace and re-raised so the
    supervising process sees a non-zero exit.
    """
    try:
        setup_logging(log_file, verbose)
        db_conn = create_connection(database_location, db_timeout_seconds)
        write_schema(db_conn)
        with transaction_wrapper(db_conn) as t:
            kv_store_set(t, "janitor/script_start_time", time.time())
            kv_store_set(t, 'janitor/script_pid', os.getpid())
        # BUG FIX: was "Sarting Janitorial tasks...".
        procedure_logger.info("Starting Janitorial tasks...")
        clean_db(db_conn, measurement_max_age_days, do_vacuum, do_optimize)
        procedure_logger.info("Database janitorial tasks finished")
    except BaseException:
        # BUG FIX: the message said "during upload" — copy/paste from the
        # upload script; this is the janitor.
        procedure_logger.exception(
            "Unhandled exception during janitorial tasks! Aborting...")
        raise
    else:
        procedure_logger.info("Janitor completed successfully. Ending...")
def run_upload(database_location, node_id, remote_api_base_url, api_key,
               log_file, verbose, db_timeout_seconds=60, batch_size=2,
               round_delay=3):
    """Entry point for the upload script.

    Records run metadata in the kv store, then repeatedly uploads batches
    of pending measurements (sleeping round_delay seconds between rounds)
    until pull_and_upload_measurements reports nothing left to do.
    Exceptions are logged and re-raised.
    """
    try:
        setup_logging(log_file, verbose)
        db_conn = create_connection(database_location, db_timeout_seconds)
        write_schema(db_conn)
        with transaction_wrapper(db_conn) as t:
            kv_store_set(t, "upload/script_start_time", time.time())
            kv_store_set(t, 'upload/script_pid', os.getpid())
            kv_store_set(t, "upload/remote_url", remote_api_base_url)
        work_remaining = True
        while work_remaining:
            procedure_logger.info("Pulling and uploading...")
            work_remaining = pull_and_upload_measurements(
                db_conn, remote_api_base_url, node_id, api_key, batch_size)
            procedure_logger.info("Snooze {0}".format(round_delay))
            time.sleep(round_delay)
    except BaseException:
        procedure_logger.exception(
            "Unhandled exception during upload! Aborting,...")
        raise
    else:
        procedure_logger.info("Upload completed successfully. Ending...")
def test_measurement_station_map(self):
    """Link two stations to one measurement with per-link data counters."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [], {"foo": "bar"})
    with transaction_wrapper(self.connection) as t:
        measurement.measurement_id = insert_measurement(t, measurement)
    station_a = Station.new("01:02:03:04:05:06",
                            {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    station_b = Station.new("01:02:03:04:05:07",
                            {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    with transaction_wrapper(self.connection) as t:
        station_a.station_id = insert_station(t, station_a)
        station_b.station_id = insert_station(t, station_b)
    counters = DataCounters(9, 2, 3, 4, 9, 3, 3, 3, 10, 2000, 1500, 1,
                            power_measurements=[1.0, 2.0, 3.0],
                            rate_measurements=[1, 2, 1, 4])
    # One link gets zeroed counters, the other a populated set.
    with transaction_wrapper(self.connection) as t:
        insert_measurement_station(t, measurement.measurement_id,
                                   station_a.station_id, DataCounters.zero())
        insert_measurement_station(t, measurement.measurement_id,
                                   station_b.station_id, counters)
    linked = select_stations_for_measurement(self.connection,
                                             measurement.measurement_id)
    assert_that(linked).is_length(2)
    self.assert_stations_equal(station_a, linked[0])
    self.assert_data_counters_equal(DataCounters.zero(), linked[0].data_counters)
    self.assert_stations_equal(station_b, linked[1])
    self.assert_data_counters_equal(counters, linked[1].data_counters)
def test_measurement_service_set(self):
    """Associate stations with service sets for one measurement, read back."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [],
                                  extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as t:
        measurement.measurement_id = insert_measurement(t, measurement)
    ss_a = ServiceSet.new("00:01:00:00:01:00", "CU Boulder Wireless",
                          {"baz": ["foo", "bar"]})
    ss_b = ServiceSet.new("00:01:00:00:01:01", "CU Boulder Wireless",
                          {"baz": ["foo", "bar"]})
    station_a = Station.new("00:02:00:00:02:00", {})
    station_b = Station.new("00:02:00:00:02:01", {})
    with transaction_wrapper(self.connection) as t:
        ss_a.service_set_id = insert_service_set(t, ss_a)
        ss_b.service_set_id = insert_service_set(t, ss_b)
        station_a.station_id = insert_station(t, station_a)
        station_b.station_id = insert_station(t, station_b)
    with transaction_wrapper(self.connection) as t:
        insert_service_set_associated_station(
            t, measurement.measurement_id, ss_a.bssid, station_a.mac_address)
        insert_service_set_associated_station(
            t, measurement.measurement_id, ss_b.bssid, station_b.mac_address)
    found = select_service_sets_for_measurement(self.connection,
                                                measurement.measurement_id)
    assert_that(found).is_length(2)
    # Rows may come back in any order; match each one by id.
    for ss in found:
        if ss.service_set_id == ss_a.service_set_id:
            self.assert_service_sets_equal(ss, ss_a)
        elif ss.service_set_id == ss_b.service_set_id:
            self.assert_service_sets_equal(ss, ss_b)
        else:
            # A row matching neither inserted service set is a failure.
            assert False
def test_upload_related_queries(self):
    """Queries the uploader relies on: pending selection, status flag, MAC lookups."""
    measurement = Measurement.new(1.0, 2.0, 0.9, 1, [],
                                  extra_data={"foo": "bar"})
    with transaction_wrapper(self.connection) as t:
        measurement.measurement_id = insert_measurement(t, measurement)
    pending = select_measurements_that_need_upload(self.connection, 100)
    assert_that(pending).is_length(1)
    self.assert_measurements_equal(pending[0], measurement)
    # Marking the measurement uploaded removes it from the pending set.
    with transaction_wrapper(self.connection) as t:
        update_measurements_upload_status(t, [measurement.measurement_id], True)
    assert_that(
        select_measurements_that_need_upload(self.connection, 100)).is_empty()
    with transaction_wrapper(self.connection) as t:
        ssid = insert_service_set(
            t, ServiceSet.new("00:00:00:01:01:01", "test"))
        sid1 = insert_station(t, Station.new("01:02:03:04:05:06"))
        sid2 = insert_station(t, Station.new("01:02:03:04:05:07"))
        insert_service_set_infrastructure_station(
            t, measurement.measurement_id, "00:00:00:01:01:01",
            "01:02:03:04:05:06")
        insert_service_set_associated_station(
            t, measurement.measurement_id, "00:00:00:01:01:01",
            "01:02:03:04:05:07")
    assert_that(
        select_infrastructure_mac_addresses_for_measurement_service_set(
            self.connection, measurement.measurement_id,
            ssid)).is_length(1).contains("01:02:03:04:05:06")
    assert_that(
        select_associated_mac_addresses_for_measurement_service_set(
            self.connection, measurement.measurement_id,
            ssid)).is_length(1).contains("01:02:03:04:05:07")
def write_offline_analysis_to_database(db_conn, analysis_data):
    """Persist one round of offline capture analysis in a single transaction.

    ``analysis_data`` is a dict produced by the offline analyzer; the keys
    unpacked below show the expected schema (measurement object, station and
    service-set objects, per-station counters, and several bssid-keyed maps).
    Ordering matters: stations and service sets are upserted first so that
    the link/jitter rows inserted afterwards reference valid ids.
    """
    measurement = analysis_data['measurement']
    stations = analysis_data['stations']
    service_sets = analysis_data['service_sets']
    station_counters = analysis_data['station_counters']
    bssid_associated_macs = analysis_data['bssid_associated_macs']
    bssid_infra_macs = analysis_data['bssid_infra_macs']
    bssid_to_ssid_map = analysis_data['bssid_to_ssid_map']
    bssid_to_jitter_map = analysis_data['bssid_to_jitter_map']
    bssid_to_power_map = analysis_data['bssid_to_power_map']
    with transaction_wrapper(db_conn) as t:
        measurement.measurement_id = insert_measurement(t, measurement)
        for station in stations:
            # Upsert by MAC: reuse the existing row's id if the station is
            # already known, otherwise insert it.
            opt_station = select_station_by_mac_address(t, station.mac_address)
            if opt_station:
                station.station_id = opt_station.station_id
            else:
                station.station_id = insert_station(t, station)
            insert_measurement_station(t, measurement.measurement_id,
                                       station.station_id,
                                       station_counters[station.mac_address])
        for service_set in service_sets:
            # Same upsert pattern, keyed by bssid.
            opt_service_set = select_service_set_by_bssid(t, service_set.bssid)
            if opt_service_set:
                service_set.service_set_id = opt_service_set.service_set_id
            else:
                service_set.service_set_id = insert_service_set(t, service_set)
            if service_set.bssid in bssid_to_jitter_map:
                # Map value is a (jitter, bad_intervals, intervals) triple;
                # intervals[0] is stored as the measurement's interval —
                # presumably the beacon interval; verify against the analyzer.
                jitter, bad_intervals, intervals = bssid_to_jitter_map[
                    service_set.bssid]
                insert_jitter_measurement(
                    t,
                    ServiceSetJitterMeasurement.new(
                        measurement.measurement_id, service_set.service_set_id,
                        jitter, intervals[0], {
                            'bad_intervals': bad_intervals,
                            'average_power': altered_mean(
                                bssid_to_power_map.get(service_set.bssid, []))
                        }))
        # Link rows reference stations/service sets by MAC/bssid, inserted above.
        for bssid, infra_macs in bssid_infra_macs.items():
            for mac in infra_macs:
                insert_service_set_infrastructure_station(
                    t, measurement.measurement_id, bssid, mac)
        for bssid, associated_macs in bssid_associated_macs.items():
            for mac in associated_macs:
                insert_service_set_associated_station(
                    t, measurement.measurement_id, bssid, mac)
        for bssid, ssid in bssid_to_ssid_map.items():
            update_service_set_network_name(t, bssid, ssid)
    # Run outside the transaction: optimize after the writes have committed.
    optimize_db(db_conn)
def clean_db(db_connection, measuement_max_age_days, do_vacuum=False,
             do_optimize=False):
    """Prune measurements older than the given age (in days), then
    optionally optimize and/or vacuum the database.

    NOTE(review): the parameter name ``measuement_max_age_days`` is
    misspelled but kept as-is so keyword-argument callers keep working.
    """
    with transaction_wrapper(db_connection) as t:
        deleted_count = delete_old_measurements(t, measuement_max_age_days)
        procedure_logger.info(
            "{0} old measurements deleted from the database".format(
                deleted_count))
    if do_optimize:
        procedure_logger.info("Beginning DB optimize...")
        optimize_db(db_connection)
        procedure_logger.info("DB optimize completed.")
    if do_vacuum:
        procedure_logger.info("Beginning DB vacuum...")
        vacuum_db(db_connection)
        procedure_logger.info("DB Vacuum completed.")
def test_station_crud(self):
    """Insert one station and fetch it via every select path."""
    station = Station.new("01:02:03:04:05:06",
                          {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    with transaction_wrapper(self.connection) as t:
        station.station_id = insert_station(t, station)
    self.assert_stations_equal(
        station, select_station_by_id(self.connection, station.station_id))
    self.assert_stations_equal(
        station,
        select_station_by_mac_address(self.connection, station.mac_address))
    rows = select_all_stations(self.connection, limit=500)
    assert_that(rows).is_length(1)
    self.assert_stations_equal(station, rows[0])
    assert_that(station.to_api_response()).is_instance_of(dict)
def test_station_service_set_linking(self):
    """Verify infrastructure vs. associated station links for a service set."""
    ss_main = ServiceSet.new("00:A0:C9:00:00:00", "CU Boulder Wireless",
                             {"baz": ["foo", "bar"]})
    ss_guest = ServiceSet.new("00:A1:C9:01:02:03", "CU Guest Wireless", {})
    ss_other = ServiceSet.new("00:A0:C9:00:00:01", "CU Boulder Wireless",
                              {"foo": "bar"})
    station_infra = Station.new("01:02:03:04:05:06",
                                {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    station_assoc = Station.new("01:02:03:04:05:07",
                                {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    station_infra2 = Station.new("01:02:03:04:05:08",
                                 {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    station_unlinked = Station.new("01:02:03:04:05:09",
                                   {"foo": [1, 2, 3], "bar": [4, 5, 6]})
    with transaction_wrapper(self.connection) as t:
        measurement_id = insert_measurement(
            t, Measurement.new(0, 0, 0, 1, [], False))
        ss_main.service_set_id = insert_service_set(t, ss_main)
        ss_guest.service_set_id = insert_service_set(t, ss_guest)
        ss_other.service_set_id = insert_service_set(t, ss_other)
        station_infra.station_id = insert_station(t, station_infra)
        station_assoc.station_id = insert_station(t, station_assoc)
        station_infra2.station_id = insert_station(t, station_infra2)
        station_unlinked.station_id = insert_station(t, station_unlinked)
        # Two infrastructure links and one associated link on ss_main;
        # station_unlinked and the other service sets stay unlinked.
        insert_service_set_infrastructure_station(
            t, measurement_id, ss_main.bssid, station_infra.mac_address)
        insert_service_set_infrastructure_station(
            t, measurement_id, ss_main.bssid, station_infra2.mac_address)
        insert_service_set_associated_station(
            t, measurement_id, ss_main.bssid, station_assoc.mac_address)
    infra_stations = select_infrastructure_stations_for_service_set(
        self.connection, ss_main.service_set_id)
    assert_that(infra_stations).is_length(2)
    matching = [s for s in infra_stations
                if s.station_id == station_infra.station_id]
    self.assert_stations_equal(station_infra, matching[0])
    associated_stations = select_associated_stations_for_service_set(
        self.connection, ss_main.service_set_id)
    assert_that(associated_stations).is_length(1)
    self.assert_stations_equal(station_assoc, associated_stations[0])
def pull_and_upload_measurements(db_connection, remote_api_base_url, node_id,
                                 api_key, batch_size):
    """Upload one batch of pending measurements to the remote API.

    Selects up to ``batch_size`` measurements that still need uploading,
    posts each one (with its stations, service sets, associated/infra MACs,
    and jitter data) to the node's measurements endpoint, then marks the
    whole batch as uploaded in the same transaction.

    Returns True if any measurements were processed (callers use this as a
    "more work may remain" signal), False when the queue was empty.

    Raises requests.HTTPError if the remote API rejects an upload; the
    surrounding transaction then rolls back, so nothing is marked uploaded.
    """
    with transaction_wrapper(db_connection) as t:
        target_measurements = select_measurements_that_need_upload(
            t, batch_size)
        for measurement in target_measurements:
            procedure_logger.info(
                "Pulling stations and service sets info for measurement {0}".
                format(measurement.measurement_id))
            stations = select_stations_for_measurement(
                t, measurement.measurement_id)
            service_sets = select_service_sets_for_measurement(
                t, measurement.measurement_id)
            jitter_measurements = select_jitter_measurements_by_measurement_id(
                t, measurement.measurement_id)
            # Per-service-set lookups needed to build the upload payload.
            infra_macs_map = {}
            associated_macs_map = {}
            for ss in service_sets:
                infra_macs_map[ss.service_set_id] = \
                    select_infrastructure_mac_addresses_for_measurement_service_set(
                        t, measurement.measurement_id, ss.service_set_id)
                associated_macs_map[ss.service_set_id] = \
                    select_associated_mac_addresses_for_measurement_service_set(
                        t, measurement.measurement_id, ss.service_set_id)
            jitter_measurement_map = {
                j.service_set_id: j for j in jitter_measurements
            }
            bssid_to_network_name_map = {
                ss.bssid: ss.nice_network_name
                for ss in service_sets if ss.nice_network_name
            }
            procedure_logger.info(
                "Attempting to do data upload for measurement {0}".format(
                    measurement.measurement_id))
            upload_data = measurement.to_api_upload_payload(
                [s.to_api_upload_payload() for s in stations], [
                    ss.to_api_upload_payload(
                        infra_macs_map[ss.service_set_id],
                        associated_macs_map[ss.service_set_id],
                        jitter_measurement_map.get(ss.service_set_id))
                    for ss in service_sets
                ], bssid_to_network_name_map)
            response = requests.post(urljoin(
                remote_api_base_url,
                '/api/1.0/nodes/{nid}/measurements'.format(nid=node_id)),
                                     data=json_dumps(upload_data),
                                     headers={
                                         'Content-Type': 'application/json',
                                         'X-API-Key': api_key
                                     })
            # BUG FIX: this was a bare `except:` around a debugging
            # pprint-to-stdout. Log the body instead, best-effort: the
            # response may not be JSON (e.g. an HTML error page).
            try:
                procedure_logger.debug(
                    "Upload response body: {0}".format(response.json()))
            except ValueError:
                pass
            response.raise_for_status()
            # BUG FIX: format string used {0} twice, so the response body
            # passed as the second argument was never shown in the log.
            procedure_logger.info(
                "Info on uploaded measurement {0}: {1}".format(
                    measurement.measurement_id, response.json()))
        update_measurements_upload_status(
            t, [m.measurement_id for m in target_measurements], True)
    return bool(target_measurements)
def run_capture(wireless_interface,
                log_file,
                tmp_dir,
                database_loc,
                verbose=False,
                sample_seconds=10,
                rounds=0,
                ignore_non_root=False,
                db_timeout_seconds=60,
                heartbeat_func=lambda: None,
                run_with_monitor=True):
    """Capture Wi-Fi traffic per channel, analyze it offline, and store results.

    When ``run_with_monitor`` is true, the function re-invokes itself under
    ``run_monitored`` (with run_with_monitor=False) and returns that result,
    so the real work always happens in the monitored call.

    ``rounds == 0`` means run forever; otherwise run that many rounds.
    Each round iterates channels 1..11, captures ``sample_seconds`` of
    traffic into a temp pcap, runs offline analysis, and writes the result
    to the database. ``heartbeat_func`` is invoked at loop checkpoints —
    presumably a watchdog keep-alive; confirm with the monitor code.

    Raises OSError when not running as root and ignore_non_root is false.
    Any exception is logged and re-raised.
    """
    setup_logging(log_file, verbose)
    if run_with_monitor:
        # Re-enter this same function under the monitor wrapper.
        return run_monitored(run_capture, always_restart=False)(
            wireless_interface,
            log_file,
            tmp_dir,
            database_loc,
            verbose,
            sample_seconds,
            rounds,
            ignore_non_root,
            db_timeout_seconds,
            run_with_monitor=False)
    try:
        heartbeat_func()
        # Raw capture needs root unless the caller explicitly opts out.
        effective_user_id = os.geteuid()
        if effective_user_id != 0 and ignore_non_root:
            procedure_logger.warning(
                "Not running as root, attempting to proceed...")
        elif effective_user_id != 0:
            raise OSError(
                "This script requires root-level permissions to run. "
                "Please either run as superuser or use the --ignore-non-root flag."
            )
        run_forever = rounds == 0
        db_conn = create_connection(database_loc, db_timeout_seconds)
        write_schema(db_conn)
        # Record run metadata in the kv store for external observability.
        with transaction_wrapper(db_conn) as t:
            kv_store_set(t, "capture/script_start_time", time.time())
            kv_store_set(t, 'capture/script_pid', os.getpid())
            kv_store_set(t, "capture/interface", wireless_interface)
            kv_store_set(t, "capture/sample_seconds", sample_seconds)
        card = setup_capture_card(wireless_interface)
        if not os.path.exists(tmp_dir):
            procedure_logger.warning(
                "Tmp dir {0} does not exist. Creating...".format(tmp_dir))
            os.makedirs(tmp_dir)
        procedure_logger.info("Beginning channel scan.")
        heartbeat_func()
        current_round = 0
        while run_forever or rounds > 0:
            heartbeat_func()
            procedure_logger.info(
                "Executing capture round {0}".format(current_round))
            with transaction_wrapper(db_conn) as t:
                kv_store_set(t, "capture/current_script_round", current_round)
            # Channels 1-11 (2.4 GHz band; range() excludes 12).
            for channel in range(1, 12):
                heartbeat_func()
                procedure_logger.info(
                    "Changing to channel {0}".format(channel))
                # Cycle the interface down/up before retuning the channel.
                pyw.down(card)
                pyw.up(card)
                pyw.chset(card, channel, None)
                procedure_logger.info("Opening the pcap driver...")
                capture_file = os.path.join(
                    tmp_dir,
                    "channel{0}-{1}.pcap".format(channel, time.time()))
                try:
                    procedure_logger.info("Beginning live capture...")
                    start_time, end_time, duration = run_live_capture(
                        wireless_interface, capture_file, sample_seconds)
                    procedure_logger.info("Starting offline analysis...")
                    data = run_offline_analysis(capture_file, start_time,
                                                end_time, duration, channel)
                    procedure_logger.info(
                        "Writing analysis data to database...")
                    write_offline_analysis_to_database(db_conn, data)
                    procedure_logger.info("Data written...")
                finally:
                    # Always remove the temp pcap, even if analysis failed.
                    procedure_logger.info("Cleaning up capture file..")
                    if os.path.exists(capture_file):
                        os.unlink(capture_file)
            # Only count down when a finite number of rounds was requested.
            if not run_forever:
                rounds -= 1
            current_round += 1
    except BaseException:
        procedure_logger.exception(
            "Unhandled exception during capture! Aborting,...")
        raise
    else:
        procedure_logger.info("No more data. Ending...")