def visualize():
    """Handle a visualization request.

    Parses the JSON request body into an ``Input``, rebuilds the sensor
    output vector, parses the ground truth, and plots all transmitters
    (true intruders plus authorized primaries/secondaries).

    Returns:
        'Bad Request' if the sensor data is incomplete or malformed
        (e.g. a few sensors did not send their data).
    """
    # step 0: parse the request data
    myinput = Input.from_json_dict(request.get_json())  # get_json() returns a dict
    myinput.train_percent = train.train_percent

    # step 1: set up sensor data
    # Pre-initialize so the error report below never raises NameError
    # when the failure happens before (or at) the first loop iteration.
    sensor_data, hostname, index = None, None, None
    try:
        sensor_data = myinput.sensor_data
        sensor_outputs = np.zeros(len(sensor_data))
        for hostname, rss in sensor_data.items():
            index = server_support.get_index(hostname)
            sensor_outputs[index] = rss
    except Exception as e:
        # Best-effort diagnostics; most probably a few sensors did not report.
        print(e)
        print('most probability a few sensors did not send its data')
        print(sensor_data)
        print(hostname, index)  # last (hostname, index) processed, if any
        return 'Bad Request'

    # step 2.1: set up ground truth
    ground_truth = myinput.ground_truth
    true_locations, true_powers, intruders = server_support.parse_ground_truth(
        ground_truth, ll_ss)
    visualize_all_transmitters(
        Default.grid_len, true_locations, authorized.primaries,
        authorized.secondaries[int(myinput.experiment_num)],
        myinput.experiment_num)
def get_index_from_log(log: str) -> list:
    """For repeating experiments: collect the ``image_index`` of every
    parsable ``Input`` line in the log file.

    Args:
        log: path to a log file where some lines are JSON-encoded Inputs.

    Returns:
        A list of image indices, in file order. Lines that are not valid
        Input records (e.g. Output lines, blank separators) are skipped.
    """
    index = []
    with open(log, 'r') as f:
        for line in f:
            try:
                myinput = Input.from_json_str(line)
                index.append(myinput.image_index)
            except Exception:  # was a bare except: narrow so ^C still works
                pass
    return index
def main1(error_output_file, localization_output_dir, granularity, inter_methods):
    """This is for the 40 x 40 large grid, evaluation usage.

    For each interpolation method: interpolate the coarse data, compute
    localization errors for (close, far) distance thresholds, and append
    the Input/Output logs to ``error_output_file``. For 'ildw' the
    interpolated data is also saved for the localization step.

    Args:
        error_output_file: path of the error log (opened in append mode).
        localization_output_dir: where 'ildw' interpolation results go.
        granularity: coarse granularity used for interpolation.
        inter_methods: iterable of method names ('idw' and/or 'ildw').

    Raises:
        Exception: if a method name is neither 'idw' nor 'ildw'.
    """
    # step 0: arguments
    dir_full = 'output8'
    full_grid_len = 40
    coarse_gran = granularity
    # 'with' guarantees the log file is closed even when an invalid
    # method raises (the original open()/close() pair leaked it).
    with open(error_output_file, 'a') as f_error:
        for method in inter_methods:
            # step 1: interpolate
            if method == 'idw':
                interpolate_func = IpsnInterpolate.idw
                ildw_dist = -1  # sentinel: not applicable to plain IDW
            elif method == 'ildw':
                interpolate_func = IpsnInterpolate.ildw
                ildw_dist = IpsnInterpolate.ILDW_DIST
            else:
                raise Exception('method not valid')
            ipsnInter = IpsnInterpolate(dir_full, full_grid_len)
            inter_data = ipsnInter.interpolate(coarse_gran, interpolate_func)
            # step 2: compute error
            dist_close = [8]
            dist_far = [32]
            for d_c, d_f in zip(dist_close, dist_far):
                myinput = Input(dir_full, full_grid_len, coarse_gran,
                                inter_methods, ildw_dist, d_c, d_f)
                myoutput = ipsnInter.compute_errors(inter_data, d_c, d_f)
                myoutput.method = method
                f_error.write(myinput.log())
                f_error.write(myoutput.log())
                f_error.write('\n')
            # step 3: save localization input to file
            if method == 'ildw':
                ipsnInter.save_for_localization(inter_data, localization_output_dir)
def main0(error_output_file, localization_output_dir, granularity, inter_methods):
    """This is for the 10 x 10 small grid, debugging usage.

    NOTE(review): the ``granularity`` argument is ignored here -- the
    coarse granularity is hard-coded to 4 for debugging; the parameter
    is kept so the signature matches ``main1``.

    Args:
        error_output_file: path of the error log (opened in append mode).
        localization_output_dir: where 'ildw' interpolation results go.
        granularity: unused (see note above).
        inter_methods: iterable of method names ('idw' and/or 'ildw').

    Raises:
        Exception: if a method name is neither 'idw' nor 'ildw'.
    """
    # step 0: arguments
    dir_full = 'output7'
    full_grid_len = 10
    coarse_gran = 4  # hard-coded for the small debugging grid
    dist_close = 3
    dist_far = 7
    myinput = Input(dir_full, full_grid_len, coarse_gran, inter_methods,
                    dist_close, dist_far)
    # 'with' guarantees the log file is closed even when an invalid
    # method raises (the original open()/close() pair leaked it).
    with open(error_output_file, 'a') as f_error:
        for method in inter_methods:
            # step 1: interpolate
            if method == 'idw':
                interpolate_func = IpsnInterpolate.idw
            elif method == 'ildw':
                interpolate_func = IpsnInterpolate.ildw
            else:
                raise Exception('method not valid')
            ipsnInter = IpsnInterpolate(dir_full, full_grid_len)
            inter_data = ipsnInter.interpolate(coarse_gran, interpolate_func)
            # step 2: compute error
            myoutput = ipsnInter.compute_errors(inter_data, dist_close, dist_far)
            myoutput.method = method
            f_error.write(myinput.log())
            f_error.write(myoutput.log())
            f_error.write('\n')
            # step 3: save localization input to file
            if method == 'ildw':
                ipsnInter.save_for_localization(inter_data, localization_output_dir)
def read_data(logs):
    """Read (Input, Output) pairs from log files. Neglect SPLOT's output.

    Each record in a log is one Input line followed by Output lines and
    terminated by a blank line. Only outputs of the 'our' method are
    kept, because only our method can predict power.

    Args:
        logs: iterable of log file paths.

    Returns:
        List of (Input, Output) tuples.
    """
    data = []  # an element is: (Input, Output)
    for log in logs:
        # 'with' closes each file; the original leaked every handle.
        with open(log, 'r') as f:
            while True:
                inputline = f.readline()
                if inputline == '':  # EOF
                    break
                myinput = Input.from_json_str(inputline)
                outputline = f.readline()
                while outputline != '' and outputline != '\n':
                    output = Output.from_json_str(outputline)
                    if output.method == 'our':  # only our method can predict power
                        data.append((myinput, output))
                    outputline = f.readline()
    return data
def add_num_authorized_to_input(file1, file2):
    """Rewrite log ``file1`` into ``file2``, stamping every Input record
    with the number of authorized users.

    The count is parsed from ``file1``'s name: the substring after the
    last '-' (e.g. 'log-5' -> 5). Output lines are copied unchanged.

    Args:
        file1: path of the source log; its name encodes the count.
        file2: path of the destination log (overwritten).
    """
    aut = int(file1[file1.rfind('-') + 1:])  # count encoded in the filename
    # Separate names for the handles -- the original shadowed the path
    # parameters with file objects and never closed either file.
    with open(file1, 'r') as fin, open(file2, 'w') as fout:
        while True:
            inputline = fin.readline()
            if inputline == '':  # EOF
                break
            myinput = Input.from_json_str(inputline)
            myinput.num_authorized = aut
            fout.write(myinput.log())
            outputline = fin.readline()
            while outputline != '' and outputline != '\n':
                output = Output.from_json_str(outputline)
                fout.write(output.log())
                outputline = fin.readline()
            fout.write('\n')
parser.add_argument( '-p', '--port', type=int, nargs=1, default=[5000], help='different port of the server holds different data') args = parser.parse_args() experimemts = args.exp_number data_source = args.data_source[0] methods = args.methods sensor_density = args.sen_density[0] port = args.port[0] myinput = Input(data_source=data_source, methods=methods) sensor_input_dataset = mydnn_util.SensorInputDatasetTranslation( root_dir=data_source, transform=mydnn_util.tf) total = sensor_input_dataset.__len__() myrange = range(experimemts[0], experimemts[1]) random.seed(1) index = random.sample(range(total), len(myrange)) # index = get_index_from_log('result/11.14/log') print('caitao', len(myrange), len(index)) for i, idx in zip(myrange, index): print(i, idx) myinput.experiment_num = i myinput.image_index = idx myinput.num_intruder = sensor_input_dataset[idx]['target_num'] myinput.sensor_density = get_sen_num(
def localize_ss():
    """ss stands for shared spectrum.

    Parses the JSON request into an ``Input``, rebuilds the sensor
    output vector, then runs the requested localization methods
    ('our-ss' with authorized secondaries added to the hypothesis data,
    and/or plain 'our' with authorized users deducted afterwards), and
    logs the Input together with all Outputs.

    Returns:
        'Bad Request' on malformed sensor data, 'Hello world' otherwise.
    """
    # step 0: parse the request data
    myinput = Input.from_json_dict(request.get_json())  # get_json() returns a dict
    myinput.train_percent = train.train_percent

    # step 1: set up sensor data
    # Pre-initialize so the error report below never raises NameError
    # when the failure happens before (or at) the first loop iteration.
    sensor_data, hostname, index = None, None, None
    try:
        sensor_data = myinput.sensor_data
        sensor_outputs = np.zeros(len(sensor_data))
        for hostname, rss in sensor_data.items():
            index = server_support.get_index(hostname)
            sensor_outputs[index] = rss
    except Exception as e:
        # Best-effort diagnostics; most probably a few sensors did not report.
        print(e)
        print('most probability a few sensors did not send its data')
        print(sensor_data)
        print(hostname, index)  # last (hostname, index) processed, if any
        return 'Bad Request'

    # step 2.1: set up ground truth
    ground_truth = myinput.ground_truth
    true_locations, true_powers, intruders = server_support.parse_ground_truth(
        ground_truth, ll_ss)

    # step 3: do the localization
    print('\n\n****\n\nNumber =', myinput.experiment_num)
    outputs = []
    if 'our-ss' in myinput.methods:
        start = time.time()
        # step 2.2: update the hypothesis data by adding the secondaries
        ll_ss.add_secondary2(authorized, myinput.experiment_num)
        pred_locations, pred_power = ll_ss.our_localization(
            np.copy(sensor_outputs), intruders, myinput.experiment_num)
        end = time.time()
        pred_locations = server_support.pred_loc_to_center(pred_locations)
        visualize_localization(40, true_locations, pred_locations,
                               myinput.experiment_num)
        all_authorized = authorized.primaries + authorized.secondaries[int(
            myinput.experiment_num)]
        print('\nAuthorized are:', all_authorized)
        errors, miss, false_alarm, power_errors = ll_ss.compute_error(
            true_locations, true_powers, pred_locations, pred_power)
        outputs.append(
            Output('our-ss', errors, false_alarm, miss, power_errors,
                   end - start, pred_locations))
    if 'our' in myinput.methods:
        start = time.time()
        pred_locations, pred_power = ll.our_localization(
            np.copy(sensor_outputs), intruders, myinput.experiment_num)
        end = time.time()
        pred_locations = server_support.pred_loc_to_center(pred_locations)
        visualize_localization(40, true_locations, pred_locations,
                               str(myinput.experiment_num) + '-')
        # deduct the authorized users
        ll.remove_authorized_users(pred_locations, pred_power, authorized,
                                   myinput.experiment_num)
        errors, miss, false_alarm, power_errors = ll.compute_error(
            true_locations, true_powers, pred_locations, pred_power)
        outputs.append(
            Output('our', errors, false_alarm, miss, power_errors,
                   end - start, pred_locations))

    # step 4: log the input and output
    server_support.log(myinput, outputs)
    return 'Hello world'
def localize():
    """Process the POST request.

    Parses the JSON request into an ``Input``, rebuilds the sensor
    output vector, runs every requested localization method ('our',
    'splot', 'cluster'), and logs the Input together with all Outputs.

    Returns:
        'Bad Request' on malformed sensor data, 'Hello world' otherwise.
    """
    # step 0: parse the request data
    myinput = Input.from_json_dict(request.get_json())  # get_json() returns a dict
    myinput.train_percent = train.train_percent

    # step 1: set up sensor data
    # Pre-initialize so the error report below never raises NameError
    # when the failure happens before (or at) the first loop iteration.
    sensor_data, hostname, index = None, None, None
    try:
        sensor_data = myinput.sensor_data
        sensor_outputs = np.zeros(len(sensor_data))
        for hostname, rss in sensor_data.items():
            index = server_support.get_index(hostname)
            sensor_outputs[index] = rss
    except Exception as e:
        # Best-effort diagnostics; most probably a few sensors did not report.
        print(e)
        print('most probability a few sensors did not send its data')
        print(sensor_data)
        print(hostname, index)  # last (hostname, index) processed, if any
        return 'Bad Request'

    # step 2: set up ground truth
    ground_truth = myinput.ground_truth
    true_locations, true_powers, intruders = server_support.parse_ground_truth(
        ground_truth, ll)

    # step 3: do the localization
    print('\n****\nNumber =', myinput.experiment_num)
    outputs = []
    if 'our' in myinput.methods:
        start = time.time()
        pred_locations, pred_power = ll.our_localization(
            np.copy(sensor_outputs), intruders, myinput.experiment_num)
        end = time.time()
        pred_locations = server_support.pred_loc_to_center(pred_locations)
        visualize_localization(40, true_locations, pred_locations,
                               myinput.experiment_num)
        errors, miss, false_alarm, power_errors = ll.compute_error(
            true_locations, true_powers, pred_locations, pred_power)
        outputs.append(
            Output('our', errors, false_alarm, miss, power_errors,
                   end - start, pred_locations))
    if 'splot' in myinput.methods:
        start = time.time()
        pred_locations = ll.splot_localization(np.copy(sensor_outputs),
                                               intruders,
                                               myinput.experiment_num)
        end = time.time()
        pred_locations = server_support.pred_loc_to_center(pred_locations)
        errors, miss, false_alarm = ll.compute_error2(true_locations,
                                                      pred_locations)
        outputs.append(
            Output('splot', errors, false_alarm, miss, [0],
                   end - start, pred_locations))
    if 'cluster' in myinput.methods:
        start = time.time()
        pred_locations = ll.cluster_localization_range(
            intruders, np.copy(sensor_outputs),
            num_of_intruders=int(myinput.num_intruder))
        end = time.time()
        pred_locations = server_support.pred_loc_to_center(pred_locations)
        errors, miss, false_alarm = ll.compute_error2(true_locations,
                                                      pred_locations)
        outputs.append(
            Output('cluster', errors, false_alarm, miss, [0],
                   end - start, pred_locations))

    # step 4: log the input and output
    server_support.log(myinput, outputs)
    return 'Hello world'
help='Different port of the server holds different data') args = parser.parse_args() training_gran = args.training_gran[0] num_intruder = args.num_intruder[0] num_authorized = args.num_authorized[0] sen_density = args.sen_density[0] methods = args.methods repeat = args.repeat port = args.port[0] # Client.test_server(Default.server_ip, port) myinput = Input(num_intruder=num_intruder, data_source='splat', methods=methods, sen_density=sen_density, num_authorized=num_authorized) # initialize a Localization object with the ground truth, use it to generate read data ll = Localization(grid_len=40, case='splat', debug=False) true_data_path = Default.true_data_path.format(sen_density) cov_file = true_data_path + '/cov' sensor_file = true_data_path + '/sensors' hypothesis_file = true_data_path + '/hypothesis' print('client side true data: \n{}\n{}\n{}\n'.format( cov_file, sensor_file, hypothesis_file)) ll.init_data(cov_file, sensor_file, hypothesis_file) ll.init_truehypo(hypothesis_file) authorized = Authorized(grid_len=Default.grid_len,
from input_output import IOUtility, Input, Output
from server import Server

# Re-log every record of an existing log file, renaming the second
# output of each record to method 'dl2'.
logs = ['result/11.19/log-differentsensor']
output_dir = 'result/11.19'
output_file = 'log-differentsensor2'
server = Server(output_dir, output_file)

# 'with' closes the log file; the original handle was never closed.
with open(logs[0], 'r') as f:
    while True:
        line = f.readline()
        if line == '':  # EOF
            break
        myinput = Input.from_json_str(line)
        outputs = []
        i = 1  # 1-based position of the output within this record
        line = f.readline()
        while line != '' and line != '\n':
            output = Output.from_json_str(line)
            if i == 2:  # the 2nd output of each record becomes 'dl2'
                output.method = 'dl2'
            i += 1
            outputs.append(output)
            line = f.readline()
        server.log(myinput, outputs)