# --- fragment: tail of a per-calibration-point loop (loop header, `row`,
# --- `angle`, `cal_index`, `csvfile`, `new_dataset` are bound outside this view)
# Keep only samples whose strongest antenna reading sits inside the configured
# RSSI window (drops saturated and too-weak captures), tagging each kept row
# with its ground-truth angle as the last column.
max_rssi = np.max(row)
if cfg.min_rssi <= max_rssi <= cfg.max_rssi:
    new_dataset.append(row + [angle])
csvfile.close()
dataset[cal_index] = np.array(new_dataset)
# Last column is the angle label; the first 8 columns are antenna RSSIs
# (4 vertical V0..V3 + 4 horizontal H0..H3 — inferred from the column names below).
dataset_angle[cal_index] = dataset[cal_index][:, -1]
""" Create test data, one for each AP """
# fn.arrange_data presumably derives model features (incl. a V-minus-H term,
# see 'V_m_H' column below) from the raw 8-antenna RSSIs — confirm in fn module.
test_data[cal_index] = fn.arrange_data(dataset[cal_index][:, :-1])
dataseta_for_sd = pd.DataFrame(dataset[cal_index][:, :-1],
                               columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3'])
# filter noise below -10 dB
test_data[cal_index] = fn.noise_filter(np.array(test_data[cal_index]))
# read database from file
test_data[cal_index] = pd.DataFrame(test_data[cal_index],
                                    columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3', 'V_m_H'])
dataset_angle[cal_index] = pd.DataFrame(dataset_angle[cal_index], columns=['Angle'])
# filter angles out of range
# Boolean mask over rows; applied to both features and labels so they stay aligned.
valid_angle = (cfg.min_angle <= dataset_angle[cal_index]['Angle']) & \
              (cfg.max_angle >= dataset_angle[cal_index]['Angle'])
test_data[cal_index] = test_data[cal_index].loc[valid_angle]
dataset_angle[cal_index] = dataset_angle[cal_index].loc[valid_angle]
# Flatten the single-column label frame to a plain 1-D ndarray.
dataset_angle[cal_index] = np.array(dataset_angle[cal_index]).ravel()
def minimize_time_delay(dt):
    """Objective for tuning the clock offset between the AP log and the track.

    Shifts every AP timestamp by ``dt``, re-runs the RSSI -> DoA prediction
    pipeline for the current AP, and returns the root-mean-square error
    between the timed predictions and the ground-truth DoA — suitable as the
    cost function for a scalar minimizer over ``dt``.

    Relies on module-level state (``fname``, ``cfg_exp``, ``config``, ``fn``,
    ``cal``, ``track``, ``i``, ``j``, ``cur_ap_i``, ``ap_pred``,
    ``ap_pred_times``) and mutates ``ap_pred[i][j]`` / ``ap_pred_times[i][j]``
    as a side effect.

    :param dt: time offset (same units as the log's 'Time' column) added to
               every AP timestamp before matching against the track.
    :return: RMS prediction error for the shifted timestamps.
    """
    # Load the capture log and keep only rows for the AP under calibration.
    ap = pd.read_csv(fname, index_col=0, names=['MAC', 'Time', 'RSSIs', 'channel'])
    ap = ap.loc[ap.index == cfg_exp.mac]
    ap['Time'] += dt
    # ap = ap.loc[ap['Time'] >= track.track_time[i][0]]
    # ap = ap.loc[ap['Time'] <= track.track_time[i][-1]]
    # Decode the packed per-antenna RSSI string of each row into 8 numbers.
    ap_rssis = list(ap['RSSIs'])
    for k in range(len(ap_rssis)):
        ap_rssis[k] = parse_rssi(ap_rssis[k])
    del ap['RSSIs']
    if ap_rssis:
        ap_rssis = pd.DataFrame(ap_rssis,
                                columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3'],
                                index=ap.index)
        ap = pd.concat([ap, ap_rssis], axis=1)
    ap_rssis = np.array(ap_rssis)
    ap_arranged = fn.arrange_data(ap_rssis)  # Arranging model data
    # Strongest antenna reading per sample, used for the power-window filter.
    ap_max = np.apply_along_axis(np.max, 1, ap_rssis)
    # conditions
    not_sat_power = ap_max <= config.max_rssi
    not_low_power = ap_max >= config.min_rssi
    # Column 8 of the arranged features ('VmH') below -10 is treated as erroneous.
    not_erroneous = ap_arranged[:, 8] > -10
    ap_arranged = pd.DataFrame(ap_arranged,
                               columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3', 'VmH'],
                               index=ap.index)
    ap_arranged_filtered = ap_arranged.loc[not_sat_power & not_low_power & not_erroneous]
    ap_arranged_filtered = np.array(ap_arranged_filtered)
    ap_arranged_filtered = fn.noise_filter(ap_arranged_filtered)  # Filtered
    # Timestamps of the samples that survived filtering (side effect on globals).
    ap_pred_times[i][j] = np.array(ap['Time'].loc[not_sat_power & not_low_power & not_erroneous])
    # Predicting basic model result
    # NOTE(review): if no samples survive, ap_pred[i][j] keeps whatever value it
    # had from a previous call while ap_pred_times[i][j] was just overwritten —
    # confirm this stale-prediction path is intended.
    if ap_arranged_filtered.shape[0]:
        ap_pred[i][j] = cal.rfc[cur_ap_i].predict(ap_arranged_filtered)
        ap_pred[i][j] = ap_pred[i][j].reshape((ap_pred[i][j].shape[0], 1))
    # # plot predictions
    # plt.plot(ap_pred_times[i][j], ap_pred[i][j], 'go')
    # plt.show()
    """ amalgamating predictions for each time frame """
    # not valid prediction are saved as 100
    ap_timed_pred, ap_timed_sd = timed_predictions(ap_pred[i][j],
                                                   ap_pred_times[i][j],
                                                   track.track_time_int[i])
    # remove NaNs
    ap_timed_pred = fn.remove_nan(ap_timed_pred)
    # plt.plot(track.track_time_int[i], track.doa_true[0][:, j], 'r',
    #          track.track_time_int[i], ap_timed_pred, 'go')
    # plt.show()
    # RMS error vs. ground truth for antenna/AP j (name 'rsme' is the file's
    # original spelling; presumably means RMSE).
    rsme = np.sqrt(np.sum((track.doa_true[0][:, j].reshape(ap_timed_pred.shape)
                           - ap_timed_pred) ** 2) / ap_timed_pred.shape[0])
    # print 'AP: ', j, 'RSME is: ', rsme
    return rsme
ap_arranged = fn.arrange_data(ap_rssis) # Arranging model data ap_max = np.apply_along_axis(np.max, 1, ap_rssis) # conditions not_sat_power = ap_max <= config.max_rssi not_low_power = ap_max >= config.min_rssi not_erroneous = ap_arranged[:, 8] > -10 ap_arranged = pd.DataFrame(ap_arranged, columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3', 'VmH'], index=ap.index) ap_arranged_filtered = ap_arranged.loc[not_sat_power & not_low_power & not_erroneous] ap_arranged_filtered = np.array(ap_arranged_filtered) ap_arranged_filtered = fn.noise_filter(ap_arranged_filtered) # Filtered ap_pred_times[i][j] = np.array(ap['Time'].loc[not_sat_power & not_low_power & not_erroneous]) # Predicting basic model result if ap_arranged_filtered.shape[0]: ap_pred[i][j] = cal.rfc[cur_ap_i].predict(ap_arranged_filtered) ap_pred[i][j] = ap_pred[i][j].reshape((ap_pred[i][j].shape[0], 1)) # # plot predictions # plt.plot(ap_pred_times[i][j], ap_pred[i][j], 'go') # plt.show() """ amalgamating predictions for each time frame """
# --- fragment: tail of a per-calibration-point loop (loop header, `row`,
# --- `angle`, `cal_index`, `csvfile`, `new_dataset` are bound outside this
# --- view). Mirrors the test-data variant but fills arranged_data (training).
# Keep only samples whose strongest antenna reading sits inside the configured
# RSSI window, tagging each kept row with its ground-truth angle.
max_rssi = np.max(row)
if cfg.min_rssi <= max_rssi <= cfg.max_rssi:
    new_dataset.append(row + [angle])
csvfile.close()
dataset[cal_index] = np.array(new_dataset)
# Last column is the angle label; the first 8 columns are antenna RSSIs.
dataset_angle[cal_index] = dataset[cal_index][:, -1]
""" Create training data, one for each AP """
arranged_data[cal_index] = fn.arrange_data(dataset[cal_index][:, :-1])
dataseta_for_sd = pd.DataFrame(dataset[cal_index][:, :-1],
                               columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3'])
# filter noise below -10 dB
arranged_data[cal_index] = fn.noise_filter(np.array(arranged_data[cal_index]))
# read database from file
arranged_data[cal_index] = pd.DataFrame(arranged_data[cal_index],
                                        columns=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3', 'V_m_H'])
dataset_angle[cal_index] = pd.DataFrame(dataset_angle[cal_index], columns=['Angle'])
# filter angles out of range
# Boolean mask over rows; applied to both features and labels so they stay aligned.
valid_angle = (cfg.min_angle <= dataset_angle[cal_index]['Angle']) & \
              (cfg.max_angle >= dataset_angle[cal_index]['Angle'])
arranged_data[cal_index] = arranged_data[cal_index].loc[valid_angle]
dataset_angle[cal_index] = dataset_angle[cal_index].loc[valid_angle]
# Flatten the single-column label frame to a plain 1-D ndarray.
dataset_angle[cal_index] = np.array(dataset_angle[cal_index]).ravel()
# --- fragment: start of the per-AP training section; the loop body is
# --- truncated at the end of this view.
# NOTE(review): np.zeros(()) allocates a 0-d scalar array — looks like a
# placeholder that is reassigned later; confirm intended shape.
predictions = np.zeros(())
# One (time-frame x antenna) cell per prediction / per prediction spread.
ap_timed_pred = np.zeros((len(track.time_frames), len(track.valid_ants)))
ap_timed_sd = np.zeros((len(track.time_frames), len(track.valid_ants)))
for i in track.valid_ants:
    """ Create training data """
    # read database from file
    dataset = pd.read_csv('dataset_ap' + str(i) + '.csv',
                          names=['V0', 'V1', 'V2', 'V3', 'H0', 'H1', 'H2', 'H3', 'V_m_H'])
    # Prepend a zero 'time' column so the layout matches the experiment data
    # (features are later taken from column 1 onward) — presumably; confirm.
    dataset_time = np.zeros((dataset.shape[0], 1))
    dataset = np.array(dataset)
    dataset = np.hstack((dataset_time, dataset))
    # filter noise below -10 dB
    dataset = fn.noise_filter(np.array(dataset))
    dataset_angle = pd.read_csv('dataset_angle_ap' + str(i) + '.csv', names=['Angle'])
    # filter angles out of range
    valid_angle = (cfg_exp.min_angle <= dataset_angle['Angle']) & (cfg_exp.max_angle >= dataset_angle['Angle'])
    # Apply the same row mask to features (ndarray) and labels (DataFrame)
    # so they stay aligned.
    dataset = fn.filter_rows(dataset, valid_angle)
    dataset_angle = dataset_angle.loc[valid_angle]
    dataset_angle = np.array(dataset_angle).ravel()
    # Fitting to RF
    # Default-parameter random forest; features exclude the leading time column.
    clf = RandomForestClassifier()
    clf.fit(np.array(dataset[:, 1:]), dataset_angle)
    # creating predicted test set angles