def top_loc_grid_freq(data, mac, size):
    """Estimate, per time bucket, the grid cell seen most often (ties broken by signal sum).

    For every time group, each reading is mapped to a grid cell via the reporting
    sensor's location; the cell with the highest (frequency, shifted-signal-sum)
    tuple wins and is converted back to coordinates.

    Parameters:
        data: iterable of rows grouped by group_data(data, 0); each row is
              (timestamp, sensor_id, signal) — signal presumably dBm (TODO confirm).
        mac:  source MAC echoed into every result row.
        size: grid cell size forwarded to grid_index / get_xyz.

    Returns:
        list of [mac, timestamp, x, y] rows, one per time group that produced
        at least one locatable reading.
    """
    results = []
    drone_data = read_drone_data()
    for time_group in group_data(data, 0):
        index_count = {}
        for row in time_group[1]:
            loc = find_location(row[1], drone_data)
            # Fix: find_location can return None (normal_grid_method already
            # guards for this); previously this crashed on loc['x_m'].
            if loc is None:
                continue
            index = grid_index((loc['x_m'], loc['y_m']), (X_OFFSET, Y_OFFSET), size)
            cell = (index[0], index[1])
            stats = index_count.setdefault(cell, [0, 0])
            stats[0] += 1                    # occurrence count
            stats[1] += 100 + int(row[2])    # shifted signal sum (tie-breaker)
        # Fix: skip groups where no reading could be located — max() on an
        # empty dict would raise ValueError.
        if not index_count:
            continue
        top_loc = max(index_count, key=lambda c: (index_count[c][0], index_count[c][1]))
        res = get_xyz(top_loc, (X_OFFSET, Y_OFFSET), size)
        results.append([mac, time_group[0], res[0], res[1]])
    return results
def top_loc_signal(data, mac):
    """Per time bucket, return the location of the sensor reporting the strongest signal.

    Returns a list of [mac, timestamp, x_m, y_m, z_m] rows, one per time group.
    """
    drone_data = read_drone_data()
    locs = []
    for group in group_data(data, 0):
        timestamp, readings = group[0], group[1]
        # The reading with the maximal signal value decides the location.
        strongest = max(readings, key=lambda reading: reading[2])
        loc = find_location(strongest[1], drone_data)
        locs.append([mac, timestamp, loc["x_m"], loc["y_m"], loc["z_m"]])
    return locs
def main(datafile=DEF_FILE_NAME):
    """Run trilateration over the data file for a sweep of gamma values.

    Reads sensor data, builds per-time-bucket sensor lists (one reading per
    sensor), then for gamma in 0.00..3.00 (step 0.05) writes estimated
    positions to a per-gamma CSV under ../results/trilateration/.
    """
    data = read_data(datafile)
    drone_data = read_drone_data()
    # Map drone_id -> [x, y, z] from the drone metadata frame.
    drone_locs = {}
    for index, row in drone_data.iterrows():
        drone_locs[row["drone_id"]] = [float(row["x_m"]), float(row["y_m"]), float(row["z_m"])]
    # Only readings for the Pi's MAC are localized here.
    time_grouped = group_data(data[PI_MAC], 0)
    sensors_per_time = []
    for time_group in time_grouped:
        ap_ids = []
        sensors = []
        used_sensors = set()
        # Sort ascending by signal; dedup below keeps the FIRST occurrence
        # per sensor, i.e. the lowest signal value.
        # NOTE(review): if signals are dBm, this keeps the weakest reading —
        # confirm this is intended (reverse=True would keep the strongest).
        sorted_data = sorted(time_group[1], key=lambda x: x[2])
        for sensor in sorted_data:
            if(sensor[1] in used_sensors):
                continue
            used_sensors.add(sensor[1])
            try:
                loc = drone_locs[sensor[1]]
            except KeyError:
                # Sensor has no known drone location — skip it.
                continue
            ap_ids.append(sensor[1])
            sensors.append({'x': loc[0], 'y': loc[1], 'z': loc[2], 'signal': int(sensor[2])})
        # Trilateration needs more than 3 distinct APs; otherwise the bucket
        # is flagged as (presumably) jammed and dropped.
        if count_unique_aps(ap_ids) > 3:
            sensors_per_time.append([time_group[0], sensors])
        else:
            print('JAMMER')
    # Sweep the path-loss exponent gamma from 0.00 to 3.00 in 0.05 steps.
    for param in range(0, 301, 5):
        gamma = param / 100
        # NOTE(review): "a" append mode means repeated runs accumulate rows
        # (and duplicate headers) in the same file — confirm intended.
        with open("../results/trilateration/results_trilateration_{}_{}.csv".format(SECONDS_GROUPING, gamma), "a") as f:
            writer = csv.writer(f, delimiter=";")
            writer.writerow(["sourcemac", "timestamp", "x_m", "y_m", "z_m", "eta"])
            trilateration = Trilateration(gamma)
            for row in sensors_per_time:
                # est.x presumably holds (x, y, z, eta) from the solver — TODO confirm.
                est = trilateration.get_location(row[1])
                writer.writerow([PI_MAC, row[0]] + list(est.x))
def normal_grid_method(data, mac, size, std, method, alpha=0.5):
    """Grid-based localization with temporal smoothing via a normal prior.

    Per time bucket, scores every grid cell by the chosen method, normalizes
    the score matrix to sum to 1, and blends it with a Gaussian centered on
    the previous estimate (weight 1-alpha) unless too much time has passed.

    Parameters:
        data:   rows grouped by group_data(data, 0); each row is
                (timestamp, sensor_id, signal).
        mac:    source MAC echoed into the results.
        size:   grid cell size (forwarded to grid helpers).
        std:    standard deviation for the smoothing prior.
        method: 'signal_sum', 'freq', or anything else for max-signal scoring.
        alpha:  blend weight of the current observation vs. the prior (default 0.5).

    Returns:
        list of [mac, timestamp, x, y] rows, one per time group.
    """
    results = []
    drone_data = read_drone_data()
    prev_pos = None   # previous grid-cell estimate (row, col ordering per get_max_matrix_value)
    prev_time = None  # timestamp of the previous estimate
    dimensions = get_dimensions(size)
    std_matrix = create_std_matrix(std)
    time_grouped = group_data(data, 0)
    for time_group in time_grouped:
        values = list(time_group[1])
        g_data = [(item[0], item[1], item[2]) for item in values]
        index_count = np.zeros(dimensions)
        for row in g_data:
            loc = find_location(row[1], drone_data)
            if loc is None:
                # Reading from an unlocatable sensor — skip it.
                continue
            loc_tup = (loc['x_m'], loc['y_m'])
            index = grid_index(loc_tup, (X_OFFSET, Y_OFFSET), size)
            index_tup = (index[0], index[1])
            if method == 'signal_sum':
                # Accumulate shifted signal strength per cell.
                index_count[index_tup] += 100 + int(row[2])
            elif method == 'freq':
                # Count observations per cell.
                index_count[index_tup] += 1
            else:
                # Default: keep the strongest (shifted) signal seen per cell.
                index_count[index_tup] = max(100 + int(row[2]), index_count[index_tup])
        # make the matrix sum up to 1
        # NOTE(review): if every reading was skipped the sum is 0 and this
        # division produces NaNs — confirm upstream guarantees non-empty groups.
        index_count = index_count / np.sum(index_count)
        if prev_pos is None or (time_group[0] - prev_time) > MAX_SECONDS_DIFF:
            # No usable prior (first group, or gap too large): take the raw argmax.
            prev_pos = get_max_matrix_value(index_count)
        else:
            # Blend current observation with a Gaussian prior around the
            # previous position; note the (col, row) swap for the prior center.
            norm_matrix = create_norm_matrix((prev_pos[1], prev_pos[0]), std_matrix, dimensions)
            prev_pos = get_max_matrix_value(alpha * index_count + (1 - alpha) * norm_matrix)
        res = get_xyz(prev_pos, (X_OFFSET, Y_OFFSET), size)
        prev_time = time_group[0]
        results.append([mac, time_group[0], res[0], res[1]])
    return results
def top_loc_grid_signal(data, mac, size):
    """Per time bucket, snap the strongest-signal sensor's location to its grid cell.

    Returns a list of [mac, timestamp, x, y] rows, one per time group.
    """
    drone_data = read_drone_data()
    offsets = (X_OFFSET, Y_OFFSET)
    results = []
    for group in group_data(data, 0):
        timestamp, readings = group[0], group[1]
        best = max(readings, key=lambda reading: reading[2])
        loc = find_location(best[1], drone_data)
        # Map the sensor position into the grid, then back to cell coordinates.
        cell = grid_index((loc['x_m'], loc['y_m']), offsets, size)
        xyz = get_xyz((cell[0], cell[1]), offsets, size)
        results.append([mac, timestamp, xyz[0], xyz[1]])
    return results
def top_loc_signal_sum(data, mac):
    """Per time bucket, pick the sensor with the highest summed (shifted) signal.

    Returns a list of [mac, timestamp, x_m, y_m, z_m] rows, one per time group.
    """
    drone_data = read_drone_data()
    results = []
    for group in group_data(data, 0):
        timestamp, readings = group[0], group[1]
        totals = {}
        for reading in readings:
            # Shift signal by +100 so sums stay positive for presumably-dBm values.
            totals[reading[1]] = totals.get(reading[1], 0) + 100 + int(reading[2])
        best_sensor = max(totals, key=totals.get)
        loc = find_location(best_sensor, drone_data)
        results.append([mac, timestamp, loc["x_m"], loc["y_m"], loc["z_m"]])
    return results
def top_loc_frequency(data, mac):
    """Per time bucket, pick the most frequently reporting sensor (signal sum breaks ties).

    Returns a list of [mac, timestamp, x_m, y_m, z_m] rows, one per time group.
    """
    drone_data = read_drone_data()
    results = []
    for group in group_data(data, 0):
        timestamp, readings = group[0], group[1]
        stats = {}  # sensor_id -> (count, shifted signal sum)
        for reading in readings:
            count, total = stats.get(reading[1], (0, 0))
            stats[reading[1]] = (count + 1, total + 100 + int(reading[2]))
        # Tuple comparison: frequency first, then summed signal as tie-breaker.
        winner = max(stats, key=lambda sensor_id: stats[sensor_id])
        loc = find_location(winner, drone_data)
        results.append([mac, timestamp, loc["x_m"], loc["y_m"], loc["z_m"]])
    return results