def path_opt_test(llo):
    """Score a route that starts and ends at north_pole.

    Each entry of llo is indexed so that entry[1] is a location and
    entry[2] is a weight. Returns [weighted_cost, total_distance,
    total_weight], where weighted_cost accumulates the running distance
    multiplied by each stop's weight, plus a final constant factor of 10
    for the return leg (presumably a base/empty weight — confirm).
    """
    weighted_cost = 0.0
    total_distance = 0.0
    total_weight = 0.0
    previous = north_pole
    for stop in llo:
        total_distance += haversine(previous, stop[1])
        total_weight += stop[2]
        weighted_cost += total_distance * stop[2]
        previous = stop[1]
    # Close the loop back to the pole.
    total_distance += haversine(previous, north_pole)
    weighted_cost += total_distance * 10
    return [weighted_cost, total_distance, total_weight]
def path_opt_test(llo):
    """Evaluate a delivery route beginning and ending at north_pole.

    llo entries expose a location at index 1 and a weight at index 2.
    Returns [cost, dist, weight]: cost is the sum of (cumulative distance
    at each stop) * (stop weight), plus the final distance * 10 for the
    homeward leg; dist is the full tour length; weight is the total load.
    """
    cost, dist, weight = 0.0, 0.0, 0.0
    here = north_pole
    for entry in llo:
        loc, w = entry[1], entry[2]
        dist += haversine(here, loc)
        weight += w
        cost += dist * w
        here = loc
    dist += haversine(here, north_pole)
    cost += dist * 10
    return [cost, dist, weight]
def test_distance(self):
    """ruler.distance should match haversine (converted to km) for each
    consecutive pair of points, within a 0.003 relative error."""
    for a, b in zip(points, points[1:]):
        # haversine works in metres on (lat, lon); points are (lon, lat).
        expected = haversine((a[1], a[0]), (b[1], b[0])) / 1000
        actual = ruler.distance(a, b)
        print("%s, %s, %s, %s" % (a, b, expected, actual))
        assertError(actual, expected, 0.003)
def test_distance(self):
    """Compare ruler.distance against haversine (metres -> km) for every
    adjacent pair in `points`, allowing 0.003 relative error."""
    for first, second in zip(points[:-1], points[1:]):
        # Swap (lon, lat) -> (lat, lon) for haversine; divide to get km.
        expected = haversine((first[1], first[0]), (second[1], second[0])) / 1000
        actual = ruler.distance(first, second)
        print("%s, %s, %s, %s" % (first, second, expected, actual))
        assertError(actual, expected, 0.003)
def measure_between_two_points(self, point_a, point_b):
    """Return the haversine distance between two points.

    Expects points as (latitude, longitude) tuples; returns None when
    either point is falsy (empty or None).
    """
    # TODO: Determine if this check for non-null values is necessary.
    if not (point_a and point_b):
        return None
    # cHaversine expects points to be given as (latitude, longitude) pairs.
    return haversine(tuple(point_a), tuple(point_b))
def spacex(dfmain, desdistance):
    """Thin a trace: keep a row only when it is more than `desdistance`
    from the last kept row, or falls on a different calendar day.

    The first row is always kept; the last row is never examined
    (the loop stops at len - 1, matching the original behaviour).
    Returns the filtered DataFrame.
    """
    # list(...) so the pairs are indexable on Python 3 as well, where
    # zip() returns a lazy iterator.
    place = list(zip(dfmain.Latitude.values, dfmain.Longitude.values))
    dates = pd.to_datetime(dfmain['Captured Time'].values)
    keep = [0]  # previous to next point only if in logical time and space
    i = 1
    while i < len(dfmain) - 1:
        if (haversine(place[keep[-1]], place[i]) > desdistance
                or dates[keep[-1]].date() != dates[i].date()):
            keep.append(i)
        i += 1
    # keep always contains index 0, so the old empty-check/early-return
    # was dead code and has been removed.
    return dfmain.iloc[np.array(keep)]
def geoind_traces(data_path, destination_path, lamdaprv, radius, desdistance):
    """Perturb a location trace with geo-indistinguishability noise.

    Rows are grouped by 'User ID' and sorted by 'Captured Time'. Fresh
    noise (via geo_ind) is drawn for the first point of each user and
    whenever the user's true position moves more than `desdistance`
    from the last freshly-noised true position; otherwise the previous
    noisy location is replayed. The result is written as CSV to
    `destination_path`.

    Fix: removed the unused local `flag = 1` from the middle branch.
    """
    time.sleep(0.2)  # NOTE(review): purpose unclear — possibly staggers parallel workers; confirm
    np.random.seed(None)  # re-seed from the OS so repeated runs differ
    place_to_write = destination_path
    epsilon = float(lamdaprv / radius)
    df = pd.read_csv(data_path)
    groups = df.sort_values(['User ID', 'Captured Time'])
    with open(place_to_write, 'wb+') as writerlocation:
        writer_geoind = csv.writer(writerlocation, delimiter=",")
        writer_geoind.writerow(df.columns)
        last_user = -1
        last_lat = -1
        last_lon = -1
        last_noisy_lat = -1
        last_noisy_lon = -1
        for index, row in groups.iterrows():
            if row['User ID'] != last_user:
                # First point of a new user: always draw fresh noise.
                last_user = row['User ID']
                last_lat = row['Latitude']
                last_lon = row['Longitude']
                row['Latitude'], row['Longitude'] = geo_ind(
                    row['Latitude'], row['Longitude'], epsilon)
                last_noisy_lat = row['Latitude']
                last_noisy_lon = row['Longitude']
            elif haversine((last_lat, last_lon),
                           (row['Latitude'],
                            row['Longitude'])) > desdistance:
                # Moved beyond the release radius: draw fresh noise and
                # remember this true position as the new anchor.
                last_lat = row['Latitude']
                last_lon = row['Longitude']
                row['Latitude'], row['Longitude'] = geo_ind(
                    row['Latitude'], row['Longitude'], epsilon)
                last_noisy_lat = row['Latitude']
                last_noisy_lon = row['Longitude']
            else:
                # Still within the radius: replay the last noisy release.
                row['Latitude'], row[
                    'Longitude'] = last_noisy_lat, last_noisy_lon
            writer_geoind.writerow(row.values)
def test_very_small_distance(self):
    """Nearly-identical coordinates must not yield NaN."""
    a = (39.8862855, -86.0395778)
    b = (39.8862855, -86.0395777)
    self.assertFalse(math.isnan(haversine(a, b)))
def test_same_coords(self):
    """Distance between a coordinate and itself should not be NaN."""
    here = (39.11, -86.7)
    self.assertFalse(math.isnan(haversine(here, here)))
def test_small_distance(self):
    """Two longitudes that collapse to the same single-precision float
    must still be distinguished (double precision), giving dist > 0."""
    p1 = (39, -88.97223382)
    p2 = (39, -88.972237)
    dist = haversine(p1, p2)
    self.assertTrue(dist > 0)
def test_0_to_1(self):
    """(0,0) -> (1,1) is roughly 156.9 km; allow 1 km of slack."""
    known_dist = 156900
    self.assertTrue(abs(haversine((0, 0), (1, 1)) - known_dist) < 1000)
def test_tokyo_to_santiago(self):
    """Tokyo -> Santiago is about 16090 km; allow 5 km of slack."""
    expected_m = 16090 * 1000
    self.assertTrue(abs(haversine(tokyo, santiago) - expected_m) < 5000)
def test_paris_to_cape_town(self):
    """Paris -> Cape Town is about 9336 km; allow 5 km of slack."""
    expected_m = 9336 * 1000
    self.assertTrue(abs(haversine(paris, cape_town) - expected_m) < 5000)
def distance(loc1, loc2):
    """Haversine distance between the first two components of each location."""
    a = tuple(loc1)[:2]
    b = tuple(loc2)[:2]
    return haversine(a, b)
def radiocells_utility():
    # Build a box plot of the per-cell location error ("utility loss") that
    # each privacy defense introduces in the Radiocells dataset, then save it
    # as a PNG under utilities_plot. Python 2 (bare print statements).
    ensure_dir(utilities_plot)
    plt.rc('font', family='serif', serif='Times')
    plt.rc('text', usetex=True)
    flierprops = dict(marker='+',
                      markerfacecolor='r',
                      markersize=0.3,
                      linestyle='none',
                      markeredgecolor='r')
    boxprops = dict(linestyle='--')
    medianprops = dict(linestyle='-', linewidth=2.5, color='k')
    # NOTE(review): SMALL_SIZE and BIGGER_SIZE are defined but unused below.
    SMALL_SIZE = 9
    MEDIUM_SIZE = 10
    BIGGER_SIZE = 11
    plt.rc('font', size=MEDIUM_SIZE)  # controls default text sizes
    plt.rc('axes', titlesize=MEDIUM_SIZE)  # fontsize of the axes title
    plt.rc('axes', labelsize=MEDIUM_SIZE)  # fontsize of the x and y labels
    plt.rc('xtick', labelsize=MEDIUM_SIZE)  # fontsize of the tick labels
    plt.rc('ytick', labelsize=MEDIUM_SIZE)  # fontsize of the tick labels
    plt.rc('legend', fontsize=MEDIUM_SIZE)  # legend fontsize
    plt.rc('figure', titlesize=MEDIUM_SIZE)  # fontsize of the figure title
    rotation = 45
    width = 4.5
    height = 3.5
    # Directory names of the defense runs to compare, and the human-readable
    # labels (new_name_defs) used on the x axis, in matching order.
    defs = [
        'geoind_lamda_1.6_radius_0.05_method_lap',
        'geoind_lamda_1.6_radius_0.15_method_lap',
        'geoind_lamda_1.6_radius_0.3_method_lap',
        'geoind_lamda_1.6_radius_0.05_method_lap_remapping',
        'geoind_lamda_1.6_radius_0.15_method_lap_remapping',
        'geoind_lamda_1.6_radius_0.3_method_lap_remapping',
        'geoind_traces_1.6_radius_0.05_distance_30',
        'geoind_traces_1.6_radius_0.05_distance_60',
        'geoind_traces_1.6_radius_0.05_distance_90',
        'random_sample_80_percent', 'random_sample_60_percent',
        'random_sample_40_percent', 'rounded_4_digits', 'rounded_3_digits',
        'rounded_2_digits', 'spacex_30_meters', 'spacex_60_meters',
        'spacex_90_meters'
    ]
    new_name_defs = [
        'GeoInd: 50m', 'GeoInd: 150m', 'GeoInd: 300m', 'GeoInd-OR: 50m',
        'GeoInd-OR: 150m', 'GeoInd-OR: 300m', 'Release-GeoInd: 30m',
        'Release-GeoInd: 60m', 'Release-GeoInd: 90m', 'Random: 80%',
        'Random: 60%', 'Random: 40%', 'Rounding: 4', 'Rounding: 3',
        'Rounding: 2', 'Release: 30m', 'Release: 60m', 'Release: 90m'
    ]
    cities = ['World']
    for city in cities:
        place = city
        origpath = defenses_path + place + '/{}.avg.csv'.format(place)
        thispath = defenses_path
        print 'reading \n{}'.format(origpath)
        dorig = pd.read_csv(origpath, dtype={'mcc': int, 'mnc': int})
        print dorig
        print defenses_path
        boxplots = []
        names = []
        for defidx, item in enumerate(defs):
            try:
                ddef = pd.read_csv(thispath + '/{}/{}.avg.csv'.format(item, item),
                                   dtype={
                                       'mcc': int,
                                       'mnc': int
                                   })
                print(ddef)
            except IOError as e:
                # A missing defense run is skipped rather than fatal.
                print e
                continue
            # Join original and defended cells on the full cell identifier.
            df = pd.merge(dorig,
                          ddef,
                          how='inner',
                          on=['mcc', 'mnc', 'lac', 'Cellid'])
            tmp = []
            for row in df.itertuples():
                # Positional columns 5-8 are the two (lat, lon) pairs after the
                # merge; distance in metres, truncated to int per cell.
                tmp.append(int(haversine((row[5], row[6]), (row[7], row[8]))))
            boxplots.append(tmp)
            names.append(new_name_defs[defidx])
        ##### difference
        fig5 = plt.figure(1, figsize=(width, height))
        # Create an axes instance
        ax5 = fig5.gca()
        # Create the boxplot
        bp = ax5.boxplot(boxplots,
                         boxprops=boxprops,
                         flierprops=flierprops,
                         medianprops=medianprops,
                         widths=0.35,
                         patch_artist=False)
        ax5.set_xticklabels(names, rotation=rotation, ha='right', minor=False)
        ax5.spines["top"].set_visible(False)
        ax5.spines["bottom"].set_visible(False)
        ax5.spines["right"].set_visible(False)
        ax5.spines["left"].set_visible(False)
        # NOTE(review): .format(city) is a no-op here (no placeholder in the
        # title string) — confirm whether the city was meant to appear.
        plt.title('Utility loss in Radiocells dataset'.format(city))
        plt.ylabel('Meters')
        plt.tight_layout()
        plt.savefig(utilities_plot + 'radiocells_utilityloss.png',
                    dpi=360,
                    transparent=False,
                    frameon=False)
        #plt.show()
        # Clear figure state so the next iteration starts from a blank canvas.
        plt.gcf().clear()
        fig5.gca().clear()
def distance(pos1, pos2):
    """Haversine distance, considering only each position's first two fields."""
    start = tuple(pos1)[:2]
    end = tuple(pos2)[:2]
    return haversine(start, end)
# For each point on ther interpolation-grid, find the distance to each sample uniqueSampleLoc, uniqueSampleInverse, uniqueSampleIndices, uniqueSampleCount =\ np.unique(sampleLoc, return_inverse=True, return_index=True, return_counts=True, axis=0) if not args.identical: average_multiplier = np.copy(uniqueSampleCount) else: average_multiplier = np.ones_like(uniqueSampleCount) for i in range(len(longs)): for j in range(len(lats)): distances = [] # weighted interpolation for location in range(uniqueSampleLoc.shape[0]): distances.append( haversine(tuple(uniqueSampleLoc[location, :]), tuple([longs[i], lats[j]]))) distances = np.array(distances) dist_matrix[i, j] = (distances**-3) * average_multiplier dist_matrix[i, j] /= np.sum(dist_matrix[i, j]) # Initialize matrices to store rate-of-change, along with weighted and unweighted direction-of-change rateOfChange = np.zeros((len(longs) - 1, len(lats) - 1)) doubledDirectionOfChange = np.copy(rateOfChange) doubledAngle_dX = np.copy(rateOfChange) doubledAngle_dY = np.copy(rateOfChange) # The wombling itself # (for details, see Barbujani, Oden, and Sokal, (1989) # "Detecting regions of abrupt change in maps of biological variables" # in Syst. Zool. 38(4) 376-389