def _generate_all_path_coords(self, waypoints, raster_indices):
    """Snap waypoints to the raster grid and rasterise the path between them.

    Args:
        waypoints: sequence of (x, y) coordinate pairs. The bearing maths
            below is the great-circle forward-azimuth formula, so these
            appear to be lon/lat in radians — TODO confirm with callers.
        raster_indices: grid index mapping handed to ``snap_coords_to_grid``.

    Returns:
        Tuple of:
        - array of unique rasterised grid points, each row
          ``(coord0, coord1, heading)``;
        - list of per-segment headings in radians, normalised to [0, 2*pi).
    """
    # Snap the line string nodes to the raster grid.
    snapped_points = [
        snap_coords_to_grid(raster_indices, *coords) for coords in waypoints
    ]
    # Generate pairs of consecutive (x, y) coords.
    path_pairs = list(map(list, zip(snapped_points, snapped_points[1:])))
    # Forward-azimuth bearing for each consecutive waypoint pair.
    # (Renamed loop vars: the original shadowed the builtin `next`.)
    headings = []
    for prev, cur in zip(waypoints, waypoints[1:]):
        x = np.sin(cur[0] - prev[0]) * np.cos(cur[1])
        y = np.cos(prev[1]) * np.sin(cur[1]) - np.sin(prev[1]) * np.cos(
            cur[1]) * np.cos(cur[0] - prev[0])
        # Normalise atan2's (-pi, pi] output into [0, 2*pi).
        angle = (np.arctan2(x, y) + (2 * np.pi)) % (2 * np.pi)
        headings.append(angle)
    # Feed these pairs into the Bresenham algo to find the intermediate points.
    path_grid_points = [
        bresenham.make_line(*pair[0], *pair[1]) for pair in path_pairs
    ]
    # Tag every rasterised point with the heading of its parent segment.
    for idx, segment in enumerate(path_grid_points):
        point_headings = np.full(len(segment), headings[idx])
        path_grid_points[idx] = np.column_stack(
            (np.array(segment), point_headings))
    # Bring all these points together and remove duplicate coords.
    # NOTE(review): the original comment here said "Flip left to right as
    # bresenham spits out in (y,x) order", but no flip is performed —
    # confirm downstream consumers expect (y, x) ordering as-is.
    path_grid_points = np.unique(np.concatenate(path_grid_points, axis=0),
                                 axis=0)
    return path_grid_points, headings
def get_path_sum(nx, ny, tx, ty, grid):
    """Sum grid values along the Bresenham line from (nx, ny) to (tx, ty).

    Returns ``np.inf`` when any traversed cell is blocked (value -1),
    otherwise the sum of the traversed cell values.
    """
    line = bresenham.make_line(nx, ny, tx, ty)
    cell_values = grid[line[:, 0], line[:, 1]]
    # A single blocked cell (-1) anywhere on the line makes it impassable.
    if (cell_values == -1).any():
        return np.inf
    return cell_values.sum()
def test_bounds(self):
    """Every rasterised point must lie within the endpoints' bounding box."""
    start = [2, 0]
    end = [4, 4]
    path = make_line(*start, *end)
    coords = np.ravel(path)
    in_bounds = ((coords >= 0) & (coords <= 4)).all()
    self.assertTrue(in_bounds, "Points not bounded within endpoints")
def test_first_quadrant(self):
    """Shallow-gradient line into the first quadrant hits the expected cells."""
    start = [0, 1]  # x, y
    end = [6, 4]  # x, y
    path = make_line(*start, *end)
    # Expected points are in (y, x) order; the line may come back reversed.
    expected = np.array([[1, 0], [1, 1], [2, 2], [2, 3], [3, 4], [3, 5],
                         [4, 6]])
    forward_match = (path == expected).all()
    reverse_match = (path == np.flipud(expected)).all()
    self.assertTrue(forward_match or reverse_match,
                    'First Quadrant path incorrect')
def test_high_gradient(self):
    """A 45-degree descending line should step one cell diagonally each move."""
    start = [0, 6]  # x, y
    end = [6, 0]  # x, y
    path = make_line(*start, *end)
    expected = np.array([[0, 6], [1, 5], [2, 4], [3, 3], [4, 2], [5, 1],
                         [6, 0]])
    result = (path == expected).all() or (path == np.flipud(expected)).all()
    # Fixed copy-pasted failure message (was 'First Quadrant path incorrect').
    self.assertTrue(result, 'High gradient path incorrect')
def test_fourth_quadrant(self):
    """Line descending into the fourth quadrant hits the expected cells."""
    start = [0, -1]  # x, y
    end = [6, -4]  # x, y
    path = make_line(*start, *end)
    # Expected points are in (y, x) order; the line may come back reversed.
    expected = np.array([[-1, 0], [-1, 1], [-2, 2], [-2, 3], [-3, 4],
                         [-3, 5], [-4, 6]])
    forward_match = (path == expected).all()
    reverse_match = (path == np.flipud(expected)).all()
    self.assertTrue(forward_match or reverse_match,
                    'Fourth Quadrant path incorrect')
def test_third_quadrant(self):
    """Line into the third quadrant (both deltas negative) hits expected cells."""
    start = [0, -1]  # x, y
    end = [-6, -4]  # x, y
    path = make_line(*start, *end)
    # Expected points are in (y, x) order; the line may come back reversed.
    expected = np.array([[-1, 0], [-2, -1], [-2, -2], [-3, -3], [-3, -4],
                         [-4, -5], [-4, -6]])
    forward_match = (path == expected).all()
    reverse_match = (path == np.flipud(expected)).all()
    self.assertTrue(forward_match or reverse_match,
                    'Third Quadrant path incorrect')
def h(self, node: Tuple[int, int], goal: Tuple[int, int]):
    """Heuristic: Manhattan distance plus a log-scaled line-integral penalty.

    The straight line between ``node`` and ``goal`` is rasterised and the
    environment grid values along it are summed; when that integral exceeds 1,
    a ``k * log10(integral)`` penalty is added to the Manhattan distance.
    """
    if node == goal:
        return 0
    manhattan = abs(node[1] - goal[1]) + abs(node[0] - goal[0])
    # Coordinates are swapped when handed to make_line — presumably
    # converting (y, x) node order to the (x, y) order make_line expects;
    # confirm against the bresenham module.
    line = bresenham.make_line(node[1], node[0], goal[1], goal[0])
    integral_val = self.environment.grid[line[:, 0], line[:, 1]].sum()
    if integral_val <= 1:
        return manhattan
    return self.k * np.log10(integral_val) + manhattan
def annotate(self, data: List[gpd.GeoDataFrame],
             raster_data: Tuple[Dict[str, np.array], np.array],
             resolution=20,
             **kwargs) -> Overlay:
    """Build a fatality-risk overlay for this object's path geometry.

    Rasterises the path stored in ``self.dataframe``, evaluates a ballistic
    impact distribution per path heading, runs the strike/fatality model
    pipeline over every rasterised path point, plots the risk profile along
    the path (opened via an external viewer), and returns a geoviews
    Overlay of the path contours plus the summed fatality-risk raster.

    Args:
        data: NOTE(review): not referenced anywhere in this body — confirm
            whether it is required by the caller's interface.
        raster_data: tuple of (dict with 'Longitude'/'Latitude' coordinate
            arrays, 2-D raster array of population-like values — assumed;
            verify against callers).
        resolution: grid cell size used to scale the impact distribution
            (metres per cell, presumably — confirm).
        **kwargs: unused here.

    Returns:
        geoviews Overlay of path contours and the fatality-risk image.
    """
    import geoviews as gv
    import scipy.stats as ss
    import joblib as jl

    # Geographic bounds of the raster, (lon_min, lat_min, lon_max, lat_max).
    bounds = (raster_data[0]['Longitude'].min(),
              raster_data[0]['Latitude'].min(),
              raster_data[0]['Longitude'].max(),
              raster_data[0]['Latitude'].max())
    line_coords = list(self.dataframe.iloc[0].geometry.coords)
    # Snap the line string nodes to the raster grid.
    # NOTE(review): this snapping/heading/rasterising section duplicates
    # _generate_all_path_coords elsewhere in the file — consider reusing it.
    snapped_points = [
        snap_coords_to_grid(raster_data[0], *coords)
        for coords in line_coords
    ]
    # Generate pairs of consecutive (x,y) coords.
    path_pairs = list(map(list, zip(snapped_points, snapped_points[1:])))
    # Forward-azimuth bearing per consecutive waypoint pair, in [0, 2*pi).
    headings = []
    for i in range(1, len(line_coords)):
        prev = line_coords[i - 1]
        next = line_coords[i]  # NOTE(review): shadows the builtin `next`.
        x = np.sin(next[0] - prev[0]) * np.cos(next[1])
        y = np.cos(prev[1]) * np.sin(next[1]) - np.sin(prev[1]) * np.cos(
            next[1]) * np.cos(next[0] - prev[0])
        angle = (np.arctan2(x, y) + (2 * np.pi)) % (2 * np.pi)
        headings.append(angle)
    # Feed these pairs into the Bresenham algo to find the intermediate points.
    path_grid_points = [
        bresenham.make_line(*pair[0], *pair[1]) for pair in path_pairs
    ]
    # Tag every rasterised point with its parent segment's heading
    # (third column), so the per-heading distribution can be looked up later.
    for idx, segment in enumerate(path_grid_points):
        n = len(segment)
        point_headings = np.full(n, headings[idx])
        path_grid_points[idx] = np.column_stack(
            (np.array(segment), point_headings))
    # Bring all these points together and remove duplicate coords.
    # Flip left to right as bresenham spits out in (y,x) order
    # NOTE(review): no flip is actually performed here — confirm intent.
    path_grid_points = np.unique(np.concatenate(path_grid_points, axis=0),
                                 axis=0)
    bm = BallisticModel(self.aircraft)
    samples = 1000
    # Conjure up our distributions for various things: Monte Carlo samples
    # of altitude, velocity, wind speed and wind direction.
    alt = ss.norm(self.alt, 5).rvs(samples)
    vel = ss.norm(self.vel, 2.5).rvs(samples)
    wind_vels = ss.norm(self.wind_vel, 1).rvs(samples)
    wind_dirs = bearing_to_angle(
        ss.norm(self.wind_dir, np.deg2rad(5)).rvs(samples))
    wind_vel_y = wind_vels * np.sin(wind_dirs)
    wind_vel_x = wind_vels * np.cos(wind_dirs)
    # Create grid on which to evaluate each point of path with its pdf.
    raster_shape = raster_data[1].shape
    x, y = np.mgrid[0:raster_shape[0], 0:raster_shape[1]]
    eval_grid = np.vstack((x.ravel(), y.ravel())).T

    def wrap_hdg_dists(alt, vel, hdg, wind_vel_y, wind_vel_x):
        # One ballistic-impact distribution per distinct path heading;
        # mean/cov are rescaled from metres to grid cells via `resolution`.
        (mean, cov), v_i, a_i = bm.transform(
            alt, vel,
            ss.norm(hdg, np.deg2rad(2)).rvs(samples), wind_vel_y,
            wind_vel_x, 0, 0)
        return hdg, (mean / resolution, cov / resolution, v_i, a_i)

    # Parallelism only pays off with enough headings; otherwise run serially.
    njobs = 1 if len(headings) < 3 else -1
    # Hardcode backend to prevent Qt freaking out.
    res = jl.Parallel(n_jobs=njobs, backend='threading', verbose=1)(
        jl.delayed(wrap_hdg_dists)(alt, vel, hdg, wind_vel_y, wind_vel_x)
        for hdg in headings)
    dists_for_hdg = dict(res)

    def point_distr(c):
        # Impact pdf over the whole raster for path point c = (y?, x?, hdg);
        # the heading in c[2] selects the precomputed distribution, which is
        # then shifted to be centred on the path point.
        dist_params = dists_for_hdg[c[2]]
        pdf = np.array(ss.multivariate_normal(
            dist_params[0] + np.array([c[0], c[1]]),
            dist_params[1]).pdf(eval_grid),
                       dtype=np.longdouble)
        return pdf

    sm = StrikeModel(raster_data[1].ravel(), resolution * resolution,
                     self.aircraft.width)
    fm = FatalityModel(0.5, 1e6, 34)
    ac_mass = self.aircraft.mass

    def wrap_pipeline(path_point_state):
        # Full per-point pipeline: impact pdf -> strike pdf -> fatality pdf.
        impact_pdf = point_distr(path_point_state)
        impact_vel = dists_for_hdg[path_point_state[2]][2]
        impact_angle = dists_for_hdg[path_point_state[2]][3]
        impact_ke = velocity_to_kinetic_energy(ac_mass, impact_vel)
        strike_pdf = sm.transform(impact_pdf, impact_angle=impact_angle)
        fatality_pdf = fm.transform(strike_pdf, impact_ke=impact_ke)
        return fatality_pdf, fatality_pdf.max(), strike_pdf.max()

    res = jl.Parallel(n_jobs=-1, backend='threading',
                      verbose=1)(jl.delayed(wrap_pipeline)(c)
                                 for c in path_grid_points)
    fatality_pdfs = [r[0] for r in res]
    # PDFs come out in input order so sorting not required.
    pathwise_fatality_maxs = np.array([r[1] for r in res],
                                      dtype=np.longdouble)
    pathwise_strike_maxs = np.array([r[2] for r in res],
                                    dtype=np.longdouble)
    import matplotlib.pyplot as mpl
    import tempfile
    import subprocess
    # Plot strike/fatality risk against distance along the path.
    fig, ax = mpl.subplots(1, 1)
    # EPSG:27700 (British National Grid) gives the path length in metres.
    path_dist = self.dataframe.to_crs('EPSG:27700').iloc[0].geometry.length
    ax.set_yscale('log')
    ax.set_ylim(bottom=1e-18)
    x = np.linspace(0, path_dist, len(pathwise_fatality_maxs))
    ax.axhline(y=np.median(pathwise_fatality_maxs),
               c='y',
               label='Fatality Median')  # This seems to be as stable as fsum
    ax.plot(x, pathwise_fatality_maxs[::-1], c='r', label='Fatality Risk')
    ax.axhline(y=np.median(pathwise_strike_maxs), c='g',
               label='Strike Median')  # This seems to be as stable as fsum
    ax.plot(x, pathwise_strike_maxs[::-1], c='b', label='Strike Risk')
    ax.legend()
    ax.set_ylabel('Risk [$h^{-1}$]')
    ax.set_xlabel('Path Distance [m]')
    ax.set_title('Casualty Risk along path')
    tmppath = tempfile.mkstemp()[1] + '.png'
    fig.savefig(tmppath)
    # NOTE(review): 'explorer' is Windows-only and the command is passed as
    # a single string — this will not open the plot on other platforms.
    subprocess.run("explorer " + tmppath)
    # Total risk per raster cell: sum of all per-point fatality pdfs,
    # scaled by the probability of the triggering event.
    risk_map = np.sum(fatality_pdfs,
                      axis=0).reshape(raster_shape) * self.event_prob
    risk_raster = gv.Image(risk_map, vdims=['fatality_risk'],
                           bounds=bounds).options(
                               alpha=0.7,
                               cmap='viridis',
                               tools=['hover'],
                               clipping_colors={'min': (0, 0, 0, 0)})
    # Clip the colour range just above zero so empty cells render transparent.
    risk_raster = risk_raster.redim.range(
        fatality_risk=(risk_map.min() + 1e-15, risk_map.max()))
    print('Max probability of fatality: ', risk_map.max())
    return Overlay([
        gv.Contours(self.dataframe).opts(line_width=4,
                                         line_color='magenta'),
        risk_raster
    ])