Example #1
    def test_third_quad(self):
        self.assertEqual(bearing_to_angle(np.pi + np.pi / 4),
                         np.pi + np.pi / 4)
        self.assertEqual(bearing_to_angle(225, is_rad=False), 225)

        self.assertEqual(bearing_to_angle(np.pi), np.pi + np.pi / 2)
        self.assertEqual(bearing_to_angle(180, is_rad=False), 270)
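Together with the other quadrant tests in this listing, these assertions pin down the conversion as angle = (pi/2 - bearing) mod 2*pi, i.e. (90 - bearing) mod 360 in degrees. A minimal sketch consistent with them, assuming the signature used in the tests rather than the project's actual implementation:

import numpy as np

def bearing_to_angle(bearing, is_rad=True):
    # Compass bearing (clockwise from North) -> mathematical angle
    # (counter-clockwise from East, the +x axis)
    if is_rad:
        return (np.pi / 2 - bearing) % (2 * np.pi)
    return (90 - bearing) % 360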
Example #2
    def test_second_quad(self):
        self.assertEqual(bearing_to_angle(np.pi + 3 * np.pi / 4),
                         3 * np.pi / 4)
        self.assertEqual(bearing_to_angle(315, is_rad=False), 135)

        self.assertEqual(bearing_to_angle(np.pi + np.pi / 2), np.pi)
        self.assertEqual(bearing_to_angle(270, is_rad=False), 180)
    def transform(self, altitude, velocity, heading, wind_vel_y, wind_vel_x, loc_x,
                  loc_y):
        """
        Return the parameters of a multivariate normal distribution describing the ground impact probability under a
        ballistic descent.

        This function takes into account wind and returns the result in the NED frame, with x, y corresponding to
        East and North respectively. It takes the location of the event in the existing NED frame and transforms
        the path-aligned event frame (PAEF) results into the NED frame at that location.

        If passing arrays, they must all have the same shape. They are usually a single dimension of samples
        generated with scipy.stats.<some distribution>.rvs

        The method is as follows:
            1. The ballistic model returns one-dimensional results from the specified params; if these are arrays then
                a number of samples are created.
            2. The heading(s) are rotated into the NED frame
            3. A vectorised operation is performed to firstly rotate the PAEF results into the NED frame
            4. The second part of the vectorised operation then multiplies the wind vector (in NED) by the time
                taken to impact the ground. This is then added to the first part.
            5. The samples are used to fit a multivariate Gaussian from which the parameters are generated.

        :param altitude: the altitude in metres
        :type altitude: float or np.array
        :param velocity: the velocity over the ground of the aircraft in the direction of flight in m/s
        :type velocity: float or np.array
        :param heading: the ground track bearing of the aircraft in deg (North is 000)
        :type heading: float or np.array
        :param wind_vel_x: the x component of the wind in m/s
        :type wind_vel_x: float or np.array
        :param wind_vel_y: the y component of the wind in m/s
        :type wind_vel_y: float or np.array
        :param loc_x: event x location
        :type loc_x: int
        :param loc_y: event y location
        :type loc_y: int
        :return: a tuple of ((means, covariances) of the distribution, mean impact velocity, mean impact angle)
        :rtype: tuple of (np.arrays of shape (2,) for the means and (2,2) for the covariances), float, float
        """
        # Compute impact distances and times in the PAE frame
        # The velocity vector is assumed to be aligned with path vector, hence v_y is 0
        d_i, v_i, a_i, t_i = self.bm.compute_ballistic_distance(altitude, velocity, 0)

        # Compensate for x,y axes being rotated compared to bearings
        theta = bearing_to_angle(heading)
        # Form the array structure required and transform
        arr = np.vstack((np.zeros(d_i.shape), d_i, t_i, theta, wind_vel_x, wind_vel_y))
        transformed_arr = np.apply_along_axis(paef_to_ned_with_wind, 0, arr)
        # Remove columns that are all NaN
        transformed_arr = transformed_arr[:, ~np.isnan(transformed_arr).all(axis=0)]
        gm = GaussianMixture()
        gm.fit_predict(transformed_arr.T)
        # If the event and NED origins match, there is no need to translate
        if not loc_x and not loc_y:
            means = gm.means_[0]
        else:
            means = gm.means_[0] + np.array([loc_x, loc_y])
        # A single-component GaussianMixture is just a Gaussian fit; means_ and covariances_
        # are indexed by mixture component, so take the first (and only) component
        return (means, gm.covariances_[0]), v_i.mean(), a_i.mean()
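Steps 3 and 4 of the docstring describe what paef_to_ned_with_wind does to each column of arr: rotate the PAEF impact point into the NED frame, then add the wind drift accumulated over the fall time. A hypothetical per-column sketch along those lines; the column layout follows the vstack call above, but this is illustrative, not the repository's implementation:

import numpy as np

def paef_to_ned_with_wind(col):
    # col = [paef_x, paef_y, t_i, theta, wind_vel_x, wind_vel_y]
    paef = col[0:2]
    t_i, theta = col[2], col[3]
    wind = col[4:6]
    # Rotate the PAEF point by theta into the NED frame
    rot = np.array([[np.cos(theta), -np.sin(theta)],
                    [np.sin(theta), np.cos(theta)]])
    # Add the wind displacement over the time to impact
    return rot @ paef + t_i * wind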
Example #4
    def make_strike_map(self, bounds_polygon, hour, raster_shape, resolution):
        generated_layers = [
            layer.generate(bounds_polygon, raster_shape, hour=hour, resolution=resolution) for layer in self._layers]
        raster_grid = np.flipud(np.sum(
            [remove_raster_nans(res[1]) for res in generated_layers if
             res[1] is not None],
            axis=0))
        raster_shape = raster_grid.shape
        x, y = np.mgrid[0:raster_shape[0], 0:raster_shape[1]]
        eval_grid = np.vstack((x.ravel(), y.ravel())).T
        samples = 5000
        # Conjure up our distributions for various things
        alt = ss.norm(self.alt, 5).rvs(samples)
        vel = ss.norm(self.vel, 2.5).rvs(samples)
        wind_vels = ss.norm(self.wind_vel, 1).rvs(samples)
        wind_dirs = bearing_to_angle(ss.norm(self.wind_dir, np.deg2rad(5)).rvs(samples))
        wind_vel_y = wind_vels * np.sin(wind_dirs)
        wind_vel_x = wind_vels * np.cos(wind_dirs)
        (bm_mean, bm_cov), v_ib, a_ib = self.bm.transform(alt, vel,
                                                          ss.uniform(0, 360).rvs(samples),
                                                          wind_vel_y, wind_vel_x,
                                                          0, 0)
        (gm_mean, gm_cov), v_ig, a_ig = self.gm.transform(alt, vel,
                                                          ss.uniform(0, 360).rvs(samples),
                                                          wind_vel_y, wind_vel_x,
                                                          0, 0)
        sm_b = StrikeModel(raster_grid, resolution ** 2, self.aircraft.width, a_ib)
        sm_g = StrikeModel(raster_grid, resolution ** 2, self.aircraft.width, a_ig)
        premult = sm_b.premult_mat + sm_g.premult_mat
        offset_y, offset_x = raster_shape[0] // 2, raster_shape[1] // 2
        bm_pdf = ss.multivariate_normal(bm_mean + np.array([offset_y, offset_x]), bm_cov).pdf(eval_grid)
        gm_pdf = ss.multivariate_normal(gm_mean + np.array([offset_y, offset_x]), gm_cov).pdf(eval_grid)
        pdf = bm_pdf + gm_pdf
        pdf = pdf.reshape(raster_shape)
        padded_pdf = np.zeros(((raster_shape[0] * 3) + 1, (raster_shape[1] * 3) + 1))
        padded_pdf[raster_shape[0]:raster_shape[0] * 2, raster_shape[1]:raster_shape[1] * 2] = pdf
        padded_pdf = padded_pdf * self.event_prob
        padded_centre_y, padded_centre_x = raster_shape[0] + offset_y, raster_shape[1] + offset_x
        # Check whether the CUDA toolkit is available via an env var, otherwise fall back to the CPU-bound Numba version
        if not os.getenv('CUDA_HOME'):
            print('CUDA NOT found, falling back to Numba JITed CPU code')
            # Leaving parallelisation to Numba seems to be faster
            risk_map = wrap_all_pipeline(raster_shape, padded_pdf, padded_centre_y, padded_centre_x, premult)
        else:
            risk_map = np.zeros(raster_shape, dtype=float)
            threads_per_block = (32, 32)  # 1024 max per block
            blocks_per_grid = (
                int(np.ceil(raster_shape[1] / threads_per_block[1])),
                int(np.ceil(raster_shape[0] / threads_per_block[0]))
            )
            print('CUDA found, using config <<<' + str(blocks_per_grid) + ',' + str(threads_per_block) + '>>>')
            wrap_pipeline_cuda[blocks_per_grid, threads_per_block](raster_shape, padded_pdf, padded_centre_y,
                                                                   padded_centre_x, premult, risk_map)
        ac_mass = self.aircraft.mass
        impact_kes = (velocity_to_kinetic_energy(ac_mass, v_ib), velocity_to_kinetic_energy(ac_mass, v_ig))

        return risk_map, impact_kes
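make_strike_map converts the mean impact velocities into kinetic energies for the downstream fatality model. Assuming velocity_to_kinetic_energy is the standard formula Ek = m * v^2 / 2 (an assumption, not taken from the source), it would be roughly:

def velocity_to_kinetic_energy(mass, velocity):
    # Kinetic energy in joules for mass in kg and velocity in m/s
    return 0.5 * mass * velocity ** 2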
Example #5
    def test_first_quad(self):
        self.assertEqual(bearing_to_angle(np.pi / 4), np.pi / 4)
        self.assertEqual(bearing_to_angle(45, is_rad=False), 45)

        self.assertEqual(bearing_to_angle(np.pi / 2), 0)
        self.assertEqual(bearing_to_angle(90, is_rad=False), 0)

        self.assertEqual(bearing_to_angle(0), np.pi / 2)
        self.assertEqual(bearing_to_angle(0, is_rad=False), 90)

        self.assertEqual(bearing_to_angle(2 * np.pi), np.pi / 2)
        self.assertEqual(bearing_to_angle(360, is_rad=False), 90)
def primitives_to_dist(a_i, d_i, heading, loc_x, loc_y, t_i, v_i, wind_vel_x,
                       wind_vel_y):
    # Compensate for x,y axes being rotated compared to bearings
    theta = bearing_to_angle(heading)
    # Form the array structure required and transform
    arr = np.vstack(
        (np.zeros(d_i.shape), d_i, t_i, theta, wind_vel_x, wind_vel_y))
    transformed_arr = np.apply_along_axis(paef_to_ned_with_wind, 0, arr)
    # Remove columns that are all NaN
    transformed_arr = transformed_arr[:,
                                      ~np.isnan(transformed_arr).all(axis=0)]
    gm = GaussianMixture()
    gm.fit_predict(transformed_arr.T)
    # If the event and NED origins match, there is no need to translate
    if not loc_x and not loc_y:
        means = gm.means_[0]
    else:
        means = gm.means_[0] + np.array([loc_x, loc_y])
    # A single-component GaussianMixture is just a Gaussian fit; means_ and covariances_
    # are indexed by mixture component, so take the first (and only) component
    return (means, gm.covariances_[0]), v_i.mean(), a_i.mean()
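A hypothetical call, mirroring the transform method above: d_i, v_i, a_i and t_i would come from a descent model (e.g. bm.compute_ballistic_distance(alt, vel, 0)); the variable names here are illustrative only:

(means, cov), v_mean, a_mean = primitives_to_dist(a_i, d_i, heading, 0, 0, t_i,
                                                  v_i, wind_vel_x, wind_vel_y)
impact_dist = ss.multivariate_normal(mean=means, cov=cov)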
    def test_full_risk_map(self):

        bm = BallisticModel(self.aircraft)
        gm = GlideDescentModel(self.aircraft)
        fm = FatalityModel(0.3, 1e6, 34)
        ac_mass = self.aircraft.mass

        x, y = np.mgrid[0:self.raster_shape[0], 0:self.raster_shape[1]]
        eval_grid = np.vstack((x.ravel(), y.ravel())).T

        samples = 5000
        # Conjure up our distributions for various things
        alt = ss.norm(self.alt, 5).rvs(samples)
        vel = ss.norm(self.vel, 2.5).rvs(samples)
        wind_vels = ss.norm(self.wind_vel, 1).rvs(samples)
        wind_dirs = bearing_to_angle(
            ss.norm(self.wind_dir, np.deg2rad(5)).rvs(samples))
        wind_vel_y = wind_vels * np.sin(wind_dirs)
        wind_vel_x = wind_vels * np.cos(wind_dirs)

        (bm_mean,
         bm_cov), v_ib, a_ib = bm.transform(alt, vel,
                                            ss.uniform(0, 360).rvs(samples),
                                            wind_vel_y, wind_vel_x, 0, 0)
        (gm_mean,
         gm_cov), v_ig, a_ig = gm.transform(alt, vel,
                                            ss.uniform(0, 360).rvs(samples),
                                            wind_vel_y, wind_vel_x, 0, 0)
        sm_b = StrikeModel(self.raster_grid, self.resolution**2,
                           self.aircraft.width, a_ib)
        sm_g = StrikeModel(self.raster_grid, self.resolution**2,
                           self.aircraft.width, a_ig)
        premult = sm_b.premult_mat + sm_g.premult_mat

        offset_y, offset_x = self.raster_shape[0] // 2, self.raster_shape[
            1] // 2
        bm_pdf = ss.multivariate_normal(
            bm_mean + np.array([offset_y, offset_x]), bm_cov).pdf(eval_grid)
        gm_pdf = ss.multivariate_normal(
            gm_mean + np.array([offset_y, offset_x]), gm_cov).pdf(eval_grid)
        pdf = bm_pdf + gm_pdf
        pdf = pdf.reshape(self.raster_shape)

        padded_pdf = np.zeros(
            ((self.raster_shape[0] * 3) + 1, (self.raster_shape[1] * 3) + 1))
        padded_pdf[self.raster_shape[0]:self.raster_shape[0] * 2,
                   self.raster_shape[1]:self.raster_shape[1] * 2] = pdf
        padded_pdf = padded_pdf * self.event_prob
        padded_centre_y, padded_centre_x = self.raster_shape[
            0] + offset_y, self.raster_shape[1] + offset_x
        impact_ke_b = velocity_to_kinetic_energy(ac_mass, v_ib)
        impact_ke_g = velocity_to_kinetic_energy(ac_mass, v_ig)

        # Check whether the CUDA toolkit is available via an env var, otherwise fall back to the CPU-bound Numba version
        if not os.getenv('CUDA_HOME'):
            print('CUDA NOT found, falling back to Numba JITed CPU code')
            # Leaving parallelisation to Numba seems to be faster
            res = wrap_all_pipeline(self.raster_shape, padded_pdf,
                                    padded_centre_y, padded_centre_x, premult)
        else:
            res = np.zeros(self.raster_shape, dtype=float)
            threads_per_block = (32, 32)  # 1024 max per block
            blocks_per_grid = (int(
                np.ceil(self.raster_shape[1] / threads_per_block[1])),
                               int(
                                   np.ceil(self.raster_shape[0] /
                                           threads_per_block[0])))
            print('CUDA found, using config <<<' + str(blocks_per_grid) + ',' +
                  str(threads_per_block) + '>>>')
            wrap_pipeline_cuda[blocks_per_grid,
                               threads_per_block](self.raster_shape,
                                                  padded_pdf, padded_centre_y,
                                                  padded_centre_x, premult,
                                                  res)

        # Alternative joblib parallelisation
        # res = jl.Parallel(n_jobs=-1, prefer='threads', verbose=1)(
        #     jl.delayed(wrap_row_pipeline)(c, self.raster_shape, padded_pdf, (padded_centre_y, padded_centre_x), sm)
        #     for c in range(self.raster_shape[0]))

        strike_pdf = res
        # snapped_points = [snap_coords_to_grid(self.raster_indices, *coords) for coords in self.path_coords]

        import matplotlib.pyplot as mpl
        import matplotlib.colors as mc
        fig1, ax1 = mpl.subplots(1, 1)
        m1 = ax1.matshow(self.raster_grid, norm=mc.LogNorm())
        fig1.colorbar(m1, label='Population Density [people/km$^2$]')
        ax1.set_title(f'Population Density at t={self.hour}')
        ax1.set_xticks([0, self.raster_shape[1] - 1])
        ax1.set_yticks([0, self.raster_shape[0] - 1])
        ax1.set_xticklabels(
            [self.test_bound_coords[0], self.test_bound_coords[2]], )
        ax1.set_yticklabels(
            [self.test_bound_coords[3], self.test_bound_coords[1]], )
        fig1.tight_layout()
        fig1.savefig(f'tests/layers/figs/tpe_t{self.hour}.png',
                     bbox_inches='tight')
        fig1.show()

        if self.serialise:
            np.savetxt(f'strike_map_t{self.hour}', strike_pdf, delimiter=',')

        fig2, ax2 = mpl.subplots(1, 1)
        m2 = ax2.matshow(strike_pdf)
        fig2.colorbar(m2, label='Strike Risk [h$^{-1}$]')
        ax2.set_title(f'Strike Risk Map at t={self.hour}')
        ax2.set_xticks([0, self.raster_shape[1] - 1])
        ax2.set_yticks([0, self.raster_shape[0] - 1])
        ax2.set_xticklabels(
            [self.test_bound_coords[0], self.test_bound_coords[2]], )
        ax2.set_yticklabels(
            [self.test_bound_coords[3], self.test_bound_coords[1]], )
        fig2.tight_layout()
        fig2.savefig(f'tests/layers/figs/risk_strike_t{self.hour}.png',
                     bbox_inches='tight')
        fig2.show()

        fatality_pdf = fm.transform(strike_pdf,
                                    impact_ke=impact_ke_g) + fm.transform(
                                        strike_pdf, impact_ke=impact_ke_b)
        if self.serialise:
            np.savetxt(f'fatality_map_t{self.hour}',
                       fatality_pdf,
                       delimiter=',')

        fig3, ax3 = mpl.subplots(1, 1)
        m3 = ax3.matshow(fatality_pdf)
        fig3.colorbar(m3, label='Fatality Risk [h$^{-1}$]')
        ax3.set_title(f'Fatality Risk Map at t={self.hour}')
        ax3.set_xticks([0, self.raster_shape[1] - 1])
        ax3.set_yticks([0, self.raster_shape[0] - 1])
        ax3.set_xticklabels(
            [self.test_bound_coords[0], self.test_bound_coords[2]], )
        ax3.set_yticklabels(
            [self.test_bound_coords[3], self.test_bound_coords[1]], )
        fig3.tight_layout()
        fig3.savefig(f'tests/layers/figs/risk_fatality_t{self.hour}.png',
                     bbox_inches='tight')
        fig3.show()

        import rasterio
        from rasterio import transform
        trans = transform.from_bounds(*self.test_bound_coords,
                                      *self.raster_shape)
        rds = rasterio.open(
            f'tests/layers/tiffs/fatality_risk_h{self.hour}.tif',
            'w',
            driver='GTiff',
            count=1,
            dtype=rasterio.float64,
            crs='EPSG:4326',
            transform=trans,
            compress='lzw',
            width=self.raster_shape[0],
            height=self.raster_shape[1])
        rds.write(fatality_pdf, 1)
        rds.close()
    def test_ballistic_dist(self):
        """
        Profile ballistic model impact distance distributions in the North East Down frame with wind compensation
        """
        make_plot = True  # Set flag to plot result
        samples = 3000

        loc_x, loc_y = 0, 0

        # Conjure up our distributions for various things
        alt_mean = 50
        alt_std = 5

        vx_mean = 18
        vx_std = 2.5

        # In degrees!
        track_mean = 60
        track_std = 2

        # In degrees!
        wind_dir_mean = 120
        wind_dir_std = 5
        wind_vel_mean = 10
        wind_vel_std = 2

        alt = ss.norm(alt_mean, alt_std).rvs(samples)
        vel = ss.norm(vx_mean, vx_std).rvs(samples)
        track = np.deg2rad(ss.norm(track_mean, track_std).rvs(samples))
        wind_vel = ss.norm(wind_vel_mean, wind_vel_std).rvs(samples)
        wind_dir = bearing_to_angle(
            np.deg2rad(ss.norm(wind_dir_mean, wind_dir_std).rvs(samples)))

        wind_vel_x = wind_vel * np.cos(wind_dir)
        wind_vel_y = wind_vel * np.sin(wind_dir)

        bm = BallisticModel(self.ac)
        (means, cov), v_i, a_i = bm.transform(alt, vel, track, wind_vel_y,
                                              wind_vel_x, loc_x, loc_y)
        dist = ss.multivariate_normal(mean=means, cov=cov)

        if make_plot:
            # Make a sampling grid for plotting
            x, y = np.mgrid[(loc_x - 5):(loc_x + 95):0.5,
                            (loc_y - 35):(loc_y + 40):0.5]
            pos = np.vstack([x.ravel(), y.ravel()])
            # Sample KDE PDF on these points
            density = dist.pdf(pos.T)
            # Plot sampled KDE PDF
            import matplotlib.pyplot as mpl
            fig, ax = mpl.subplots(1, 1, figsize=(8, 6))
            sc = ax.scatter(x, y, c=density)
            cbar = fig.colorbar(sc)
            cbar.set_label('Probability')
            ax.set_xlabel('Distance [m]')
            ax.set_ylabel('Distance [m]')
            ax.set_title(
                f'Ballistic Ground Impact Probability Density \n'
                f' Altitude $\\sim \\mathcal{{N}}({alt_mean},{alt_std}^2)$m,'
                f' Groundspeed $\\sim \\mathcal{{N}}({vx_mean},{vx_std}^2)$m/s,'
                f' Track $\\sim \\mathcal{{N}}({track_mean},{track_std}^2)$deg,\n'
                f' Wind speed $\\sim \\mathcal{{N}}({wind_vel_mean},{wind_vel_std}^2)$m/s,'
                f' Wind bearing $\\sim \\mathcal{{N}}({wind_dir_mean},{wind_dir_std}^2)$deg'
            )
            ax.arrow(
                loc_x,
                loc_y,
                vx_mean * np.cos(bearing_to_angle(np.deg2rad(track_mean))),
                vx_mean * np.sin(bearing_to_angle(np.deg2rad(track_mean))),
                label='UAS Track',
                width=1,
                color='blue')
            ax.arrow(loc_x,
                     loc_y,
                     wind_vel_mean *
                     np.cos(bearing_to_angle(np.deg2rad(wind_dir_mean))),
                     wind_vel_mean *
                     np.sin(bearing_to_angle(np.deg2rad(wind_dir_mean))),
                     label='Wind Direction',
                     width=1,
                     color='red')
            fig.show()
    def annotate(self,
                 data: List[gpd.GeoDataFrame],
                 raster_data: Tuple[Dict[str, np.array], np.array],
                 resolution=20,
                 **kwargs) -> Overlay:
        import geoviews as gv
        import scipy.stats as ss
        import joblib as jl

        bounds = (raster_data[0]['Longitude'].min(),
                  raster_data[0]['Latitude'].min(),
                  raster_data[0]['Longitude'].max(),
                  raster_data[0]['Latitude'].max())

        line_coords = list(self.dataframe.iloc[0].geometry.coords)
        # Snap the line string nodes to the raster grid
        snapped_points = [
            snap_coords_to_grid(raster_data[0], *coords)
            for coords in line_coords
        ]
        # Generate pairs of consecutive (x,y) coords
        path_pairs = list(map(list, zip(snapped_points, snapped_points[1:])))
        headings = []
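        # Initial great-circle bearing between consecutive waypoints (taken as (lon, lat)):
        # theta = atan2(sin(dlon) * cos(lat2), cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(dlon)),
        # wrapped into [0, 2*pi)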
        for i in range(1, len(line_coords)):
            prev = line_coords[i - 1]
            curr = line_coords[i]
            x = np.sin(curr[0] - prev[0]) * np.cos(curr[1])
            y = np.cos(prev[1]) * np.sin(curr[1]) - np.sin(prev[1]) * np.cos(
                curr[1]) * np.cos(curr[0] - prev[0])
            angle = (np.arctan2(x, y) + (2 * np.pi)) % (2 * np.pi)
            headings.append(angle)
        # Feed these pairs into the Bresenham algo to find the intermediate points
        path_grid_points = [
            bresenham.make_line(*pair[0], *pair[1]) for pair in path_pairs
        ]
        for idx, segment in enumerate(path_grid_points):
            n = len(segment)
            point_headings = np.full(n, headings[idx])
            path_grid_points[idx] = np.column_stack(
                (np.array(segment), point_headings))
        # Bring all these points together and remove duplicate coords
        # Flip left to right as bresenham spits out in (y,x) order
        path_grid_points = np.unique(np.concatenate(path_grid_points, axis=0),
                                     axis=0)

        bm = BallisticModel(self.aircraft)

        samples = 1000
        # Conjure up our distributions for various things
        alt = ss.norm(self.alt, 5).rvs(samples)
        vel = ss.norm(self.vel, 2.5).rvs(samples)
        wind_vels = ss.norm(self.wind_vel, 1).rvs(samples)
        wind_dirs = bearing_to_angle(
            ss.norm(self.wind_dir, np.deg2rad(5)).rvs(samples))
        wind_vel_y = wind_vels * np.sin(wind_dirs)
        wind_vel_x = wind_vels * np.cos(wind_dirs)

        # Create grid on which to evaluate each point of path with its pdf
        raster_shape = raster_data[1].shape
        x, y = np.mgrid[0:raster_shape[0], 0:raster_shape[1]]
        eval_grid = np.vstack((x.ravel(), y.ravel())).T

        def wrap_hdg_dists(alt, vel, hdg, wind_vel_y, wind_vel_x):
            (mean, cov), v_i, a_i = bm.transform(
                alt, vel,
                ss.norm(hdg, np.deg2rad(2)).rvs(samples), wind_vel_y,
                wind_vel_x, 0, 0)
            return hdg, (mean / resolution, cov / resolution, v_i, a_i)

        njobs = 1 if len(headings) < 3 else -1

        # Hardcode backend to prevent Qt freaking out
        res = jl.Parallel(n_jobs=njobs, backend='threading', verbose=1)(
            jl.delayed(wrap_hdg_dists)(alt, vel, hdg, wind_vel_y, wind_vel_x)
            for hdg in headings)
        dists_for_hdg = dict(res)

        def point_distr(c):
            dist_params = dists_for_hdg[c[2]]
            pdf = np.array(ss.multivariate_normal(
                dist_params[0] + np.array([c[0], c[1]]),
                dist_params[1]).pdf(eval_grid),
                           dtype=np.longdouble)
            return pdf

        sm = StrikeModel(raster_data[1].ravel(), resolution * resolution,
                         self.aircraft.width)
        fm = FatalityModel(0.5, 1e6, 34)
        ac_mass = self.aircraft.mass

        def wrap_pipeline(path_point_state):
            impact_pdf = point_distr(path_point_state)
            impact_vel = dists_for_hdg[path_point_state[2]][2]
            impact_angle = dists_for_hdg[path_point_state[2]][3]
            impact_ke = velocity_to_kinetic_energy(ac_mass, impact_vel)

            strike_pdf = sm.transform(impact_pdf, impact_angle=impact_angle)
            fatality_pdf = fm.transform(strike_pdf, impact_ke=impact_ke)

            return fatality_pdf, fatality_pdf.max(), strike_pdf.max()

        res = jl.Parallel(n_jobs=-1, backend='threading',
                          verbose=1)(jl.delayed(wrap_pipeline)(c)
                                     for c in path_grid_points)
        fatality_pdfs = [r[0] for r in res]
        # PDFs come out in input order so sorting not required
        pathwise_fatality_maxs = np.array([r[1] for r in res],
                                          dtype=np.longdouble)
        pathwise_strike_maxs = np.array([r[2] for r in res],
                                        dtype=np.longdouble)

        import matplotlib.pyplot as mpl
        import tempfile
        import subprocess
        fig, ax = mpl.subplots(1, 1)
        path_dist = self.dataframe.to_crs('EPSG:27700').iloc[0].geometry.length
        ax.set_yscale('log')
        ax.set_ylim(bottom=1e-18)
        x = np.linspace(0, path_dist, len(pathwise_fatality_maxs))
        ax.axhline(
            y=np.median(pathwise_fatality_maxs),
            c='y',
            label='Fatality Median')  # This seems to be as stable as fsum
        ax.plot(x, pathwise_fatality_maxs[::-1], c='r', label='Fatality Risk')
        ax.axhline(y=np.median(pathwise_strike_maxs),
                   c='g',
                   label='Strike Median')  # This seems to be as stable as fsum
        ax.plot(x, pathwise_strike_maxs[::-1], c='b', label='Strike Risk')
        ax.legend()
        ax.set_ylabel('Risk [$h^{-1}$]')
        ax.set_xlabel('Path Distance [m]')
        ax.set_title('Casualty Risk along path')

        tmppath = tempfile.mkstemp()[1] + '.png'
        fig.savefig(tmppath)
        subprocess.run("explorer " + tmppath)

        risk_map = np.sum(fatality_pdfs,
                          axis=0).reshape(raster_shape) * self.event_prob

        risk_raster = gv.Image(risk_map,
                               vdims=['fatality_risk'],
                               bounds=bounds).options(
                                   alpha=0.7,
                                   cmap='viridis',
                                   tools=['hover'],
                                   clipping_colors={'min': (0, 0, 0, 0)})
        risk_raster = risk_raster.redim.range(fatality_risk=(risk_map.min() +
                                                             1e-15,
                                                             risk_map.max()))
        print('Max probability of fatality: ', risk_map.max())

        return Overlay([
            gv.Contours(self.dataframe).opts(line_width=4,
                                             line_color='magenta'), risk_raster
        ])
Example #10
def _make_strike_grid(aircraft, airspeed, altitude, failure_prob, pop_grid,
                      resolution, wind_direction, wind_speed):
    bm = BallisticModel(aircraft)
    gm = GlideDescentModel(aircraft)
    raster_shape = pop_grid.shape
    x, y = np.mgrid[0:raster_shape[0], 0:raster_shape[1]]
    eval_grid = np.vstack((x.ravel(), y.ravel())).T
    samples = 5000
    # Conjure up our distributions for various things
    alt = ss.norm(altitude, 5).rvs(samples)
    vel = ss.norm(airspeed, 2.5).rvs(samples)
    wind_vels = ss.norm(wind_speed, 1).rvs(samples)
    wind_dirs = bearing_to_angle(
        ss.norm(wind_direction, np.deg2rad(5)).rvs(samples))
    wind_vel_y = wind_vels * np.sin(wind_dirs)
    wind_vel_x = wind_vels * np.cos(wind_dirs)
    (bm_mean,
     bm_cov), v_ib, a_ib = bm.transform(alt, vel,
                                        ss.uniform(0, 360).rvs(samples),
                                        wind_vel_y, wind_vel_x, 0, 0)
    (gm_mean,
     gm_cov), v_ig, a_ig = gm.transform(alt, vel,
                                        ss.uniform(0, 360).rvs(samples),
                                        wind_vel_y, wind_vel_x, 0, 0)
    sm_b = StrikeModel(pop_grid, resolution**2, aircraft.width, a_ib)
    sm_g = StrikeModel(pop_grid, resolution**2, aircraft.width, a_ig)
    premult = sm_b.premult_mat + sm_g.premult_mat
    offset_y, offset_x = raster_shape[0] // 2, raster_shape[1] // 2
    bm_pdf = ss.multivariate_normal(bm_mean + np.array([offset_y, offset_x]),
                                    bm_cov).pdf(eval_grid)
    gm_pdf = ss.multivariate_normal(gm_mean + np.array([offset_y, offset_x]),
                                    gm_cov).pdf(eval_grid)
    pdf = bm_pdf + gm_pdf
    pdf = pdf.reshape(raster_shape)
    padded_pdf = np.zeros(
        ((raster_shape[0] * 3) + 1, (raster_shape[1] * 3) + 1))
    padded_pdf[raster_shape[0]:raster_shape[0] * 2,
               raster_shape[1]:raster_shape[1] * 2] = pdf
    padded_pdf = padded_pdf * failure_prob
    padded_centre_y, padded_centre_x = raster_shape[
        0] + offset_y, raster_shape[1] + offset_x

    # Check whether the CUDA toolkit is available via an env var, otherwise fall back to the CPU-bound Numba version
    if not os.getenv('CUDA_HOME'):
        print('CUDA NOT found, falling back to Numba JITed CPU code')
        # Leaving parallelisation to Numba seems to be faster
        res = wrap_all_pipeline(raster_shape, padded_pdf, padded_centre_y,
                                padded_centre_x, premult)
    else:
        res = np.zeros(raster_shape, dtype=float)
        threads_per_block = (32, 32)  # 1024 max per block
        blocks_per_grid = (int(np.ceil(raster_shape[1] /
                                       threads_per_block[1])),
                           int(np.ceil(raster_shape[0] /
                                       threads_per_block[0])))
        print('CUDA found, using config <<<' + str(blocks_per_grid) + ',' +
              str(threads_per_block) + '>>>')
        wrap_pipeline_cuda[blocks_per_grid,
                           threads_per_block](raster_shape, padded_pdf,
                                              padded_centre_y, padded_centre_x,
                                              premult, res)
    return res, (v_ib, v_ig)
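A hypothetical follow-on, mirroring test_full_risk_map above: the strike grid and the mean impact velocities can be fed into a FatalityModel to produce a fatality risk grid (the wiring here is illustrative):

strike_grid, (v_ib, v_ig) = _make_strike_grid(aircraft, airspeed, altitude, failure_prob,
                                              pop_grid, resolution, wind_direction, wind_speed)
fm = FatalityModel(0.3, 1e6, 34)
ac_mass = aircraft.mass
fatality_grid = (fm.transform(strike_grid, impact_ke=velocity_to_kinetic_energy(ac_mass, v_ib))
                 + fm.transform(strike_grid, impact_ke=velocity_to_kinetic_energy(ac_mass, v_ig)))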
Example #11
    def test_fourth_quad(self):
        self.assertEqual(bearing_to_angle(3 * np.pi / 4),
                         np.pi + 3 * np.pi / 4)
        self.assertEqual(bearing_to_angle(135, is_rad=False), 315)