Example #1
    def prediction_error(self,
                         predictions,
                         ground_truth=None,
                         beam_search=True,
                         resample_and_interpolation=True):
        if beam_search:
            best_seq_idx = self._best_sequence_idx(self.pred_logprobs)
            predictions = predictions[best_seq_idx]  # shape [n_seq, n_time, 6] -> lat, lon, alt, cumT, lat_spd, lon_spd
        if resample_and_interpolation:
            ground_truth = self._resample_interpolate_ground_truth() # list of arrays with shape of [n_time, 3]
        
        avg_horizontal_err = []
        avg_vertical_err = []
        all_horizontal_err = []
        all_vertical_err = []
        for i in range(len(ground_truth)):
            n_pnt = min(ground_truth[i].shape[0], predictions[i].shape[0] - self.n_feed)
            _, _, dist = g.inv(ground_truth[i][:n_pnt, 1],
                               ground_truth[i][:n_pnt, 0],
                               predictions[i][self.n_feed:self.n_feed + n_pnt, 1],
                               predictions[i][self.n_feed:self.n_feed + n_pnt, 0])
            alt_dist = (ground_truth[i][:n_pnt, 2] -
                        predictions[i][self.n_feed:self.n_feed + n_pnt, 2])
            avg_horizontal_err.append(np.sqrt(np.mean((dist/1852)**2))) # in nmi
            avg_vertical_err.append(np.sqrt(np.mean(alt_dist**2)))
            all_horizontal_err += list(dist/1852)
            all_vertical_err += list(alt_dist)

        
        return np.array(avg_horizontal_err), np.array(avg_vertical_err), np.array(all_horizontal_err), np.array(all_vertical_err)
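
A minimal standalone sketch of the horizontal-error computation above, assuming
the module-level g is a WGS84 pyproj.Geod (as the call pattern suggests); the
two small point arrays are hypothetical:

import numpy as np
import pyproj

# Hypothetical ground-truth and predicted points, one [lat, lon] row per point.
truth = np.array([[30.00, -95.00], [30.50, -94.50]])
pred = np.array([[30.01, -95.02], [30.52, -94.48]])

# Geod.inv takes (lons1, lats1, lons2, lats2) and returns forward azimuths,
# back azimuths and geodesic distances in metres.
g = pyproj.Geod(ellps='WGS84')
_, _, dist_m = g.inv(truth[:, 1], truth[:, 0], pred[:, 1], pred[:, 0])

rmse_nmi = np.sqrt(np.mean((dist_m / 1852.0) ** 2))  # 1 nmi = 1852 m
print(rmse_nmi)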
Example #2
    def __init__(
            self,
            train_track_mean,
            train_track_std,
            train_fp_mean,
            train_fp_std,
            feature_cubes_mean,
            feature_cubes_std,
            ncwf_data_rootdir='../../DATA/NCWF/gridded_storm_hourly/',
            test_track_dir='../../DATA/DeepTP/test_flight_tracks.csv',
            test_fp_dir='../../DATA/DeepTP/test_flight_plans.csv',
            flight_plan_util_dir='../../DATA/DeepTP/test_flight_plans_util.CSV',
            wind_data_rootdir='../../DATA/filtered_weather_data/namanl_small_npz/',
            grbs_common_info_dir='/media/storage/DATA/filtered_weather_data/grbs_common_info.npz',
            grbs_lvl_dict_dir='/media/storage/DATA/filtered_weather_data/grbs_level_common_info.pkl',
            grbs_smallgrid_kdtree_dir='/media/storage/DATA/filtered_weather_data/grbs_smallgrid_kdtree.pkl',
            ncwf_arr_dir='../../DATA/NCWF/gridded_storm.npz',
            ncwf_alt_dict_dir='../../DATA/NCWF/alt_dict.pkl',
            large_load=False,
            weather_feature=True,
            **kwargs):
        self.train_track_mean = train_track_mean
        self.train_track_std = train_track_std
        self.train_fp_mean = train_fp_mean
        self.train_fp_std = train_fp_std
        self.train_feature_cubes_mean = feature_cubes_mean
        self.train_feature_cubes_std = feature_cubes_std
        self.ncwf_data_rootdir = ncwf_data_rootdir
        self.large_load = large_load
        self.weather_feature = weather_feature

        self.dep_lat = kwargs.get('dep_lat', 29.98333333)
        self.dep_lon = kwargs.get('dep_lon', -95.33333333)
        self.arr_lat = kwargs.get('arr_lat', 42.3666666667)
        self.arr_lon = kwargs.get('arr_lon', -70.9666666667)

        self.direct_course = kwargs.get(
            'direct_course',
            g.inv(self.dep_lon, self.dep_lat, self.arr_lon, self.arr_lat)[0] *
            np.pi / 180)

        super().__init__(flight_track_dir=test_track_dir,
                         flight_plan_dir=test_fp_dir,
                         flight_plan_util_dir=flight_plan_util_dir,
                         wind_data_rootdir=wind_data_rootdir,
                         grbs_common_info_dir=grbs_common_info_dir,
                         grbs_lvl_dict_dir=grbs_lvl_dict_dir,
                         grbs_smallgrid_kdtree_dir=grbs_smallgrid_kdtree_dir,
                         ncwf_arr_dir=ncwf_arr_dir,
                         ncwf_alt_dict_dir=ncwf_alt_dict_dir,
                         load_ncwf_arr=False,
                         downsample=False)
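
The default direct_course above is the forward azimuth of the geodesic from the
departure point to the arrival point, converted from degrees to radians. A small
sketch of that computation, assuming g is a WGS84 pyproj.Geod as in the other
examples:

import numpy as np
import pyproj

g = pyproj.Geod(ellps='WGS84')
dep_lat, dep_lon = 29.98333333, -95.33333333      # default departure (near Houston)
arr_lat, arr_lon = 42.3666666667, -70.9666666667  # default arrival (near Boston)

# g.inv returns (forward azimuth, back azimuth, distance); keep the forward
# azimuth, given in degrees clockwise from north, and convert it to radians.
fwd_az_deg = g.inv(dep_lon, dep_lat, arr_lon, arr_lat)[0]
direct_course = fwd_az_deg * np.pi / 180.0
print(direct_course)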
Example #3
    def generate_predicted_pnt_feature_cube(self,
                                            predicted_final_track,
                                            known_flight_deptime,
                                            shift_xleft=0,
                                            shift_xright=2,
                                            shift_yup=1,
                                            shift_ydown=1,
                                            nx=20,
                                            ny=20):
        """
        predicted_final_track has the shape of [n_seq * n_mixture^i, n_time + t, n_input].
            The last axis coresponds to [Lat, Lon, Alt, cumDT, Speed, course]
        known_flight_deptime is a np array that contains
            FID, Elap_Time (depature time)
        wind_file_info is a dictionary of file time tree (kdtree) and an array of time objects

        """
        predicted_final_track = self.unnormalize_flight_tracks(
            predicted_final_track[:, -2:, :])

        azimuth_arr = g.inv(predicted_final_track[:, -2, 1],
                            predicted_final_track[:, -2, 0],
                            predicted_final_track[:, -1, 1],
                            predicted_final_track[:, -1, 0])[0]
        # Step 0: construct tmp matching dataframe that contains:
        #    elap_time_diff, azimuth, levels, wx_alt, wind_fname, wx_fname
        predicted_matched_info = np.empty((predicted_final_track.shape[0], 13))
        predicted_matched_info = pd.DataFrame(
            predicted_matched_info,
            columns=[
                'FID', 'Lat', 'Lon', 'Alt', 'cumDT', 'Lat_spd', 'Lon_spd',
                'Elap_Time_Diff', 'azimuth', 'levels', 'wx_alt', 'wind_fname',
                'wx_fname'
            ])

        predicted_matched_info.loc[:, [
            'Lat', 'Lon', 'Alt', 'cumDT', 'Lat_spd', 'Lon_spd'
        ]] = predicted_final_track[:, -1, :]

        predicted_matched_info.loc[:, 'azimuth'] = azimuth_arr * np.pi / 180

        # Step 1: map cumDT to Elaps_time
        known_flight_deptime_diff = (known_flight_deptime[:, 1] -
                                     baseline_time)
        known_flight_deptime_diff = np.array(
            [item.total_seconds() for item in known_flight_deptime_diff])
        multiplier = (predicted_matched_info.shape[0] //
                      known_flight_deptime_diff.shape[0])
        deptime = np.repeat(known_flight_deptime_diff,
                            repeats=multiplier,
                            axis=0)
        FIDs = np.repeat(known_flight_deptime[:, 0],
                         repeats=multiplier,
                         axis=0)

        elap_time_diff = predicted_matched_info.loc[:, 'cumDT'].values + deptime
        predicted_matched_info.loc[:, 'Elap_Time_Diff'] = elap_time_diff
        predicted_matched_info.loc[:, 'FID'] = FIDs

        # Step 2: Map Elaps_time with wx_fname and wind_fname
        # match with wind/ temperature fname
        wind_query_dist, wind_query_idx = self.wind_ftime_tree.query(
            elap_time_diff.reshape(-1, 1), p=1, distance_upper_bound=3600 * 3)
        # cKDTree marks unmatched queries with index == n and distance == inf,
        # so validity is an index-in-range check.
        wind_valid_query = wind_query_idx < self.wind_time_objs.shape[0]
        predicted_matched_info.loc[wind_valid_query,
                                   'wind_fname'] = self.wind_time_objs[
                                       wind_query_idx[wind_valid_query], 0]
        predicted_matched_info.loc[~wind_valid_query, 'wind_fname'] = np.nan

        # match with ncwf idx
        wx_query_dist, wx_query_idx = self.wx_ftime_tree.query(
            elap_time_diff.reshape(-1, 1), p=1, distance_upper_bound=3600)
        wx_valid_query = wx_query_idx < self.wx_fname_hourly.shape[0]  # index-in-range check
        predicted_matched_info.loc[wx_valid_query,
                                   'wx_fname'] = self.wx_fname_hourly[
                                       wx_query_idx[wx_valid_query]]
        predicted_matched_info.loc[~wx_valid_query, 'wx_fname'] = np.nan

        # Step 3: calculate wind_levels & ncwf_levels
        predicted_matched_info.loc[:, 'levels'] = predicted_matched_info[
            'Alt'].apply(lambda x: proxilvl(x * 100, self.lvls_dict))
        predicted_matched_info.loc[:, 'wx_alt'] = predicted_matched_info[
            'Alt'] // 10

        # Step 4: generate feature cube
        feature_cubes, feature_grid, _ = self.feature_arr_generator(
            flight_tracks=predicted_matched_info,
            shift_xleft=shift_xleft,
            shift_xright=shift_xright,
            shift_yup=shift_yup,
            shift_ydown=shift_ydown,
            nx=nx,
            ny=ny)
        # feature_grid = feature_grid - np.array([self.dep_lon, self.dep_lat])
        # feature_grid = feature_grid.reshape(-1, 20, 20, 2)
        # feature_cubes = np.concatenate((feature_cubes, feature_grid), axis = -1)

        feature_cubes = self.normalize_feature_cubes(feature_cubes)
        feature_cubes = feature_cubes.reshape(-1, 1, nx, ny, 4)

        return feature_cubes, feature_grid, predicted_matched_info
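
Step 2 above relies on the SciPy KD-tree convention that a query issued with
distance_upper_bound returns distance inf and index n (the number of points in
the tree) when no neighbour lies within the bound. A small sketch of that
matching idiom, assuming the file-time trees are scipy.spatial.cKDTree objects
built over file timestamps in seconds (all values here are hypothetical):

import numpy as np
from scipy.spatial import cKDTree

# Hypothetical wind-file timestamps and elapsed times of predicted points.
file_times = np.array([0., 3600., 7200., 10800.]).reshape(-1, 1)
query_times = np.array([100., 7300., 99999.]).reshape(-1, 1)

tree = cKDTree(file_times)
dist, idx = tree.query(query_times, p=1, distance_upper_bound=3600 * 3)

# Unmatched queries come back with idx == len(file_times) and dist == inf,
# so the validity mask is an index-in-range check rather than a distance test.
valid = idx < file_times.shape[0]
print(valid)        # [ True  True False]
print(idx[valid])   # indices of the matched files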
Example #4
    def __init__(self,
                 actual_track_datapath,
                 flight_plan_datapath,
                 flight_plan_utilize_datapath,
                 feature_cubes_datapath,
                 shuffle_or_not=True,
                 split=True,
                 batch_size=128,
                 **kwargs):
        print("State variables as in [Lat, lon, alt, cumDT, lat_spd, lon_spd]")
        self.actual_track_datapath = actual_track_datapath
        self.flight_plan_datapath = flight_plan_datapath
        self.flight_plan_utilize_datapath = flight_plan_utilize_datapath
        self.feature_cubes_datapath = feature_cubes_datapath
        self.shuffle_or_not = shuffle_or_not
        self.split = split
        self.batch_size = batch_size

        self.dep_lat = kwargs.get('dep_lat', 29.98333333)
        self.dep_lon = kwargs.get('dep_lon', -95.33333333)
        self.arr_lat = kwargs.get('arr_lat', 42.3666666667)
        self.arr_lon = kwargs.get('arr_lon', -70.9666666667)
        self.time_dim = kwargs.get('time_dim', False)

        self.direct_course = kwargs.get(
            'direct_course',
            g.inv(self.dep_lon, self.dep_lat, self.arr_lon, self.arr_lat)[0] *
            np.pi / 180)
        self.idx = kwargs.get('idx', 0)

        (self.all_tracks,
         self.all_targets,
         self.all_targets_end,
         self.all_targets_end_neg,
         self.all_seq_lens,
         self.data_mean,
         self.data_std,
         self.all_FP_tracks,
         self.all_seq_lens_FP,
         self.FP_mean,
         self.FP_std,
         self.tracks_time_id_info) = self.load_track_data()

        self.feature_cubes, self.feature_cubes_mean, self.feature_cubes_std = self.load_feature_cubes(
        )
        self.feature_cubes = np.split(self.feature_cubes,
                                      np.cumsum(self.all_seq_lens))[:-1]

        if self.shuffle_or_not:
            (self.all_tracks,
             self.all_targets,
             self.all_targets_end,
             self.all_targets_end_neg,
             self.all_seq_lens,
             self.all_FP_tracks,
             self.all_seq_lens_FP,
             self.feature_cubes,
             self.tracks_time_id_info) = shuffle(self.all_tracks,
                                                 self.all_targets,
                                                 self.all_targets_end,
                                                 self.all_targets_end_neg,
                                                 self.all_seq_lens,
                                                 self.all_FP_tracks,
                                                 self.all_seq_lens_FP,
                                                 self.feature_cubes,
                                                 self.tracks_time_id_info,
                                                 random_state=101)

        if self.split:
            (self.train_tracks, self.dev_tracks,
             self.train_targets, self.dev_targets,
             self.train_targets_end, self.dev_targets_end,
             self.train_targets_end_neg, self.dev_targets_end_neg,
             self.train_seq_lens, self.dev_seq_lens,
             self.train_FP_tracks, self.dev_FP_tracks,
             self.train_seq_lens_FP, self.dev_seq_lens_FP,
             self.train_feature_cubes, self.dev_feature_cubes,
             self.train_tracks_time_id_info,
             self.dev_tracks_time_id_info) = train_test_split(
                 self.all_tracks,
                 self.all_targets,
                 self.all_targets_end,
                 self.all_targets_end_neg,
                 self.all_seq_lens,
                 self.all_FP_tracks,
                 self.all_seq_lens_FP,
                 self.feature_cubes,
                 self.tracks_time_id_info,
                 random_state=101,
                 train_size=0.8,
                 test_size=None)

        self.train_tracks = _pad(self.train_tracks, self.train_seq_lens)
        self.train_targets = _pad(self.train_targets, self.train_seq_lens)
        self.train_targets_end = _pad(self.train_targets_end,
                                      self.train_seq_lens)
        self.train_targets_end_neg = _pad(self.train_targets_end_neg,
                                          self.train_seq_lens)

        self.train_feature_cubes = _pad(self.train_feature_cubes,
                                        self.train_seq_lens)
        self.n_train_data_set = self.train_tracks.shape[0]
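
The feature cubes above are loaded as one flat stack of per-point cubes and then
regrouped into one array per flight with np.split over the cumulative sequence
lengths. A toy sketch of that pattern (shapes are hypothetical):

import numpy as np

seq_lens = np.array([3, 2, 4])         # points per flight
flat = np.arange(9).reshape(9, 1)      # 3 + 2 + 4 stacked points

# Splitting at the cumulative sums yields one chunk per flight plus an empty
# trailing chunk, which the [:-1] drops.
per_flight = np.split(flat, np.cumsum(seq_lens))[:-1]
print([chunk.shape[0] for chunk in per_flight])  # [3, 2, 4]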
Example #5
    def ellipse(self, x0, y0, a, b, n, ax=None, **kwargs):
        """
        Draws a polygon centered at ``x0, y0``. The polygon approximates an
        ellipse on the surface of the Earth with semi-major-axis ``a`` and 
        semi-minor axis ``b`` degrees longitude and latitude, made up of 
        ``n`` vertices.

        For a description of the properties of ellipsis, please refer to [1].

        The polygon is based upon code written do plot Tissot's indicatrix
        found on the matplotlib mailing list at [2].

        Extra keyword ``ax`` can be used to override the default axis instance.

        Other \**kwargs passed on to matplotlib.patches.Polygon

        RETURNS
            poly : a maptplotlib.patches.Polygon object.

        REFERENCES
            [1] : http://en.wikipedia.org/wiki/Ellipse


        """
        ax = ax or self._check_ax()
        g = pyproj.Geod(a=self.rmajor, b=self.rminor)
        # Gets forward and back azimuths, plus the distances from the initial
        # point (x0, y0) to the ends of the semi-major and semi-minor axes.
        azf, azb, dist = g.inv([x0, x0], [y0, y0], [x0 + a, x0], [y0, y0 + b])
        tsid = dist[0] * dist[1]  # a * b

        # Initializes the list of segments, computes the azimuth increment, and
        # loops over every vertex.
        seg = [self(x0+a, y0)]
        AZ = np.linspace(azf[0], 360. + azf[0], n)
        for az in AZ:
            # Skips segments along the equator (Geod can't handle equatorial
            # arcs).
            if np.allclose(0., y0) and (np.allclose(90., az)
                                        or np.allclose(270., az)):
                continue

            # In polar coordinates, with the origin at the center of the 
            # ellipse and with the angular coordinate ``az`` measured from the
            # major axis, the ellipse's equation  is [1]:
            #
            #                           a * b
            # r(az) = ------------------------------------------
            #         ((b * cos(az))**2 + (a * sin(az))**2)**0.5
            #
            # Azimuth angle converted to radians and corrected for the
            # reference angle.
            azr = 2. * np.pi / 360. * (az + 90.)
            A = dist[0] * np.sin(azr)
            B = dist[1] * np.cos(azr)
            r = tsid / (B**2. + A**2.)**0.5
            lon, lat, azb = g.fwd(x0, y0, az, r)
            x, y = self(lon, lat)

            # Add segment if it is in the map projection region.
            if x < 1e20 and y < 1e20:
                seg.append((x, y))

        poly = Polygon(seg, **kwargs)
        ax.add_patch(poly)

        # Set axes limits to fit map region.
        self.set_axes_limits(ax=ax)

        return poly
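
This ellipse method is not part of the stock Basemap API, so the usage sketch
below assumes it has first been attached to the Basemap class (and that Polygon,
numpy and pyproj are imported in that module); the map extent and ellipse size
are hypothetical:

import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap

Basemap.ellipse = ellipse  # assumes the function above is in scope

m = Basemap(projection='merc', llcrnrlon=-100., llcrnrlat=25.,
            urcrnrlon=-65., urcrnrlat=45., resolution='l')
m.drawcoastlines()
# An ellipse of 2 degrees longitude by 1 degree latitude, 100 vertices.
m.ellipse(-95.33, 29.98, 2., 1., 100, facecolor='none', edgecolor='r')
plt.show()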