Example #1
    def find_model_patch_tracks(self):
        """
        Identify storms in gridded model output and extract uniform-sized patches around the storm centers of mass.

        Returns:
            list: tracked storm objects with the extracted patches at each time step.
        """
        self.model_grid.load_data()
        tracked_model_objects = []
        model_objects = []
        if self.model_grid.data is None:
            print("No model output found")
            return tracked_model_objects
        min_orig = self.model_ew.min_thresh
        max_orig = self.model_ew.max_thresh
        data_increment_orig = self.model_ew.data_increment
        self.model_ew.min_thresh = 0
        self.model_ew.data_increment = 1
        self.model_ew.max_thresh = 100
        for h, hour in enumerate(self.hours):
            # Identify storms at each time step and apply size filter
            print("Finding {0} objects for run {1} Hour: {2:02d}".format(self.ensemble_member,
                                                                         self.run_date.strftime("%Y%m%d%H"), hour))
            if self.mask is not None:
                model_data = self.model_grid.data[h] * self.mask
            else:
                model_data = self.model_grid.data[h]
            model_data[:self.patch_radius] = 0
            model_data[-self.patch_radius:] = 0
            model_data[:, :self.patch_radius] = 0
            model_data[:, -self.patch_radius:] = 0
            scaled_data = np.array(rescale_data(model_data, min_orig, max_orig))
            hour_labels = label_storm_objects(scaled_data, "ew",
                                              self.model_ew.min_thresh, self.model_ew.max_thresh,
                                              min_area=self.size_filter, max_area=self.model_ew.max_size,
                                              max_range=self.model_ew.delta, increment=self.model_ew.data_increment,
                                              gaussian_sd=self.gaussian_window)
            model_objects.extend(extract_storm_patches(hour_labels, model_data, self.model_grid.x,
                                                       self.model_grid.y, [hour],
                                                       dx=self.model_grid.dx,
                                                       patch_radius=self.patch_radius))
            for model_obj in model_objects[-1]:
                dims = model_obj.timesteps[-1].shape
                if h > 0:
                    model_obj.estimate_motion(hour, self.model_grid.data[h-1], dims[1], dims[0])
            del scaled_data
            del model_data
            del hour_labels
        tracked_model_objects.extend(track_storms(model_objects, self.hours,
                                                  self.object_matcher.cost_function_components,
                                                  self.object_matcher.max_values,
                                                  self.object_matcher.weights))
        self.model_ew.min_thresh = min_orig
        self.model_ew.max_thresh = max_orig
        self.model_ew.data_increment = data_increment_orig
        return tracked_model_objects
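For orientation, below is a minimal, self-contained sketch of the per-hour steps in Example #1: zero out a border of width patch_radius so every patch fits inside the grid, label the field, and cut fixed-size patches around each labeled object. The hagelslag.processing.tracker import path, the synthetic field, and the "hyst" thresholds are assumptions for illustration only; the method above labels a rescaled field with its enhanced-watershed ("ew") settings instead.

import numpy as np
from hagelslag.processing.tracker import label_storm_objects, extract_storm_patches  # import path assumed

patch_radius = 16
data = np.zeros((200, 200))
data[90:120, 90:120] = 55.0                # synthetic storm placed away from the border

# Zero the domain edges to a width of patch_radius, as in the loop above,
# so patches centered near the boundary never run off the grid.
data[:patch_radius] = 0
data[-patch_radius:] = 0
data[:, :patch_radius] = 0
data[:, -patch_radius:] = 0

x, y = np.meshgrid(np.arange(200), np.arange(200))
labels = label_storm_objects(data, "hyst", 10, 50, min_area=2, max_area=2000)
patches = extract_storm_patches(labels, data, x, y, [0],
                                dx=1, patch_radius=patch_radius)
print("patches at hour 0:", len(patches[0]))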
Example #2
 def test_object_identification(self):
     min_thresh = 10
     max_thresh = 50
     label_grid = label_storm_objects(self.model_grid.data[0], "hyst",
                                      min_thresh, max_thresh, min_area=2, max_area=100)
     label_points = self.model_grid.data[0][label_grid > 0]
     self.assertGreaterEqual(label_points.min(), min_thresh, "Labeled points include those below minimum threshold")
     storm_objs = extract_storm_objects(label_grid, self.model_grid.data[0], self.model_grid.x,
                                        self.model_grid.y, np.array([0]))
     self.assertEqual(len(storm_objs[0]), label_grid.max(), "Storm objects do not match number of labeled objects")
Example #3
 def test_object_identification(self):
     min_thresh = 10
     max_thresh = 50
     label_grid = label_storm_objects(self.model_grid.data[0],
                                      "hyst",
                                      min_thresh,
                                      max_thresh,
                                      min_area=2,
                                      max_area=100)
     label_points = self.model_grid.data[0][label_grid > 0]
     self.assertGreaterEqual(
         label_points.min(), min_thresh,
         "Labeled points include those below minimum threshold")
     storm_objs = extract_storm_objects(label_grid, self.model_grid.data[0],
                                        self.model_grid.x,
                                        self.model_grid.y, np.array([0]))
     self.assertEqual(
         len(storm_objs[0]), label_grid.max(),
         "Storm objects do not match number of labeled objects")
Example #4
 def create_patches_hourly(self, num, data, lats, lons, thetimes,
                           times_thisfile):
     """Function to find storm patches in WRF CONUS1 dataset. Saves output to Xarray dataset with metadata.
     
     Args:
         num (int): Number of job assignment (enumerated loop).
         data (numpy array): dBZ data to use for storm patch extraction.
         lats (numpy array): Latitudes of the dBZ data used for storm patch extraction.
         lons (numpy array): Longitudes of the dBZ data used for storm patch extraction.
         thetimes (numpy array): Time indices of the full time period of the climate simulations (2000-2013).
         times_thisfile (DatetimeIndex): Pandas date range.
         
     Returns:
         num (int): Number of job assignment (enumerated loop).
     
     """
     thelabels = label_storm_objects(data,
                                     method=self.method,
                                     min_intensity=self.min_dbz,
                                     max_intensity=self.max_dbz,
                                     min_area=1,
                                     max_area=100,
                                     max_range=1,
                                     increment=1,
                                     gaussian_sd=0)
     print(num, "Postlabel")
     storm_objs = extract_storm_patches(label_grid=thelabels,
                                        data=data,
                                        x_grid=lons,
                                        y_grid=lats,
                                        times=thetimes,
                                        dx=1,
                                        dt=1,
                                        patch_radius=self.patch_radius)
     print(num, f"Done {times_thisfile[num].strftime('%Y-%m-%d')}")
     data_assemble = xr.Dataset(
         {
             'grid':
             (['starttime', 'y', 'x'],
              np.array([
                  other.timesteps[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'mask':
             (['starttime', 'y', 'x'],
              np.array([
                  other.masks[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'row_indices':
             (['starttime', 'y', 'x'],
              np.array([
                  other.i[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'col_indices':
             (['starttime', 'y', 'x'],
              np.array([
                  other.j[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'lats':
             (['starttime', 'y', 'x'],
              np.array([
                  other.y[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'lons':
             (['starttime', 'y', 'x'],
              np.array([
                  other.x[0] for obj in storm_objs
                  for other in obj if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
         },
         coords={
             'starttime':
             (['starttime'],
              np.array([
                  other.start_time for obj in storm_objs for other in obj
                  if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
              'endtime':
              (['starttime'],
               np.array([
                   other.end_time for obj in storm_objs for other in obj
                   if other.timesteps[0].shape[0] *
                   other.timesteps[0].shape[1] == self.total_pixels()
               ])),
             'x_speed':
             (['starttime'],
              np.array([
                  other.u[0]
                  for obj in storm_objs for other in obj
                  if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ])),
             'y_speed':
             (['starttime'],
              np.array([
                  other.v[0]
                  for obj in storm_objs for other in obj
                  if other.timesteps[0].shape[0] *
                  other.timesteps[0].shape[1] == self.total_pixels()
              ]))
         })
     data_assemble.to_netcdf(
         f"/{self.destination_path}/{self.climate}_SPhourly_{times_thisfile[num].strftime('%Y%m%d')}.nc"
     )
     return num
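For completeness, here is a hedged sketch of opening one of the files written by to_netcdf above with xarray. The file name below is hypothetical and only mirrors the {climate}_SPhourly_{YYYYMMDD}.nc pattern; the variable and dimension names come from the Dataset constructed above.

import xarray as xr

# Hypothetical path following the naming pattern used in create_patches_hourly above.
ds = xr.open_dataset("/destination_path/current_SPhourly_20050601.nc")

print(ds)                                     # dims: starttime, y, x; variables: grid, mask, lats, lons, ...
first_patch = ds["grid"].isel(starttime=0)    # one uniform-size storm patch of dBZ values
print(first_patch.shape, float(first_patch.max()))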