Example #1
 def test_download_icon(self):
     """Value Error if date to old"""
     try:
         with self.assertRaises(ValueError):
             download_icon_grib(dt.datetime(2020, 1, 1))
     except IOError:
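         # an IOError (e.g. the DWD server being unreachable) is tolerated
         # here so the test does not fail when the download itself is impossible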
         pass
Example #2
 def test_icon_eu_forecast_download(self):
     """Test availability of DWD icon forecast."""
     run_datetime = dt.datetime.utcnow() - dt.timedelta(hours=5)
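     # round down to the start of the last 00H or 12H model run; the 5-hour
     # offset above gives the DWD server time to publish the run completely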
     run_datetime = run_datetime.replace(hour=run_datetime.hour // 12 * 12,
                                         minute=0,
                                         second=0,
                                         microsecond=0)
     icon_file = download_icon_grib(run_datetime, max_lead_time=1)
     self.assertEqual(len(icon_file), 1)
     delete_icon_grib(run_datetime, max_lead_time=1)  # deletes icon_file
     self.assertFalse(Path(icon_file[0]).exists())
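
Outside the test suite, the same round trip can be sketched as follows. This is a minimal, hedged example: it assumes download_icon_grib and delete_icon_grib live in climada.util.dwd_icon_loader and that https://opendata.dwd.de is reachable.

    import datetime as dt
    from climada.util.dwd_icon_loader import (download_icon_grib,
                                              delete_icon_grib)

    # most recent 00H/12H run that should already be fully published
    run = dt.datetime.utcnow() - dt.timedelta(hours=5)
    run = run.replace(hour=run.hour // 12 * 12,
                      minute=0, second=0, microsecond=0)

    files = download_icon_grib(run, max_lead_time=1)  # downloaded .bz2 paths
    delete_icon_grib(run, max_lead_time=1)            # remove them again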
Example #3
    def from_icon_grib(cls,
                       run_datetime,
                       event_date=None,
                       model_name='icon-eu-eps',
                       description=None,
                       grib_dir=None,
                       delete_raw_data=True,
                       intensity_thres=None):
        """Create new StormEurope object from DWD icon weather forecast footprints.

        New files are available for 24 hours on
        https://opendata.dwd.de; older files can be processed if they are
        already stored in grib_dir.
        One event is one full day in UTC. The current setup works for runs
        starting at 00H and 12H; for other starting times the aggregation
        is inaccurate because the files provide a mix of 1-hour, 3-hour,
        and 6-hour maxima.

        The frequency of each event is 1/(number of ensemble members).

        Parameters
        ----------
        run_datetime : datetime
            starting time point of the forecast run
            of the icon model
        event_date : datetime, optional
            one day within the forecast period; only this day (00H-24H)
            will be included in the hazard
        model_name : str, optional
            name of the icon model to be downloaded. Must match the URL
            on https://opendata.dwd.de
            (see download_icon_grib for further info)
        description : str, optional
            description of the events, defaults to a combination of
            model_name and run_datetime
        grib_dir : str, optional
            path to the folder where grib files are or should be stored
        delete_raw_data : bool, optional
            if True (default), the downloaded raw data in .grib.bz2 file
            format is removed after processing; if False, it is kept on disk
        intensity_thres : float, optional
            Intensity threshold for storage in m/s. Default: class attribute
            StormEurope.intensity_thres (same as used by WISC SSI calculations)

        Returns
        -------
        haz : StormEurope
            StormEurope object with data from DWD icon weather forecast footprints.
        """
        intensity_thres = cls.intensity_thres if intensity_thres is None else intensity_thres

        haz = cls()
        if run_datetime.hour not in (0, 12):
            LOGGER.warning('The event definition is inaccurately implemented '
                           'for starting times that are not 00H or 12H.')
        # download files, if they don't already exist
        file_names = download_icon_grib(run_datetime,
                                        model_name=model_name,
                                        download_dir=grib_dir)

        # create centroids
        nc_centroids_file = download_icon_centroids_file(model_name, grib_dir)
        haz.centroids = haz._centroids_from_nc(nc_centroids_file)

        # read intensity from files
        for ind_i, file_i in enumerate(file_names):
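            # drop the '.bz2' suffix (4 characters) to get the grib file path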
            gribfile_path_i = Path(file_i[:-4])
            with open(file_i, 'rb') as source, open(gribfile_path_i,
                                                    'wb') as dest:
                dest.write(bz2.decompress(source.read()))
            ds_i = xr.open_dataset(gribfile_path_i, engine='cfgrib')
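            # concatenate the per-file datasets along 'valid_time'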
            if ind_i == 0:
                stacked = ds_i
            else:
                stacked = xr.concat([stacked, ds_i], 'valid_time')

        # create intensity matrix with max for each full day
        stacked = stacked.assign_coords(
            date=('valid_time', stacked["valid_time"].dt.floor("D").values))
        if event_date:
            try:
                stacked = stacked.sel(valid_time=event_date.strftime(
                    '%Y-%m-%d')).groupby('date').max()
            except KeyError:
                raise ValueError('Extraction of date and coordinates failed. '
                                 'This is most likely because '
                                 'the selected event_date {} is not contained'
                                 ' in the weather forecast selected by '
                                 'run_datetime {}. Please adjust event_date'
                                 ' or run_datetime.'.format(
                                     event_date.strftime('%Y-%m-%d'),
                                     run_datetime.strftime('%Y-%m-%d %H:%M')))

            considered_dates = np.datetime64(event_date)
        else:
            time_covered_step = stacked['valid_time'].diff('valid_time')
            time_covered_day = time_covered_step.groupby('date').sum()
            # forecast run should cover at least 18 hours of a day
            considered_dates_bool = time_covered_day >= np.timedelta64(18, 'h')
            stacked = stacked.groupby('date').max().sel(
                date=considered_dates_bool)
            considered_dates = stacked['date'].values
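        # one hazard event per (day, ensemble member) combination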
        stacked = stacked.stack(date_ensemble=('date', 'number'))
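        # set gusts at or below the intensity threshold to zero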
        stacked = stacked.where(stacked > intensity_thres)
        stacked = stacked.fillna(0)

        # fill in values from the forecast data
        haz.intensity = sparse.csr_matrix(stacked.gust.T)
        haz.event_id = np.arange(stacked.date_ensemble.size) + 1

        # fill in default values
        haz.units = 'm/s'
        haz.fraction = haz.intensity.copy().tocsr()
        haz.fraction.data.fill(1)
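        # flag events of ensemble member 1 as 'original'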
        haz.orig = np.zeros_like(haz.event_id, dtype=bool)
        haz.orig[(stacked.number == 1).values] = True

        haz.date = np.repeat(np.array(datetime64_to_ordinal(considered_dates)),
                             np.unique(stacked.number).size)
        haz.event_name = [
            date_i + '_ens' + str(ens_i) for date_i, ens_i in zip(
                date_to_str(haz.date), stacked.number.values)
        ]
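        # each event carries a frequency of 1/(number of ensemble members)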
        haz.frequency = np.divide(np.ones_like(haz.event_id),
                                  np.unique(stacked.number).size)
        if not description:
            description = ('icon weather forecast windfield ' +
                           'for run started at ' +
                           run_datetime.strftime('%Y%m%d%H'))

        haz.tag = TagHazard(HAZ_TYPE,
                            'Hazard set not saved, too large to pickle',
                            description=description)
        haz.check()

        # delete the decompressed .grib2 files and the cfgrib .idx index files
        for file_i in file_names:
            gribfile_path_i = Path(file_i[:-4])
            idxfile_path_i = next(
                gribfile_path_i.parent.glob(
                    str(gribfile_path_i.name) + '.*.idx'))
            gribfile_path_i.unlink()
            idxfile_path_i.unlink()

        if delete_raw_data:
            # delete downloaded .bz2 files
            delete_icon_grib(run_datetime,
                             model_name=model_name,
                             download_dir=grib_dir)

        return haz
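
A minimal usage sketch for this constructor (hedged: it assumes StormEurope is importable from climada.hazard and that the chosen run is still available on https://opendata.dwd.de):

    import datetime as dt
    from climada.hazard import StormEurope

    # most recent 00H/12H run, mirroring the test above
    run = dt.datetime.utcnow() - dt.timedelta(hours=5)
    run = run.replace(hour=run.hour // 12 * 12,
                      minute=0, second=0, microsecond=0)

    haz = StormEurope.from_icon_grib(run, model_name='icon-eu-eps')
    print(haz.event_name[:3])  # e.g. ['2024-01-01_ens1', ...]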