Example 1
def calc_tc(expo_dict, tracks, data_dir):
    """ Compute tropical cyclone events from tracks at every island group,
    if not contained in data_dir. """
    try:
        abs_path = os.path.join(data_dir, 'tc_isl.p')
        with open(abs_path, 'rb') as f:
            tc_dict = pickle.load(f)
        print('Loaded tc_isl:', len(tc_dict))
    except FileNotFoundError:
        all_isl = BlackMarble()
        for ent_iso, ent_val in expo_dict.items():
            all_isl.append(ent_val)

        centr = Centroids()
        centr.coord = all_isl.coord
        centr.id = np.arange(centr.lat.size) + 1
        centr.region_id = all_isl.region_id

        tc = TropCyclone()
        tc.set_from_tracks(tracks, centr)

        tc_dict = dict()
        for ent_iso, ent_val in expo_dict.items():
            reg_id = np.unique(ent_val.region_id)[0]
            tc_dict[ent_iso] = tc.select(reg_id=reg_id)

        save(os.path.join(data_dir, 'tc_isl.p'), tc_dict)

    return tc_dict
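A hedged call-site sketch for the function above; `expo_dict` (ISO code to BlackMarble exposure), `tracks` (a TCTracks instance) and `data_dir` are assumptions about the caller's context:

# hypothetical driver: the first call computes and pickles the hazards,
# later calls load the cached tc_isl.p instead
tc_dict = calc_tc(expo_dict, tracks, data_dir)
for iso, tc_haz in tc_dict.items():
    print(iso, tc_haz.intensity.shape)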
Example 2
def irma_tc(exp, data_irma):
    centr = Centroids()
    centr.set_lat_lon(exp.latitude.values, exp.longitude.values)

    tc_irma = TropCyclone()
    data_irma.equal_timestep(0.1)
    tc_irma.set_from_tracks(data_irma, centr)

    return tc_irma
Example 3
def irma_tc(exp, data_irma):
    centr = Centroids()
    centr.coord = exp.coord
    centr.id = np.arange(centr.lat.size)

    tc_irma = TropCyclone()
    data_irma.equal_timestep(0.1)
    tc_irma.set_from_tracks(data_irma, centr)

    return tc_irma
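Both variants assume a prepared TCTracks object; a hedged driver sketch using the pre-1.x CLIMADA API seen in these examples, with Irma's IBTrACS storm id as used in CLIMADA's tutorials:

from climada.hazard import TCTracks

# hypothetical driver: fetch Hurricane Irma's track from IBTrACS, then
# compute the wind hazard at an exposure's coordinates; `exp` is assumed
# to be an Exposures object prepared elsewhere
data_irma = TCTracks()
data_irma.read_ibtracs_netcdf(provider='usa', storm_id='2017242N16333')
tc_irma = irma_tc(exp, data_irma)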
Example 4
    def centroids(self, latlon=None, res_as=360):
        """Return centroids in this region

        Parameters
        ----------
        latlon : pair (lat, lon), optional
            Latitude and longitude of centroids.
            If not given, values are taken from CLIMADA's base grid (see `res_as`).
        res_as : int, optional
            One of 150 or 360. When `latlon` is not given, choose coordinates from centroids
            according to CLIMADA's base grid of given resolution in arc-seconds. Default: 360.

        Returns
        -------
        centroids : climada.hazard.Centroids object
        """
        if latlon is None:
            centroids = Centroids.from_base_grid(res_as=res_as)
            centroids.set_meta_to_lat_lon()
            lat, lon = centroids.lat, centroids.lon
        else:
            lat, lon = latlon
            centroids = Centroids()
            centroids.set_lat_lon(lat, lon)
        msk = shapely.vectorized.contains(self.shape, lon, lat)
        centroids = centroids.select(sel_cen=msk)
        centroids.id = np.arange(centroids.lon.shape[0])
        return centroids
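A minimal usage sketch, assuming `region` is an instance of the (unnamed) class this method belongs to, with a shapely geometry stored in `self.shape`:

# hypothetical usage: centroids restricted to the region's shape
cent = region.centroids()                       # CLIMADA base grid, 360 arcsec
cent_fine = region.centroids(res_as=150)        # finer base grid
cent_own = region.centroids(latlon=(lat, lon))  # caller-supplied coordinates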
Example 5
def calc_tc(expo_dict, tracks, data_dir, pool):
    """ Compute tropical cyclone events from tracks at every island group,
    if not contained in data_dir. """
    try:
        abs_path = os.path.join(data_dir, 'tc_isl.p')
        with open(abs_path, 'rb') as f:
            tc_dict = pickle.load(f)
        print('Loaded tc_isl:', len(tc_dict))
    except FileNotFoundError:
        all_isl = BlackMarble(pd.concat(list(expo_dict.values())))

        centr = Centroids()
        centr.set_lat_lon(all_isl.latitude.values, all_isl.longitude.values)
        centr.region_id = all_isl.region_id.values
        centr.check()

        tc = TropCyclone(pool)
        tc.set_from_tracks(tracks, centr)

        tc_dict = dict()
        for ent_iso, ent_val in expo_dict.items():
            reg_id = np.unique(ent_val.region_id)[0]
            tc_dict[ent_iso] = tc.select(reg_id=reg_id)

        save(os.path.join(data_dir, 'tc_isl.p'), tc_dict)

    return tc_dict
Example 6
    def test_fraction_on_land(self):
        """Test _fraction_on_land helper function."""
        res_deg = 10 / (60 * 60)
        bounds = (-149.54, -23.42, -149.40, -23.33)
        lat = np.arange(bounds[1] + 0.5 * res_deg, bounds[3], res_deg)
        lon = np.arange(bounds[0] + 0.5 * res_deg, bounds[2], res_deg)
        shape = (lat.size, lon.size)
        lon, lat = [ar.ravel() for ar in np.meshgrid(lon, lat)]
        centroids = Centroids()
        centroids.set_lat_lon(lat, lon)
        centroids.set_dist_coast(signed=True, precomputed=True)

        dem_bounds = (bounds[0] - 1, bounds[1] - 1, bounds[2] + 1, bounds[3] + 1)
        dem_res = 3 / (60 * 60)
        with tmp_artifical_topo(dem_bounds, dem_res) as topo_path:
            fraction = _fraction_on_land(centroids, topo_path)
        fraction = fraction.reshape(shape)
        dist_coast = centroids.dist_coast.reshape(shape)

        # check valid range and order of magnitude
        self.assertTrue(np.all((fraction >= 0) & (fraction <= 1)))
        np.testing.assert_array_equal(fraction[dist_coast > 1000], 0)
        np.testing.assert_array_equal(fraction[dist_coast < -1000], 1)

        # check individual known pixel values
        self.assertAlmostEqual(fraction[24, 10], 0.0)
        self.assertAlmostEqual(fraction[22, 11], 0.21)
        self.assertAlmostEqual(fraction[22, 12], 0.93)
        self.assertAlmostEqual(fraction[21, 14], 1.0)
Example 7
    def get_centroids(self,
                      res_arcsec_land=150,
                      res_arcsec_ocean=1800,
                      extent=(-180, 180, -60, 60),
                      country=None,
                      version=None,
                      dump_dir=SYSTEM_DIR):
        """Get centroids from teh API

        Parameters
        ----------
        res_arcsec_land : int
            resolution for land centroids in arcsec. Default is 150
        res_arcsec_ocean : int
            resolution for ocean centroids in arcsec. Default is 1800
        extent : tuple
            Format (min_lon, max_lon, min_lat, max_lat) tuple.
            If min_lon > max_lon, the extent crosses the antimeridian and is
            [min_lon, 180] + [-180, max_lon].
            Borders are inclusive. Default is (-180, 180, -60, 60).
        country : str
            country name, numeric code or ISO code based on pycountry. Default is None (global).
        version : str, optional
            the version of the dataset
            Default: newest version meeting the requirements
        dump_dir : str
            directory where the files should be downloaded. Default: SYSTEM_DIR

        Returns
        -------
        climada.hazard.centroids.Centroids
            Centroids from the API
        """

        properties = {
            'res_arcsec_land': str(res_arcsec_land),
            'res_arcsec_ocean': str(res_arcsec_ocean),
            'extent': '(-180, 180, -90, 90)'
        }
        dataset = self.get_dataset_info('centroids',
                                        version=version,
                                        properties=properties)
        target_dir = self._organize_path(dataset, dump_dir) \
                     if dump_dir == SYSTEM_DIR else dump_dir
        centroids = Centroids.from_hdf5(
            self._download_file(target_dir, dataset.files[0]))
        if country:
            reg_id = pycountry.countries.lookup(country).numeric
            centroids = centroids.select(reg_id=int(reg_id), extent=extent)
        elif extent:
            centroids = centroids.select(extent=extent)

        return centroids
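A usage sketch, assuming this method belongs to CLIMADA's data-API client (climada.util.api_client.Client):

from climada.util.api_client import Client

# hypothetical call: 150 arcsec land / 1800 arcsec ocean centroids,
# clipped to Haiti; the file is cached under SYSTEM_DIR by default
centroids = Client().get_centroids(res_arcsec_land=150, country='HTI')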
Example 8
def irma_tc(exp, data_irma):
    centr = Centroids()
    centr.coord = np.zeros((exp.latitude.size, 2))
    centr.coord[:, 0] = exp.latitude
    centr.coord[:, 1] = exp.longitude
    centr.id = np.arange(centr.lat.size)

    tc_irma = TropCyclone()
    data_irma.equal_timestep(0.1)
    tc_irma.set_from_tracks(data_irma, centr)

    return tc_irma
Example 9
    def test_haz_max_events(self):
        """Test haz_max_events function"""
        hazard = Hazard('TC')
        hazard.centroids = Centroids()
        hazard.centroids.set_lat_lon(np.array([1, 3, 5]), np.array([2, 4, 6]))
        hazard.event_id = np.array([1, 2, 3, 4])
        hazard.event_name = ['ev1', 'ev2', 'ev3', 'ev4']
        hazard.date = np.array([1, 3, 5, 7])
        hazard.intensity = sp.csr_matrix(
            [[0, 0, 4], [1, 0, 1], [43, 21, 0], [0, 53, 1]])
        data = stats.haz_max_events(hazard, min_thresh=18)
        self.assertSequenceEqual(data['id'].tolist(), [2, 3])
        self.assertSequenceEqual(data['name'].tolist(), ["ev3", "ev4"])
        self.assertSequenceEqual(data['year'].tolist(), [1, 1])
        self.assertSequenceEqual(data['month'].tolist(), [1, 1])
        self.assertSequenceEqual(data['day'].tolist(), [5, 7])
        self.assertSequenceEqual(data['lat'].tolist(), [1, 3])
        self.assertSequenceEqual(data['lon'].tolist(), [2, 4])
        self.assertSequenceEqual(data['intensity'].tolist(), [43, 53])
Example 10
    def __init__(self, bounds, res_deg):
        """Read distance to coast values from raster file for later use.

        Parameters
        ----------
        bounds : tuple (lon_min, lat_min, lon_max, lat_max)
            Coordinates of bounding box
        res_deg : float
            Resolution in degrees
        """
        lat = np.arange(bounds[3] - 0.5 * res_deg, bounds[1], -res_deg)
        lon = np.arange(bounds[0] + 0.5 * res_deg, bounds[2], res_deg)
        self.shape = (lat.size, lon.size)
        self.transform = rasterio.Affine(res_deg, 0, bounds[0], 0, -res_deg, bounds[3])
        centroids = Centroids()
        centroids.set_lat_lon(*[ar.ravel() for ar in np.meshgrid(lon, lat)][::-1])
        centroids.set_dist_coast(signed=True, precomputed=True)
        self.dist_coast = centroids.dist_coast
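Because the reader keeps `shape` and a rasterio Affine transform next to the flat `dist_coast` array, the values can be exported as a georeferenced raster; a sketch under that assumption, with `reader` an instance of the class above:

import rasterio

# hypothetical export: reshape the flat distances back to the grid
# and write them as a single-band GeoTIFF using the stored transform
grid = reader.dist_coast.reshape(reader.shape).astype('float32')
with rasterio.open('dist_coast.tif', 'w', driver='GTiff',
                   height=reader.shape[0], width=reader.shape[1],
                   count=1, dtype='float32', crs='EPSG:4326',
                   transform=reader.transform) as dst:
    dst.write(grid, 1)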
Example 11
    def test_surge_from_track(self):
        """Test TCSurgeBathtub.from_tc_winds function."""
        # similar to IBTrACS 2010029S12177 (OLI, 2010) hitting Tubuai, but much stronger
        track = xr.Dataset({
            'radius_max_wind': ('time', [15., 15, 15, 15, 15, 17, 20, 20]),
            'radius_oci': ('time', [202., 202, 202, 202, 202, 202, 202, 202]),
            'max_sustained_wind': ('time', [155., 147, 140, 135, 130, 122, 115, 116]),
            'central_pressure': ('time', [894., 901, 906, 909, 913, 918, 924, 925]),
            'environmental_pressure': ('time', np.full((8,), 1004.0, dtype=np.float64)),
            'time_step': ('time', np.full((8,), 3.0, dtype=np.float64)),
        }, coords={
            'time': np.arange('2010-02-05T09:00', '2010-02-06T09:00',
                              np.timedelta64(3, 'h'), dtype='datetime64[h]'),
            'lat': ('time', [-24.33, -25.54, -24.79, -24.05,
                             -23.35, -22.7, -22.07, -21.50]),
            'lon': ('time', [-147.27, -148.0, -148.51, -148.95,
                             -149.41, -149.85, -150.27, -150.56]),
        }, attrs={
            'max_sustained_wind_unit': 'kn',
            'central_pressure_unit': 'mb',
            'name': 'test',
            'sid': '2010029S12177_test',
            'orig_event_flag': True,
            'data_provider': 'unit_test',
            'basin': 'SP',
            'id_no': 0,
            'category': 4,
        })
        tc_track = TCTracks()
        tc_track.data = [track]
        tc_track.equal_timestep(time_step_h=1)

        res_deg = 10 / (60 * 60)
        bounds = (-149.54, -23.42, -149.40, -23.33)
        lat = np.arange(bounds[1] + 0.5 * res_deg, bounds[3], res_deg)
        lon = np.arange(bounds[0] + 0.5 * res_deg, bounds[2], res_deg)
        shape = (lat.size, lon.size)
        lon, lat = [ar.ravel() for ar in np.meshgrid(lon, lat)]
        centroids = Centroids()
        centroids.set_lat_lon(lat, lon)
        centroids.set_dist_coast(signed=True, precomputed=True)

        wind_haz = TropCyclone()
        wind_haz.set_from_tracks(tc_track, centroids=centroids)

        dem_bounds = (bounds[0] - 1, bounds[1] - 1, bounds[2] + 1, bounds[3] + 1)
        dem_res = 3 / (60 * 60)
        with tmp_artifical_topo(dem_bounds, dem_res) as topo_path:
            for slr in [0, 0.5, 1.5]:
                surge_haz = TCSurgeBathtub.from_tc_winds(wind_haz, topo_path,
                                                         add_sea_level_rise=slr)
                inten = surge_haz.intensity.toarray().reshape(shape)
                fraction = surge_haz.fraction.toarray().reshape(shape)

                # check valid range and order of magnitude
                np.testing.assert_array_equal(inten >= 0, True)
                np.testing.assert_array_equal(inten <= 10, True)
                np.testing.assert_array_equal((fraction >= 0) & (fraction <= 1), True)
                np.testing.assert_array_equal(inten[fraction == 0], 0)

                # check individual known pixel values
                self.assertAlmostEqual(inten[9, 31], max(-0.391 + slr, 0), places=2)
                self.assertAlmostEqual(inten[14, 34] - slr, 3.637, places=2)
Example 12
def main(path, debug, remote_directory, typhoonname):
    initialize.setup_cartopy()
    start_time = datetime.now()
    print(
        '---------------------AUTOMATION SCRIPT STARTED---------------------------------'
    )
    print(str(start_time))
    #%% check for active typhoons
    print(
        '---------------------check for active typhoons---------------------------------'
    )
    print(str(start_time))
    remote_dir = remote_directory
    if debug:
        typhoonname = 'SURIGAE'
        remote_dir = '20210421120000'
        logger.info(f"DEBUGGING piepline for typhoon{typhoonname}")
        Activetyphoon = [typhoonname]
    else:
        # If passed typhoon name is None or empty string
        if not typhoonname:
            Activetyphoon = Check_for_active_typhoon.check_active_typhoon()
            if not Activetyphoon:
                logger.info("No active typhoon in PAR stop pipeline")
                sys.exit()
            logger.info(f"Running on active Typhoon(s) {Activetyphoon}")
        else:
            Activetyphoon = [typhoonname]
            remote_dir = remote_directory
            logger.info(f"Running on custom Typhoon {Activetyphoon}")

    Alternative_data_point = (start_time -
                              timedelta(hours=24)).strftime("%Y%m%d")

    date_dir = start_time.strftime("%Y%m%d%H")
    Input_folder = os.path.join(path, f'forecast/Input/{date_dir}/Input/')
    Output_folder = os.path.join(path, f'forecast/Output/{date_dir}/Output/')

    if not os.path.exists(Input_folder):
        os.makedirs(Input_folder)
    if not os.path.exists(Output_folder):
        os.makedirs(Output_folder)
    #download NOAA rainfall
    try:
        #Rainfall_data_window.download_rainfall_nomads(Input_folder,path,Alternative_data_point)
        Rainfall_data.download_rainfall_nomads(Input_folder, path,
                                               Alternative_data_point)
        rainfall_error = False
    except Exception:
        traceback.print_exc()
        #logger.warning(f'Rainfall download failed, performing download in R script')
        logger.info(
            'Rainfall download failed, performing download in R script')
        rainfall_error = True
    ###### download UCL data

    try:
        ucl_data.create_ucl_metadata(path, os.environ['UCL_USERNAME'],
                                     os.environ['UCL_PASSWORD'])
        ucl_data.process_ucl_data(path, Input_folder,
                                  os.environ['UCL_USERNAME'],
                                  os.environ['UCL_PASSWORD'])
    except Exception:
        logger.info('UCL download failed')
    #%%
    ## Create grid points to calculate windfield
    cent = Centroids()
    cent.set_raster_from_pnt_bounds((118, 6, 127, 19), res=0.05)
    # this option is added to make the script scalable globally (TODO)
    #cent.set_raster_from_pnt_bounds((LonMin,LatMin,LonMax,LatMax), res=0.05)
    cent.check()
    cent.plot()
    ####
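    # build a GeoDataFrame of centroid points and spatially join it with the
    # admin-3 polygons, so every centroid carries its adm3_pcode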
    admin = gpd.read_file(
        os.path.join(path, "./data-raw/phl_admin3_simpl2.geojson"))
    df = pd.DataFrame(data=cent.coord)
    df["centroid_id"] = "id" + (df.index).astype(str)
    centroid_idx = df["centroid_id"].values
    ncents = cent.size
    df = df.rename(columns={0: "lat", 1: "lon"})
    df = gpd.GeoDataFrame(df, geometry=gpd.points_from_xy(df.lon, df.lat))
    #df.to_crs({'init': 'epsg:4326'})
    df.crs = {'init': 'epsg:4326'}
    df_admin = sjoin(df, admin, how="left").dropna()

    # Sometimes the ECMWF ftp server complains about too many requests
    # This code allows several retries with some sleep time in between
    n_tries = 0
    while True:
        try:
            logger.info("Downloading ECMWF typhoon tracks")
            bufr_files = TCForecast.fetch_bufr_ftp(remote_dir=remote_dir)
            fcast = TCForecast()
            fcast.fetch_ecmwf(files=bufr_files)
        except ftplib.all_errors as e:
            n_tries += 1
            if n_tries >= ECMWF_MAX_TRIES:
                logger.error(
                    f' Data downloading from ECMWF failed: {e}, '
                    f'reached limit of {ECMWF_MAX_TRIES} tries, exiting')
                sys.exit()
            logger.error(
                f' Data downloading from ECMWF failed: {e}, retrying after {ECMWF_SLEEP} s'
            )
            time.sleep(ECMWF_SLEEP)
            continue
        break

    #%% filter data downloaded in the above step for active typhoons in PAR
    # filter tracks with name of current typhoons and drop tracks with only one timestep
    fcast.data = [
        track_data_clean.track_data_clean(tr) for tr in fcast.data
        if (tr.time.size > 1 and tr.name in Activetyphoon)
    ]

    # fcast.data = [tr for tr in fcast.data if tr.name in Activetyphoon]
    # fcast.data = [tr for tr in fcast.data if tr.time.size>1]
    for typhoons in Activetyphoon:
        #typhoons=Activetyphoon[0]
        logger.info(f'Processing data {typhoons}')
        fname = open(
            os.path.join(path, 'forecast/Input/',
                         "typhoon_info_for_model.csv"), 'w')
        fname.write('source,filename,event,time' + '\n')
        if not rainfall_error:
            line_ = 'Rainfall,' + '%srainfall' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            fname.write(line_ + '\n')

        line_ = 'Output_folder,' + '%s' % Output_folder + ',' + typhoons + ',' + date_dir  #StormName #
        #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
        fname.write(line_ + '\n')

        #typhoons='SURIGAE'  # to run it manually for any typhoon
        # select windspeed for HRS model

        fcast.data = [tr for tr in fcast.data if tr.name == typhoons]
        tr_HRS = [tr for tr in fcast.data if (tr.is_ensemble == 'False')]

        if tr_HRS != []:
            # 0.84 is the conversion factor from ECMWF 10-min to 1-min average wind
            HRS_SPEED = (tr_HRS[0].max_sustained_wind.values / 0.84).tolist()
            dfff = tr_HRS[0].to_dataframe()
            dfff[['VMAX', 'LAT',
                  'LON']] = dfff[['max_sustained_wind', 'lat', 'lon']]
            dfff['YYYYMMDDHH'] = dfff.index.values
            dfff['YYYYMMDDHH'] = dfff['YYYYMMDDHH'].apply(
                lambda x: x.strftime("%Y%m%d%H%M"))
            dfff['STORMNAME'] = typhoons
            dfff[['YYYYMMDDHH', 'VMAX', 'LAT', 'LON',
                  'STORMNAME']].to_csv(os.path.join(Input_folder,
                                                    'ecmwf_hrs_track.csv'),
                                       index=False)
            line_ = 'ecmwf,' + '%secmwf_hrs_track.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
            fname.write(line_ + '\n')
            # Adjust track time step
            data_forced = [
                tr.where(tr.time <= max(tr_HRS[0].time.values), drop=True)
                for tr in fcast.data
            ]
            # data_forced = [track_data_clean.track_data_force_HRS(tr,HRS_SPEED) for tr in data_forced] # forced with HRS windspeed

            #data_forced= [track_data_clean.track_data_clean(tr) for tr in fcast.data] # taking speed of ENS
            # interpolate to 3h steps from the original 6h
            #fcast.equal_timestep(3)
        else:
            len_ar = np.min([len(var.lat.values) for var in fcast.data])
            lat_ = np.ma.mean([var.lat.values[:len_ar] for var in fcast.data],
                              axis=0)
            lon_ = np.ma.mean([var.lon.values[:len_ar] for var in fcast.data],
                              axis=0)
            YYYYMMDDHH = pd.date_range(fcast.data[0].time.values[0],
                                       periods=len_ar,
                                       freq="H")
            vmax_ = np.ma.mean(
                [var.max_sustained_wind.values[:len_ar] for var in fcast.data],
                axis=0)
            d = {
                'YYYYMMDDHH': YYYYMMDDHH,
                "VMAX": vmax_,
                "LAT": lat_,
                "LON": lon_
            }
            dfff = pd.DataFrame(d)
            dfff['STORMNAME'] = typhoons
            dfff['YYYYMMDDHH'] = dfff['YYYYMMDDHH'].apply(
                lambda x: x.strftime("%Y%m%d%H%M"))
            dfff[['YYYYMMDDHH', 'VMAX', 'LAT', 'LON',
                  'STORMNAME']].to_csv(os.path.join(Input_folder,
                                                    'ecmwf_hrs_track.csv'),
                                       index=False)
            line_ = 'ecmwf,' + '%secmwf_hrs_track.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
            #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
            fname.write(line_ + '\n')
            data_forced = fcast.data

        # calculate windfields for each ensemble
        threshold = 0  # threshold to filter the dataframe / reduce data
        df = pd.DataFrame(data=cent.coord)
        df["centroid_id"] = "id" + (df.index).astype(str)
        centroid_idx = df["centroid_id"].values
        ncents = cent.size
        df = df.rename(columns={0: "lat", 1: "lon"})

        # calculate the wind field for each ensemble member
        list_intensity = []
        distan_track = []
        for tr in data_forced:
            logger.info(
                f"Running on ensemble # {tr.ensemble_number} for typhoon {tr.name}"
            )
            track = TCTracks()
            typhoon = TropCyclone()
            track.data = [tr]
            #track.equal_timestep(3)
            tr = track.data[0]
            typhoon.set_from_tracks(track, cent, store_windfields=True)
            # Make intensity plot using the high resolution member
            if tr.is_ensemble == 'False':
                logger.info("High res member: creating intensity plot")
                plot_intensity.plot_inensity(typhoon=typhoon,
                                             event=tr.sid,
                                             output_dir=Output_folder,
                                             date_dir=date_dir,
                                             typhoon_name=tr.name)
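            # with store_windfields=True, typhoon.windfields is a list with one
            # sparse matrix per track, of shape (nsteps, ncents * 2); reshaping
            # to (nsteps, ncents, 2) recovers the two horizontal wind components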
            windfield = typhoon.windfields
            nsteps = windfield[0].shape[0]
            centroid_id = np.tile(centroid_idx, nsteps)
            intensity_3d = windfield[0].toarray().reshape(nsteps, ncents, 2)
            intensity = np.linalg.norm(intensity_3d, axis=-1).ravel()
            timesteps = np.repeat(track.data[0].time.values, ncents)
            #timesteps = np.repeat(tr.time.values, ncents)
            timesteps = timesteps.reshape((nsteps, ncents)).ravel()
            inten_tr = pd.DataFrame({
                'centroid_id': centroid_id,
                'value': intensity,
                'timestamp': timesteps,
            })
            inten_tr = inten_tr[inten_tr.value > threshold]
            inten_tr['storm_id'] = tr.sid
            inten_tr['ens_id'] = tr.sid + '_' + str(tr.ensemble_number)
            inten_tr['name'] = tr.name
            inten_tr = (pd.merge(inten_tr,
                                 df_admin,
                                 how='outer',
                                 on='centroid_id').dropna().groupby(
                                     ['adm3_pcode', 'ens_id'],
                                     as_index=False).agg(
                                         {"value": ['count', 'max']}))
            inten_tr.columns = ['adm3_pcode', 'storm_id', 'value_count', 'v_max']
            list_intensity.append(inten_tr)
            distan_track1 = []
            for index, row in df.iterrows():
                dist = np.min(
                    np.sqrt(
                        np.square(tr.lat.values - row['lat']) +
                        np.square(tr.lon.values - row['lon'])))
                distan_track1.append(dist * 111)
            dist_tr = pd.DataFrame({
                'centroid_id': centroid_idx,
                'value': distan_track1
            })
            dist_tr['storm_id'] = tr.sid
            dist_tr['name'] = tr.name
            dist_tr['ens_id'] = tr.sid + '_' + str(tr.ensemble_number)
            dist_tr = (pd.merge(dist_tr,
                                df_admin,
                                how='outer',
                                on='centroid_id').dropna().groupby(
                                    ['adm3_pcode', 'name', 'ens_id'],
                                    as_index=False).agg({'value': 'min'}))
            dist_tr.columns = ['adm3_pcode', 'name', 'storm_id', 'dis_track_min']
            distan_track.append(dist_tr)
        df_intensity_ = pd.concat(list_intensity)
        distan_track1 = pd.concat(distan_track)

        typhhon_df = pd.merge(df_intensity_,
                              distan_track1,
                              how='left',
                              on=['adm3_pcode', 'storm_id'])

        typhhon_df.to_csv(os.path.join(Input_folder, 'windfield.csv'),
                          index=False)

        line_ = 'windfield,' + '%swindfield.csv' % Input_folder + ',' + typhoons + ',' + date_dir  #StormName #
        #line_='Rainfall,'+'%sRainfall/' % Input_folder +','+ typhoons + ',' + date_dir #StormName #
        fname.write(line_ + '\n')
        fname.close()

        #############################################################
        #### Run IBF model
        #############################################################
        os.chdir(path)

        if platform == "linux" or platform == "linux2":  #check if running on linux or windows os
            # linux
            try:
                p = subprocess.check_call(
                    ["Rscript", "run_model_V2.R",
                     str(rainfall_error)])
            except subprocess.CalledProcessError as e:
                logger.error('failed to execute R script')
                raise ValueError(str(e))
        elif platform == "win32":  #if OS is windows edit the path for Rscript
            try:
                p = subprocess.check_call([
                    "C:/Program Files/R/R-4.1.0/bin/Rscript", "run_model_V2.R",
                    str(rainfall_error)
                ])
            except subprocess.CalledProcessError as e:
                logger.error('failed to execute R script')
                raise ValueError(str(e))

        #############################################################
        # send email in case of landfall-typhoon
        #############################################################

        image_filenames = list(Path(Output_folder).glob('*.png'))
        data_filenames = list(Path(Output_folder).glob('*.csv'))

        if image_filenames or data_filenames:
            message_html = """\
            <html>
            <body>
            <h1>IBF model run result </h1>
            <p>Please find attached a map and data with updated model run</p>
            <img src="cid:Impact_Data">
            </body>
            </html>
            """
            Sendemail.sendemail(
                smtp_server=os.environ["SMTP_SERVER"],
                smtp_port=int(os.environ["SMTP_PORT"]),
                email_username=os.environ["EMAIL_LOGIN"],
                email_password=os.environ["EMAIL_PASSWORD"],
                email_subject='Updated impact map for a new Typhoon in PAR',
                from_address=os.environ["EMAIL_FROM"],
                to_address_list=os.environ["EMAIL_TO_LIST"].split(','),
                cc_address_list=os.environ["EMAIL_CC_LIST"].split(','),
                message_html=message_html,
                filename_list=image_filenames + data_filenames)
        else:
            raise FileNotFoundError(
                f'No .png or .csv found in {Output_folder}')
            ##################### upload model output to 510 datalake ##############

        file_service = FileService(
            account_name=os.environ["AZURE_STORAGE_ACCOUNT"],
            protocol='https',
            connection_string=os.environ["AZURE_CONNECTING_STRING"])
        file_service.create_share('forecast')
        OutPutFolder = date_dir
        file_service.create_directory('forecast', OutPutFolder)

        for img_file in image_filenames:
            file_service.create_file_from_path(
                'forecast',
                OutPutFolder,
                os.fspath(img_file.parts[-1]),
                img_file,
                content_settings=ContentSettings(content_type='image/png'))

        for data_file in data_filenames:
            file_service.create_file_from_path(
                'forecast',
                OutPutFolder,
                os.fspath(data_file.parts[-1]),
                data_file,
                content_settings=ContentSettings(content_type='text/csv'))

        ##################### upload model input (Rainfall + wind intensity) to 510 datalake ##############
        # To DO

    print(
        '---------------------AUTOMATION SCRIPT FINISHED---------------------------------'
    )
    print(str(datetime.now()))
Example 13
from climada.hazard import StormEurope, Centroids

DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')

fn = [
    'fp_lothar_crop-test.nc',
    'fp_xynthia_crop-test.nc',
]
TEST_NCS = [os.path.join(DATA_DIR, f) for f in fn]
""" 
These test files have been generated using the netCDF kitchen sink:
ncks -d latitude,50.5,54.0 -d longitude,3.0,7.5 ./file_in.nc ./file_out.nc
"""

TEST_CENTROIDS = Centroids(os.path.join(DATA_DIR, 'fp_centroids-test.csv'))


class TestReader(unittest.TestCase):
    """ Test loading functions from the StormEurope class """
    def test_centroids_from_nc(self):
        """ Test if centroids can be constructed correctly """
        cent = StormEurope._centroids_from_nc(TEST_NCS[0])

        self.assertTrue(isinstance(cent, Centroids))
        self.assertTrue(isinstance(cent.coord, np.ndarray))
        self.assertEqual(cent.size, 9944)
        self.assertEqual(cent.coord.shape[0], cent.id.shape[0])

    def test_read_footprints(self):
        """ Test read_footprints function, using two small test files"""