Code Example #1
def align_synoptic_class_with_pw(path):
    import xarray as xr
    from aux_gps import dim_intersection
    from aux_gps import save_ncfile
    from aux_gps import xr_reindex_with_date_range
    pw = xr.load_dataset(path / 'GNSS_PW_thresh_50_homogenized.nc')
    pw = pw[[x for x in pw if '_error' not in x]]
    syn = read_synoptic_classification(report=False).to_xarray()
    # syn = syn.drop(['Name-EN', 'Name-HE'])
    syn = syn['class']
    syn = syn.sel(time=slice('1996', None))
    syn = syn.resample(time='5T').ffill()
    ds_list = []
    for sta in pw:
        print('aligning station {} with synoptics'.format(sta))
        new_time = dim_intersection([pw[sta], syn])
        syn_da = xr.DataArray(syn.sel(time=new_time))
        syn_da.name = '{}_class'.format(sta)
        syn_da = xr_reindex_with_date_range(syn_da)
        ds_list.append(syn_da)
    ds = xr.merge(ds_list)
    # fill NaNs before casting, since int8 cannot represent NaN:
    ds = ds.fillna(0)
    ds = ds.astype('int8')
    filename = 'GNSS_synoptic_class.nc'
    save_ncfile(ds, path, filename)
    return ds
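The alignment relies on upsampling the daily synoptic classes to the 5-minute PWV grid with resample(time='5T').ffill() and then intersecting the time axes. A minimal plain-xarray sketch of the same idea, standing in for the project helpers dim_intersection and xr_reindex_with_date_range:

import numpy as np
import pandas as pd
import xarray as xr

# daily class labels, standing in for the synoptic classification:
days = pd.date_range('2000-01-01', periods=4, freq='D')
syn = xr.DataArray([1, 2, 2, 3], dims='time',
                   coords={'time': days}, name='class')
# upsample by forward-filling each day's label onto a 5-minute grid:
syn5 = syn.resample(time='5T').ffill()
# intersect with a (here synthetic) PWV time axis and align:
pw_time = pd.date_range('2000-01-02', periods=12, freq='5T')
common = np.intersect1d(syn5['time'].values, pw_time.values)
print(syn5.sel(time=common))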
Code Example #2
def post_process_ims_stations(ds, window, savepath, gis_path, dem_path,
                              axis_path):
    """fill TD with hourly mean if NaN and smooth, then fill in station_lat
    and lon and alt from DEM, finally interpolate to AXIS coords and save"""
    from aux_gps import fill_na_xarray_time_series_with_its_group
    from ims_procedures import analyse_10mins_ims_field
    from axis_process import produce_rinex_filenames_at_time_window
    from ims_procedures import IMS_interpolating_to_GNSS_stations_israel
    from aux_gps import save_ncfile
    import pandas as pd
    now_dt = pd.Timestamp.utcnow().floor('H')
    ds = fill_na_xarray_time_series_with_its_group(ds, grp='hour')
    ds = analyse_10mins_ims_field(ds=ds, var='TD', gis_path=gis_path,
                                  dem_path=dem_path)
    ds_axis = IMS_interpolating_to_GNSS_stations_israel(
        dt=None, start_year=str(now_dt.year), verbose=True, savepath=None,
        network='axis', ds_td=ds, cut_days_ago=None, axis_path=axis_path)
    names = produce_rinex_filenames_at_time_window(end_dt=now_dt,
                                                   window=window)
    st_str = names[0][4:8]
    end_str = names[-1][4:8]
    filename = 'AXIS_TD_{}-{}.nc'.format(st_str, end_str)
    save_ncfile(ds_axis, savepath, filename)
    return ds_axis
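fill_na_xarray_time_series_with_its_group is a project helper; judging by its name and grp='hour', it replaces NaNs with the mean of each hour-of-day group. A minimal sketch of that behavior in plain xarray (an assumption about the helper, not its actual code):

import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range('2024-01-01', periods=72, freq='H')
td = xr.DataArray(20 + 5 * np.sin(np.arange(72) * 2 * np.pi / 24),
                  dims='time', coords={'time': time}, name='TD')
td[[5, 30, 55]] = np.nan
# mean diurnal cycle, one value per hour of day:
hourly_mean = td.groupby('time.hour').mean()
# fill each NaN from its hour-of-day group mean:
filled = td.groupby('time.hour').fillna(hourly_mean)
assert int(filled.isnull().sum()) == 0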
Code Example #3
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def produce_final_dsea_pwv(ims_station=None, savepath=None, use_pressure=True):
    """use ims_station='SEDOM' to get close ts, pressure"""
    import xarray as xr
    from aux_gps import save_ncfile
    if ims_station is not None:
        ts = xr.open_dataset(ims_path /
                             'IMS_TD_israeli_10mins.nc')[ims_station]
        pres = xr.open_dataset(ims_path /
                               'IMS_BP_israeli_10mins.nc')[ims_station]
        ts.load()
        pres.load()
        ts = ts.resample(time='5T').ffill()
        pres = pres.resample(time='5T', keep_attrs=True).ffill()
    if use_pressure:
        wetz = produce_wetz_dsea_from_ztd(pres=pres)
    else:
        wetz = read_all_final_tdps_dsea(return_mean=True, dryz=False)
    pwv = produce_pwv_from_zwd_with_ts_tm_from_deserve(ts=ts, zwd=wetz)
    pwv.attrs['units'] = 'mm'
    pwv.attrs['long_name'] = 'precipitable water vapor'
    pwv.name = 'pwv'
    pwv.attrs['action'] = 'corrected wetz using surface pressure and ts-tm from radiosonde'
    if savepath is not None:
        if ims_station is not None:
            filename = 'DSEA_PWV_{}.nc'.format(ims_station)
            save_ncfile(pwv, savepath, filename)
    return pwv
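A usage sketch following the docstring's hint; des_path is assumed here to be the module-level Dead Sea path seen in the other dsea_foehn.py examples:

# temperature and pressure from the SEDOM IMS station:
pwv = produce_final_dsea_pwv(ims_station='SEDOM', savepath=des_path,
                             use_pressure=True)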
Code Example #4
def read_BD_ceilometer_yoav_all_years(path=ceil_path, savepath=None):
    from aux_gps import path_glob
    from aux_gps import save_ncfile
    import pandas as pd
    files = path_glob(path, 'ceilometer_BD*.csv')
    dfs = []
    for file in files:
        dfs.append(read_BD_ceilometer_yoav_one_year_csv(file))
    df = pd.concat(dfs)
    df = df.sort_index()
    names = [x.split('[')[0] for x in df.columns]
    units = [x.split('[')[1].split(']')[0] for x in df.columns]
    long_names = [
        'total cloud cover', 'cloud cover of the most cloudy layer',
        'cloud cover of the 1st cloud layer', '1st cloud base height',
        'cloud cover of the 2nd cloud layer', '2nd cloud base height',
        'cloud cover of the 3rd cloud layer', '3rd cloud base height',
        'cloud cover of the 4th cloud layer', '4th cloud base height',
        'cloud cover of the 5th cloud layer', '5th cloud base height',
        'Mixing layer height'
    ]
    df.columns = names
    # fix cloud heights back to meters for data up to 2013-09-22:
    hs = [x for x in df.columns if '_H' in x]
    df.loc[:'2013-09-22', hs] *= (1 / 0.3048)
    ds = df.to_xarray()
    for i, da in enumerate(ds):
        ds[da].attrs['units'] = units[i]
        ds[da].attrs['long_name'] = long_names[i]
    if savepath is not None:
        filename = 'BD_clouds_and_MLH_from_ceilometers.nc'
        save_ncfile(ds, savepath, filename)
    return ds
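The names/units split assumes column labels of the form 'name[unit]'. A self-contained demo of the parsing, with made-up labels:

import pandas as pd

df = pd.DataFrame(columns=['CT[octas]', 'H1_H[m]', 'MLH[m]'])
names = [c.split('[')[0] for c in df.columns]
units = [c.split('[')[1].split(']')[0] for c in df.columns]
print(names)  # ['CT', 'H1_H', 'MLH']
print(units)  # ['octas', 'm', 'm']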
Code Example #5
def read_geodetic_positions_and_height(path=jpl_path):
    import pandas as pd
    import requests
    from io import StringIO
    from aux_gps import save_ncfile
    url = 'https://sideshow.jpl.nasa.gov/post/tables/table2.html'
    r = requests.get(url)
    data = r.text
    df = pd.read_csv(StringIO(data), delim_whitespace=True, skiprows=7)
    df.drop(df.tail(1).index, inplace=True)  # drop the last row
    df = df.unstack()
    cols0 = df.columns.get_level_values(0)
    cols1 = df.columns.get_level_values(1)
    cols = ['{}_{}'.format(x, y) for x, y in zip(cols0, cols1)]
    df.columns = cols
    df.index.name = 'station'
    ds = df.to_xarray()
    pos_das = [x for x in ds if 'POS' in x]
    vel_das = [x for x in ds if 'VEL' in x]
    for da in pos_das:
        if 'V' in da.split('_')[0]:
            ds[da].attrs['units'] = 'mm'
        else:
            ds[da].attrs['units'] = 'deg'
    ds['SN_POS'].attrs['units'] = 'mm'
    ds['SE_POS'].attrs['units'] = 'mm'
    for da in vel_das:
        ds[da].attrs['units'] = 'mm/yr'
    ds.attrs['name'] = 'geodetic positions and height and velocities'
    ds.attrs['reference frame'] = 'IGS14'
    ds.attrs['reference epoch'] = '2020-01-01'
    ds.attrs['reference ellipsoid'] = 'GRS80'
    filename = 'jpl_geodetic_positions_velocities.nc'
    save_ncfile(ds, path, filename)
    return ds
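The column flattening works because the two leftmost whitespace-separated fields of the JPL table evidently land in a MultiIndex (compare Code Example #6, which renames level_0/level_1), so unstack() leaves two column levels that get joined into 'NAME_KIND' strings. The flattening step in isolation, with made-up labels:

import pandas as pd

cols = pd.MultiIndex.from_product([['LAT', 'LON', 'V'], ['POS', 'VEL']])
df = pd.DataFrame([[0.0] * 6], columns=cols)
flat = ['{}_{}'.format(a, b) for a, b in
        zip(df.columns.get_level_values(0), df.columns.get_level_values(1))]
df.columns = flat
print(df.columns.tolist())
# ['LAT_POS', 'LAT_VEL', 'LON_POS', 'LON_VEL', 'V_POS', 'V_VEL']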
Code Example #6
def read_break_estimates_jpl_gipsyx_site(path=jpl_path):
    import pandas as pd
    import requests
    from io import StringIO
    from aux_gps import save_ncfile
    url = 'https://sideshow.jpl.nasa.gov/post/tables/table3.html'
    r = requests.get(url)
    data = r.text
    df = pd.read_csv(StringIO(data), delim_whitespace=True, skiprows=4)
    df.drop(df.tail(1).index, inplace=True)  # drop the last row
    ds = df.to_xarray()
    ds = ds.rename({'level_0': 'station', 'level_1': 'year'})
    for da in ds:
        ds[da].attrs['units'] = 'mm'
    ds.attrs['units'] = 'mm'
    ds['N'].attrs['long_name'] = 'north'
    ds['E'].attrs['long_name'] = 'east'
    ds['V'].attrs['long_name'] = 'vertical'
    ds['SN'].attrs['long_name'] = 'north error'
    ds['SE'].attrs['long_name'] = 'east error'
    ds['SV'].attrs['long_name'] = 'vertical error'
    ds.attrs['name'] = 'break estimates'
    filename = 'jpl_break_estimates.nc'
    save_ncfile(ds, path, filename)
    return ds
Code Example #7
File: axis_process.py  Project: ZiskinZiv/PW_from_GPS
def read_multi_station_tdp_file(file, stations, savepath=None):
    import pandas as pd
    import xarray as xr
    from aux_gps import save_ncfile
    df_raw = pd.read_csv(file, header=None, delim_whitespace=True)
    # first loop over list of stations and extract the data:
    df_stns = [df_raw[df_raw.iloc[:, -1].str.contains(x)] for x in stations]
    # now process each df from df_stns and extract the keys:
    keys = ['DryZ', 'WetZ', 'GradNorth', 'GradEast', 'Pos.X', 'Pos.Y', 'Pos.Z']
    desc = ['Zenith Hydrostatic Delay', 'Zenith Wet Delay',
            'North Gradient of Zenith Wet Delay',
            'East Gradient of Zenith Wet Delay',
            'WGS84(geocentric) X coordinate',
            'WGS84(geocentric) Y coordinate', 'WGS84(geocentric) Z coordinate']
    units = ['cm', 'cm', 'cm/m', 'cm/m', 'm', 'm', 'm']
    desc_dict = dict(zip(keys, desc))
    units_dict = dict(zip(keys, units))
    ppps = []
    for df_stn in df_stns:
        df_list = [df_stn[df_stn.iloc[:, -1].str.contains(x, regex=False)].copy()
                   for x in keys]
        # make sure that all keys in df have the same length:
        # assert len(set([len(x) for x in df_list])) == 1
        # translate the seconds col to datetime:
        seconds = df_list[-1].iloc[:, 0]
        dt = pd.to_datetime('2000-01-01T12:00:00')
        time = dt + pd.to_timedelta(seconds, unit='sec')
        # build a new df that contains all the vars(from keys):
        ppp = pd.DataFrame(index=time)
        ppp.index.name = 'time'
        for i, df in enumerate(df_list):
            if df.empty:
                continue
            df.columns = ['seconds', 'to_drop', keys[i], keys[i] + '_error',
                          'meta']
            ppp[keys[i]] = df[keys[i]].values
            ppp[keys[i] + '_error'] = df[keys[i] + '_error'].values
            # rename all the Pos. to nothing:
            # ppp.columns = ppp.columns.str.replace('Pos.', '')
        ppps.append(ppp.to_xarray())
    ds = xr.concat(ppps, 'station')
    ds['station'] = stations
    for da in ds:
        if 'Wet' in da or 'Dry' in da or 'Grad' in da:
            # scale delays and gradients by 100 (m -> cm, m/m -> cm/m):
            ds[da] = ds[da] * 100
            if 'Wet' in da:
                ds[da].attrs['units'] = units_dict.get('WetZ')
            elif 'Dry' in da:
                ds[da].attrs['units'] = units_dict.get('DryZ')
            elif 'Grad' in da:
                ds[da].attrs['units'] = units_dict.get('GradNorth')
        ds[da].attrs['long_name'] = desc_dict.get(da, '')
        if 'Pos' in da:
            ds[da].attrs['units'] = 'm'
    pos_names = [x for x in ds if 'Pos' in x]
    pos_new_names = [x.split('.')[-1] for x in pos_names]
    ds = ds.rename(dict(zip(pos_names, pos_new_names)))
    if savepath is not None:
        # filename = file.as_posix().split('/')[-1].split()
        save_ncfile(ds, savepath, 'smoothFinal.nc')
    return ds
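The seconds column is interpreted as seconds past the J2000 epoch (2000-01-01 12:00), which the dt + pd.to_timedelta line converts to timestamps. The conversion in isolation:

import pandas as pd

j2000 = pd.to_datetime('2000-01-01T12:00:00')
seconds = pd.Series([0, 86400, 2 * 86400])
print(j2000 + pd.to_timedelta(seconds, unit='sec'))
# 2000-01-01 12:00:00, 2000-01-02 12:00:00, 2000-01-03 12:00:00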
Code Example #8
def run_harmonic_analysis_on_all_jpl_products(path=jpl_path, savepath=jpl_path/'harmonic_analysis'):
    from aux_gps import save_ncfile
    dss = read_geodetic_positions_and_height(path=path)
    for i, station in enumerate(dss['station'].values):
        print('processing station {} ({} out of {})'.format(station, i+1, dss['station'].size))
        ds = produce_seasonal_trend_breakdown_time_series_from_jpl_gipsyx_site(station=station, verbose=False, plot=False)
        filename = '{}_V_harmonic_mm.nc'.format(station)
        save_ncfile(ds, savepath, filename)
    return
Code Example #9
def read_all_ceilometer_stations(path=ceil_path):
    import xarray as xr
    from aux_gps import save_ncfile
    stations = list(stations_dict.keys())
    da_list = []
    for station in stations:
        print('reading station {}'.format(station))
        da = read_ceilometer_station(path=path, name=station)
        da_list.append(da)
    ds = xr.merge(da_list)
    save_ncfile(ds, path, filename='MLH_from_ceilometers.nc')
    return ds
Code Example #10
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def concat_wrf_vars_same_date(path=des_path, date='2014-08-16'):
    import xarray as xr
    from aux_gps import path_glob
    from aux_gps import save_ncfile
    files = path_glob(path, 'wrfout_*_{}_*_*.nc'.format(date))
    dsl = [xr.open_dataset(x) for x in files]
    ds = xr.merge(dsl)
    varnames = '_'.join(sorted([x for x in ds]))
    name = files[0].as_posix().split('/')[-1].split('.')[0].split('_')[0:-1]
    filename = '_'.join(name) + '_{}'.format(varnames) + '.nc'
    save_ncfile(ds, path, filename)
    return
Code Example #11
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def load_wrf_var_from_wrf_file_and_save(file, varname="rh2", savepath=None):
    """load one wrfvar from wrf file and save it to savepath"""
    from netCDF4 import Dataset
    import wrf
    from aux_gps import save_ncfile
    nc = Dataset(file)
    name = file.as_posix().split('/')[-1].split('.')[0]
    filename = '{}_{}.nc'.format(name, varname)
    wrfvar = wrf.getvar(wrfin=nc, varname=varname, timeidx=wrf.ALL_TIMES)
    if savepath is not None:
        if wrfvar.attrs['projection'] is not None:
            wrfvar.attrs['projection'] = wrfvar.attrs['projection'].proj4()
        save_ncfile(wrfvar, savepath, filename)
    return wrfvar
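wrf.getvar attaches a wrf-python projection object in attrs, which cannot be written to netCDF as-is, hence the .proj4() string conversion before saving. A usage sketch with a hypothetical file path:

from pathlib import Path

wrf_file = Path('wrfout_d04_2014-08-16.nc')  # hypothetical file
rh2 = load_wrf_var_from_wrf_file_and_save(wrf_file, varname='rh2',
                                          savepath=Path('.'))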
Code Example #12
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def assemble_WRF_pwv(path=des_path, work_path=work_yuval, radius=1):
    from PW_stations import produce_geo_gnss_solved_stations
    import xarray as xr
    from aux_gps import save_ncfile
    from aux_gps import get_nearest_lat_lon_for_xy
    from aux_gps import get_unique_index
    df = produce_geo_gnss_solved_stations(path=work_path / 'gis', plot=False)
    dsea_point = df.loc['dsea'][['lat', 'lon']].astype(float).values
    if radius is not None:
        point = None
    else:
        point = dsea_point
    wrf_pw = read_all_WRF_GNSS_files(path, var='pw', point=point)
    wrf_pw8 = xr.load_dataarray(
        path / 'pw_wrfout_d04_2014-08-08_40lev.nc').sel(Time='2014-08-08')
    wrf_pw16 = xr.load_dataarray(
        path / 'pw_wrfout_d04_2014-08-16_40lev.nc').sel(Time='2014-08-16')
    wrf_pw_8_16 = xr.concat([wrf_pw8, wrf_pw16], 'Time')
    print('looking for {} in the WRF grid.'.format(dsea_point))
    loc = get_nearest_lat_lon_for_xy(wrf_pw_8_16['XLAT'], wrf_pw_8_16['XLONG'],
                                     dsea_point)
    print(loc)
    if radius is not None:
        print('getting {} radius around {}.'.format(radius, dsea_point))
        lat_islice = [loc[0][0] - radius, loc[0][0] + radius + 1]
        lon_islice = [loc[0][1] - radius, loc[0][1] + radius + 1]
        wrf_pw_8_16 = wrf_pw_8_16.isel(south_north=slice(*lat_islice),
                                       west_east=slice(*lon_islice))
        loc = get_nearest_lat_lon_for_xy(wrf_pw['XLAT'], wrf_pw['XLONG'],
                                         dsea_point)
        lat_islice = [loc[0][0] - radius, loc[0][0] + radius + 1]
        lon_islice = [loc[0][1] - radius, loc[0][1] + radius + 1]
        wrf_pw = wrf_pw.isel(south_north=slice(*lat_islice),
                             west_east=slice(*lon_islice))
    else:
        wrf_pw_8_16 = wrf_pw_8_16.isel(south_north=loc[0][0],
                                       west_east=loc[0][1])
    wrf_pw = xr.concat([wrf_pw, wrf_pw_8_16], 'Time')
    wrf_pw = wrf_pw.rename({'Time': 'time'})
    wrf_pw = wrf_pw.sortby('time')
    wrf_pw = get_unique_index(wrf_pw)
    if wrf_pw.attrs['projection'] is not None:
        wrf_pw.attrs['projection'] = wrf_pw.attrs['projection'].proj4()
    if radius is not None:
        filename = 'pwv_wrf_dsea_gnss_radius_{}_2014-08.nc'.format(radius)
    else:
        filename = 'pwv_wrf_dsea_gnss_point_2014-08.nc'
    save_ncfile(wrf_pw, path, filename)
    return wrf_pw
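The radius branch turns the nearest grid index (j, i) returned by get_nearest_lat_lon_for_xy into integer slices for a (2*radius+1) x (2*radius+1) window. The index arithmetic in isolation:

# nearest grid index (j, i) and the desired window half-width:
j, i, radius = 120, 85, 1
lat_islice = [j - radius, j + radius + 1]
lon_islice = [i - radius, i + radius + 1]
print(lat_islice, lon_islice)  # [119, 122] [84, 87]
# then: da.isel(south_north=slice(*lat_islice), west_east=slice(*lon_islice))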
Code Example #13
def get_dryz_from_all_stations(gnss_path, savepath):
    from aux_gps import save_ncfile
    from aux_gps import path_glob
    import xarray as xr
    paths = path_glob(gnss_path, '*/')
    stations = [x.as_posix().split('/')[-1] for x in paths]
    ds_list = []
    for station in stations:
        print('obtaining ZHD from station {}'.format(station))
        try:
            zhd = get_dryz_from_one_station(gnss_path, station=station)
            ds_list.append(zhd)
        except FileNotFoundError:
            continue
    ds = xr.merge(ds_list)
    filename = 'ZHD_GNSS.nc'
    save_ncfile(ds, savepath, filename)
    return ds
Code Example #14
def produce_pw_all_stations(ds, axis_path, mda_path):
    from PW_stations import load_mda
    from PW_stations import produce_GNSS_station_PW
    from aux_gps import fill_na_xarray_time_series_with_its_group
    from aux_gps import path_glob
    from aux_gps import save_ncfile
    import xarray as xr
    # first load mda:
    mda = load_mda(mda_path)
    # now loop over each station, produce pwv and save:
    st_dirs = path_glob(axis_path, '*/')
    st_dirs = [x for x in st_dirs if x.is_dir()]
    st_dirs = [x for x in st_dirs if not x.as_posix().split('/')[-1].isnumeric()]
    # currently 27 AXIS station directories are expected:
    assert len(st_dirs) == 27
    pwv_list = []
    for st_dir in st_dirs:
        station = st_dir.as_posix().split('/')[-1]
        last_file = sorted(path_glob(st_dir/'dr/ultra', '*.nc'))[-1]
        last_file_str = last_file.as_posix().split('/')[-1][4:13]
        wet = xr.load_dataset(last_file)['WetZ'].squeeze(drop=True)
        logger.info('loaded {}.'.format(last_file))
        wet_error = xr.load_dataset(last_file)['WetZ_error'].squeeze(drop=True)
        wet.name = station
        wet_error.name = station
        # resample temp to 5 mins and reindex to wet delay time:
        t = ds[station].resample(time='5T').ffill().reindex_like(wet.time)
        # fill in NaNs with mean hourly signal:
        t_new = fill_na_xarray_time_series_with_its_group(t, grp='hour')
        try:
            pwv = produce_GNSS_station_PW(wet, t_new, mda=mda,
                                          model_name='LR', plot=False)
            pwv_error = produce_GNSS_station_PW(wet_error, t_new, mda=mda,
                                                model_name='LR', plot=False)
            pwv_error.name = '{}_error'.format(pwv.name)
            pwv_ds = xr.merge([pwv, pwv_error])
            filename = '{}{}_PWV.nc'.format(station, last_file_str)
            save_ncfile(pwv_ds, st_dir/'dr/ultra', filename)
            pwv_list.append(pwv_ds)
        except ValueError as e:
            logger.warning('encountered error: {}, skipping {}'.format(e, last_file))
            continue
    dss = xr.merge(pwv_list)
    filename = 'AXIS_{}_PWV_ultra.nc'.format(last_file_str)
    save_ncfile(dss, axis_path, filename)
Code Example #15
def process_ims_stations(mainpath, window, var='TD', ds=None):
    import os
    import xarray as xr
    from aux_gps import path_glob
    from axis_process import produce_rinex_filenames_at_time_window
    from aux_gps import save_ncfile
    import pandas as pd
    logger.info('processing IMS stations with {} variable'.format(var))
    savepath = mainpath / var
    if not savepath.is_dir():
        os.mkdir(savepath)
        logger.info('created {}.'.format(savepath))
    else:
        logger.info('{} already exists.'.format(savepath))
    if ds is None:
        files = path_glob(mainpath, '*.nc')
        dsl = [xr.load_dataset(x) for x in files]
        delete_files = True
    else:
        dsl = ds
        delete_files = False
    ds_list = []
    # use a name that does not shadow the ds argument:
    for dss in dsl:
        try:
            ds_var = dss[var]
        except KeyError:
            logger.warning('no {} in {}.'.format(
                var, dss.attrs['station_name']))
            continue
        ds_var.name = dss.attrs['station_name']
        ds_var.attrs['lat'] = dss.attrs['lat']
        ds_var.attrs['lon'] = dss.attrs['lon']
        ds_var.attrs['station_id'] = dss.attrs['station_id']
        ds_list.append(ds_var)
    ds = xr.merge(ds_list)
    now_dt = pd.Timestamp.utcnow().floor('H')
    names = produce_rinex_filenames_at_time_window(end_dt=now_dt,
                                                   window=window)
    st_str = names[0][4:8]
    end_str = names[-1][4:8]
    filename = 'IMS_{}_{}-{}.nc'.format(var, st_str, end_str)
    save_ncfile(ds, savepath, filename)
    # finally delete the intermediate nc files, but only those loaded here:
    if delete_files:
        [x.unlink() for x in files]
    return ds
Code Example #16
File: axis_process.py  Project: ZiskinZiv/PW_from_GPS
def read_and_concat_smoothFinals(rinexpath, solution='Final'):
    import xarray as xr
    from aux_gps import save_ncfile
    from aux_gps import path_glob
    years = [x.as_posix().split('/')[-1] for x in path_glob(rinexpath, '*/')]
    years = [x for x in years if x.isnumeric()]
    ds = None
    for year in years:
        dsl = []
        # doys = [x.as_posix().split('/')[-1] for x in path_glob(rinexpath/year, '*/')]
        for doypath in path_glob(rinexpath/year, '*/'):
            file = doypath / 'dr' / solution / 'smoothFinal.nc'
            if file.is_file():
                dsl.append(xr.load_dataset(file))
                print('found smoothFinal.nc in {}'.format(doypath))
        if dsl:
            ds = xr.concat(dsl, 'time')
            ds = ds.sortby('time')
            save_ncfile(ds, rinexpath, 'smoothFinal_{}.nc'.format(year))
    return ds
Code Example #17
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def load_wrf_output_and_save_field(path=des_path, varname="pw", savepath=None):
    """
    load WRF output field and save it to savepath

    Parameters
    ----------
    path : Path() or str, optional
        the WRF loadpath. The default is des_path.
    varname : str, optional
        can be 'temp', 'pres', etc.. The default is 'pw'.
    savepath : Path() or str, optional
        The field savepath. The default is None.

    Returns
    -------
    var_list : list
        field dataarrays list.

    """
    import wrf
    from netCDF4 import Dataset
    from aux_gps import path_glob
    from aux_gps import save_ncfile
    files = path_glob(path, 'wrfout_*.nc')
    var_list = []
    for file in files:
        # open with netCDF4 directly, since xarray's private _file_obj
        # handle is not a stable API:
        wrfin = Dataset(file.as_posix())
        wrfvar = wrf.getvar(wrfin=wrfin,
                            varname=varname,
                            timeidx=wrf.ALL_TIMES)
        if savepath is not None:
            if wrfvar.attrs['projection'] is not None:
                wrfvar.attrs['projection'] = wrfvar.attrs['projection'].proj4()
            filename_to_save = '{}_{}'.format(varname,
                                              file.as_posix().split('/')[-1])
            save_ncfile(wrfvar, savepath, filename_to_save)
        var_list.append(wrfvar)
    return var_list
Code Example #18
File: dsea_foehn.py  Project: ZiskinZiv/PW_from_GPS
def produce_and_save_soi_axis_pwv(axis_path=axis_path,
                                  soi_path=dsea_gipsy_path,
                                  ims_path=ims_path,
                                  savepath=work_yuval):
    import xarray as xr
    from aux_gps import save_ncfile
    soi_pwv = produce_pwv_from_dsea_axis_station(path=soi_path,
                                                 ims_path=ims_path)
    axis_pwv = produce_pwv_from_dsea_axis_station(path=axis_path,
                                                  ims_path=ims_path)
    soi_pwv.attrs['GNSS network'] = 'SOI-APN'
    soi_pwv.attrs['station'] = 'dsea'
    soi_pwv.attrs['units'] = 'mm'
    soi_pwv = soi_pwv.reset_coords(drop=True)
    axis_pwv.attrs['GNSS network'] = 'AXIS'
    axis_pwv = axis_pwv.reset_coords(drop=True)
    axis_pwv.attrs['station'] = 'dsea'
    axis_pwv.attrs['units'] = 'mm'
    ds = xr.Dataset()
    ds['pwv-soi'] = soi_pwv
    ds['pwv-axis'] = axis_pwv
    save_ncfile(ds, savepath, 'DSEA_PWV_GNSS_2014-08.nc')
    return ds
Code Example #19
def agg_month_consecutive_syn_class(path=climate_path, normalize=True):
    import numpy as np
    import pandas as pd
    from aux_gps import save_ncfile
    df = read_synoptic_classification(path=path, report=False)
    df['month'] = df.index.month
    df['year'] = df.index.year
    df['months'] = df['year'].astype(str) + '-' + df['month'].astype(str)
    new_df = df.groupby(df['months']).apply(find_consecutive_classes)
    new_df.columns = ['class_sum']
    new_df = new_df.unstack()
    # the 19 synoptic class labels:
    new_df.columns = np.arange(1, 20, 1)
    dt = pd.to_datetime(new_df.index)
    new_df.set_index(dt, inplace=True)
    new_df = new_df.sort_index()
    new_df.index.name = 'time'
    if normalize:
        new_df = new_df.divide(new_df.index.days_in_month, axis=0)
    da = new_df.to_xarray().to_array('class')
    ds = da.to_dataset(name='consecutive')
    filename = 'GNSS_synoptic_class_consecutive.nc'
    save_ncfile(ds, work_yuval, filename)
    return ds
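With normalize=True, the monthly sums are divided by each month's length, turning counts into per-day rates. A tiny demo of divide with days_in_month:

import pandas as pd

idx = pd.to_datetime(['2000-01-01', '2000-02-01'])
df = pd.DataFrame({'counts': [31.0, 29.0]}, index=idx)
print(df.divide(df.index.days_in_month, axis=0))
# both rows become 1.0 per day (February 2000 has 29 days)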
Code Example #20
def perform_pwv_filling_last_decade(path=work_yuval,
                                    fyear='2009',
                                    lyear='2019',
                                    drop=['slom', 'elro']):
    import xarray as xr
    from aux_gps import save_ncfile
    pw = xr.load_dataset(path / 'GNSS_PW_monthly_thresh_50.nc')
    pw = pw.sel(time=slice(fyear, lyear))
    pw = pw.drop_vars(drop)
    prepare_pwv_for_climatol(freq='monthly',
                             first_year=fyear,
                             last_year=lyear,
                             pwv_ds=pw)
    # then run these two lines in R:
    # homogen('PWV',2009,2019, na.strings="-999.9",dz.max=7,std=2)
    # dahstat('PWV',2009,2019,stat='series',long=TRUE)
    ds, ds_flag = read_climatol_results(first_year=fyear, last_year=lyear)
    filename = 'GNSS_PW_monthly_homogenized_filled_{}-{}.nc'.format(
        fyear, lyear)
    save_ncfile(ds, path, filename)
    filename = 'GNSS_PW_monthly_homogenized_filled_flags_{}-{}.nc'.format(
        fyear, lyear)
    save_ncfile(ds_flag, path, filename)
    return
Code Example #21
def ims_download(savepath, window, save=False):
    """
    Downloads a 10mins parameter from the IMS for the last <window> hours for all stations.

    Parameters
    ----------
    savepath : Path or string
        a full path to download the files, e.g., /home/ziskin/Work_Files/PW_yuval/IMS_T/10mins.
       Returns
    -------
    None.

    """
    import pandas as pd
    import requests
    from requests.exceptions import SSLError
    from requests.exceptions import ConnectionError
    from aux_gps import save_ncfile
    now_dt = pd.Timestamp.now().floor('H')
    start_dt = now_dt - pd.Timedelta('{} hour'.format(window))
    logger.info('Downloading IMS to {} and window {}'.format(
        savepath, window))
    logger.info('Fetching IMS from {} to {}.'.format(start_dt, now_dt))
    # use API from IMS
    myToken = 'f058958a-d8bd-47cc-95d7-7ecf98610e47'
    headers = {'Authorization': 'ApiToken ' + myToken}
    # download meta-data on stations:
    r = requests.get('https://api.ims.gov.il/v1/envista/stations/',
                     headers=headers)
    stations_df = pd.DataFrame(r.json())
    # use only active 10mins stations:
    stations_df = stations_df[stations_df['timebase'] == 10]
    stations_df = stations_df[stations_df['active']]
    ds_list = []
    for i, row in stations_df.iterrows():
        st_id = row['stationId']
        st_name = row['name']
        last = now_dt.strftime('%Y/%m/%dT%H:00:00')
        first = start_dt.strftime('%Y/%m/%dT%H:00:00')
        lat = row['location']['latitude']
        if lat is None:
            lat = ''
        lon = row['location']['longitude']
        if lon is None:
            lon = ''
        dl_command = ('https://api.ims.gov.il/v1/envista/stations/' +
                      str(st_id) + '/data/?from=' + first + '&to=' + last)
        try:
            r = requests.get(dl_command, headers=headers)
        except SSLError:
            logger.warning('SSLError')
            r = requests.get(dl_command, headers=headers)
        except ConnectionError:
            logger.warning('ConnectionError')
            r = requests.get(dl_command, headers=headers)
        if r.status_code == 204:  # i.e., no content:
            logger.warning('no content for this search, skipping...')
            continue
        logger.info('parsing data from {} to dataframe.'.format(st_name))
        ds = parse_single_station(r.json()['data'])
        ds.attrs['station_name'] = '-'.join(st_name.split(' '))
        ds.attrs['lat'] = lat
        ds.attrs['lon'] = lon
        ds.attrs['station_id'] = st_id
        if save:
            filename = '{}.nc'.format('-'.join(st_name.split(' ')))
            save_ncfile(ds, savepath, filename)
        else:
            ds_list.append(ds)
    return ds_list
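Each request is retried exactly once on transient SSL or connection errors. That inline pattern could be factored into a small helper; a sketch under that assumption (get_with_one_retry is hypothetical, not part of the module):

import logging
import requests
from requests.exceptions import ConnectionError, SSLError

logger = logging.getLogger(__name__)

def get_with_one_retry(url, headers=None):
    """GET with a single retry on transient errors (hypothetical helper)."""
    try:
        return requests.get(url, headers=headers)
    except (SSLError, ConnectionError) as e:
        logger.warning('%s, retrying once...', type(e).__name__)
        return requests.get(url, headers=headers)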