Example #1
def test_read_filter_2():
    tuv_all_levs = mv.read(file_in_testdir('tuv_pl.grib'))
    u_all_levs = mv.read(data=tuv_all_levs, param='u')
    assert (mv.grib_get_long(u_all_levs,
                             'level') == [1000, 850, 700, 500, 400, 300])
    assert (mv.grib_get_string(u_all_levs,
                               'shortName') == ['u', 'u', 'u', 'u', 'u', 'u'])
    u_2levs = mv.read(data=u_all_levs, levelist=[700, 400])
    assert (mv.grib_get_long(u_2levs, 'level') == [700, 400])
Example #2
def test_class_():
    # these generate warnings, but if they pass then they show that the conversion
    # from class_ to class is working
    gg = mv.read(file_in_testdir('test.grib'))
    c = mv.read(data=gg, class_='od')
    assert (mv.type(c) == 'fieldset')
    c = mv.read({'data': gg, 'class_': 'od'})
    assert (mv.type(c) == 'fieldset')
    c = mv.read({'data': gg, 'class': 'od'})
    assert (mv.type(c) == 'fieldset')
Example #3
def test_read_filter_to_dataset():
    tuv_all_levs = mv.read(file_in_testdir('tuv_pl.grib'))
    u_all_levs = mv.read(data=tuv_all_levs, param='u')
    assert (mv.grib_get_long(u_all_levs,
                             'level') == [1000, 850, 700, 500, 400, 300])
    assert (mv.grib_get_string(u_all_levs,
                               'shortName') == ['u', 'u', 'u', 'u', 'u', 'u'])
    x = u_all_levs.to_dataset()
    x_keys = x.keys()
    assert ('u' in x_keys)  # only 'u' should be there
    assert ('v' not in x_keys)  # 'v' should have been filtered out
    assert ('t' not in x_keys)  # 't' should have been filtered out
Example #4
def test_mvl_ml2hPa():
    ml_data = mv.read(file_in_testdir('ml_data.grib'))
    assert mv.type(ml_data) == 'fieldset'
    ml_t = mv.read(data=ml_data, param='t')
    ml_lnsp = mv.read(data=ml_data, param='lnsp')
    desired_pls = [1000, 900, 850, 500, 300, 100, 10, 1, 0.8, 0.5, 0.3, 0.1]
    pl_data = mv.mvl_ml2hPa(ml_lnsp, ml_t, desired_pls)
    assert mv.type(pl_data) == 'fieldset'
    pls = mv.grib_get_long(pl_data, 'level')
    lev_types = mv.grib_get_string(pl_data, 'typeOfLevel')
    lev_divisors = [1 if x == 'isobaricInhPa' else 100 for x in lev_types]
    pl_in_hpa = [a / b for a, b in zip(pls, lev_divisors)]
    assert (pl_in_hpa == desired_pls)
Example #5
def test_odb():
    if mv.is_feature_available('odb') == 0:
        print(
            'Skipping test_odb because ODB is not enabled in this Metview version'
        )
        return

    db = mv.read(file_in_testdir('temp_u.odb'))
    assert (mv.type(db) == 'odb')

    # assert isinstance(db,mv.Odb)
    assert (mv.count(db) == 88)

    p_val = mv.values(db, 'p')
    assert (mv.count(p_val) == 88)
    assert (np.isclose(p_val[0], 98065.578125))
    assert (np.isclose(p_val[87], 97651.2109375))

    t_val = mv.values(db, 't')
    assert (mv.count(t_val) == 88)
    assert (np.isclose(t_val[0], 144700))
    assert (np.isclose(t_val[87], 94700))

    v_val = mv.values(db, 'val')
    assert (mv.count(v_val) == 88)
    assert (np.isclose(v_val[0], -4.62306786))
    assert (np.isclose(v_val[87], -4.27525187))
Example #6
def test_fieldset_relational_operators():
    a = mv.read(os.path.join(PATH, 'test.grib'))
    a = mv.int(a)
    assert (mv.accumulate(a > 273) == 76001)
    assert (mv.accumulate(a >= 273) == 78156)
    assert (mv.accumulate(a < 273) == 37524)
    assert (mv.accumulate(a <= 273) == 39679)
Example #7
def test_read_gptset():
    gpts = mv.read(file_in_testdir('geopointset_1.gpts'))
    assert (mv.type(gpts) == 'geopointset')
    assert (mv.count(gpts) == 6)
    gpt1 = gpts[0]
    assert (mv.type(gpt1) == 'geopoints')
    assert (mv.count(gpt1) == 11)
    assert (mv.metadata(gpt1) is None)
    gpt2 = gpts[1]
    assert (mv.type(gpt2) == 'geopoints')
    assert (mv.count(gpt2) == 1)
    # check the metadata
    md = mv.metadata(gpt2)
    assert (isinstance(md, dict))
    assert (md['mykey1'] == 'val1')
    assert (md['mykey2'] == 5)
    # check that it is iterable
    counts = [mv.count(c) for c in gpts]
    assert (counts == [11.0, 1.0, 44.0, 11.0, 1.0, 44.0])
    # test the filtering
    bad_filter = mv.filter(gpts, {'badkey': 7})
    assert (bad_filter is None)
    good_filter = mv.filter(gpts, {'mykey2': 5})
    assert (mv.type(good_filter) == 'geopointset')
    assert (mv.count(good_filter) == 1)
    assert (mv.count(good_filter[0]) == 1)
    lats = good_filter[0].latitudes()
    assert (len(lats) == 1)
    assert (lats[0] == 60.82)
Example #8
    def _extract_fields(self, df, fs, max_count):
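        # Summary of the method below: for every row of the indexed dataframe,
        # read the GRIB file referenced by each _fileIndexN column (caching opened
        # fieldsets in self.fs), append the referenced message to the fieldset
        # 'fs', record its position in a new _msgIndexN column, and finally drop
        # the _fileIndexN columns from the returned dataframe.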
        if df.empty:
            return None

        if "_fileIndex3" in df.columns:
            comp_num = 3
        elif "_fileIndex2" in df.columns:
            comp_num = 2
        elif "_fileIndex1" in df.columns:
            comp_num = 1
        else:
            return None

        idx = [[] for k in range(comp_num)]
        comp_lst = list(range(comp_num))
        for row in df.itertuples():
            for comp in comp_lst:
                idx_file = row[-1 - (comp_num - comp - 1) * 2]
                idx_msg = row[-2 - (comp_num - comp - 1) * 2]
                if idx_file not in self.fs:
                    self.fs[idx_file] = mv.read(self.data_files[idx_file])
                fs.append(self.fs[idx_file][idx_msg])
                idx[comp].append(len(fs) - 1)
        # generate a new dataframe
        df = df.copy()
        for k, v in enumerate(idx):
            df[f"_msgIndex{k+1}"] = v
        df.drop([f"_fileIndex{x+1}" for x in range(comp_num)],
                axis=1,
                inplace=True)
        return df
Example #9
def test_flextra():
    flx = mv.read(file_in_testdir('flextra_output.txt'))
    assert (flx.type() == 'definition')
    trNum = int(mv.flextra_group_get(flx, "trNum"))
    assert (trNum == 5)

    startLst = ['03:00:00', '06:00:00', '09:00:00', '12:00:00', '15:00:00']
    stopIndexLst = [1, 1, 1, 1, 1]
    for i in range(trNum):
        vals = mv.flextra_tr_get(flx, i, ["startTime", "stopIndex"])
        assert (len(vals) == 2)
        assert (vals[0] == startLst[i])
        assert (int(vals[1]) == stopIndexLst[i])

    # Read data for the first trajectory
    vals = mv.flextra_tr_get(flx, 0, ["lat", "lon", "date"])
    assert (len(vals) == 3)
    assert (vals[0].size == 25)
    assert (vals[1].size == 25)
    assert (len(vals[2]) == 25)
    assert (isinstance(vals[0], np.ndarray))
    assert (isinstance(vals[1], np.ndarray))
    assert (isinstance(vals[2], list))
    assert (np.isclose(vals[0].mean(), 63.369828))
    assert (np.isclose(vals[0].std(), 2.2269590))
    assert (np.isclose(vals[1].mean(), 18.014544))
    assert (np.isclose(vals[1].std(), 14.98830786689625))
    assert (vals[2][0] == datetime.datetime(2012, 1, 11, 3, 0, 0))
Example #10
def test_table():
    # test csv with metadata
    db = mv.read_table(table_filename=file_in_testdir('sample_metadata.csv'),
                       table_delimiter=' ',
                       table_combine_delimiters='on',
                       table_header_row=2,
                       table_meta_data_rows=1)
    assert (db.type() == 'table')
    assert (db.count() == 9)
    assert (db.name(3) == "LATIT")
    assert (len(db.metadata_keys()) == 16)
    assert (db.metadata_value('integration') == 'PETTERSSEN')
    v = db.values(0)
    assert (isinstance(v, np.ndarray))
    assert (len(v) == 7)
    assert (np.array_equal(
        v, np.array([0, 3600, 7200, 10800, 14400, 18000, 21600])))
    v = db.values(4)
    assert (isinstance(v, np.ndarray))
    assert (len(v) == 7)
    assert (np.array_equal(
        v, np.array([963.5, 964.1, 964.0, 963.3, 961.8, 960.3, 959.0])))

    # test csv with no metadata
    db = mv.read(file_in_testdir('sample.csv'))
    assert (db.type() == 'table')
    assert (db.count() == 4)
    assert (db.name(2) == "h2")
    v = db.values(2)
    assert (isinstance(v, np.ndarray))
    assert (len(v) == 6)
    assert (np.array_equal(v, np.array([4, 5, 6, 7, 8, 9])))
Example #11
def read_geopoints(path: Union[Path, str]):
    if isinstance(path, Path):
        path = str(path)

    if not os.path.exists(path):
        raise IOError(f"File does not exist: {path}")

    return metview.read(path)
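A minimal usage sketch for the helper above; the file name 'stations.gpt' is hypothetical and only illustrates that either a str or a pathlib.Path is accepted:

from pathlib import Path

gpts = read_geopoints(Path('stations.gpt'))  # hypothetical file; a plain str path works too
print(metview.type(gpts))  # a geopoints file is expected to report 'geopoints'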
Example #12
def test_valid_date_len_6():
    grib = mv.read(os.path.join(PATH, 't_for_xs.grib'))
    vd_grib = mv.valid_date(grib)
    assert isinstance(vd_grib[1], datetime.datetime)

    vd_ref = datetime.datetime(2017, 8, 1, 12, 0, 0)
    for vd in vd_grib:
        assert vd == vd_ref
Example #13
def test_netcdf_multi_indexed_values():
    nc = mv.read(file_in_testdir('xs_date_mv5.nc'))
    mv.setcurrent(nc, 't')
    assert (mv.attributes(nc)['long_name'] == "Temperature")
    assert (np.isclose(mv.values(nc, [0, 0, 0]), 234.7144))
    assert (np.isclose(mv.values(nc, [0, 0, 4]), 237.4377))
    assert (np.isclose(mv.values(nc, [0, 1, 0]), 248.7220))
    assert (np.isclose(mv.values(nc, [0, 1, 1]), 249.3030))
Example #14
def test_xyv_gpts_to_dataframe():
    gpt = mv.read(file_in_testdir('xyv.gpt'))
    df = gpt.to_dataframe()
    assert (isinstance(df, pd.DataFrame))
    assert (df.shape == (60, 3))
    assert (df.loc[5]['latitude'] == 70)
    assert (df.loc[25]['longitude'] == 20)
    assert (np.isclose(df.loc[4]['value'], -10.8656))
Example #15
def test_netcdf_var_indexing():
    nc = mv.read(file_in_testdir('xs_date_mv5.nc'))
    mv.setcurrent(nc, 0)
    assert (mv.attributes(nc)['long_name'] == "time")
    mv.setcurrent(nc, 1)
    assert (mv.attributes(nc)['long_name'] == "latitude")
    mv.setcurrent(nc, 4)
    assert (mv.attributes(nc)['long_name'] == "Temperature")
Example #16
def test_datainfo():
    a = mv.read(os.path.join(PATH, 'tuv_pl.grib'))
    di = mv.datainfo(a)
    di3 = di[3]
    assert (di3['index'] == 3)
    assert (di3['number_present'] == 2664)
    assert (di3['number_missing'] == 0)
    assert (di3['proportion_present'] == 1)
    assert (di3['proportion_missing'] == 0)
Example #17
def test_nearest_gridpoint_info():
    a = mv.read(os.path.join(PATH, 'test.grib'))
    ni = mv.nearest_gridpoint_info(a, 57.193, -2.360)
    ni0 = ni[0]
    assert (np.isclose(ni0['latitude'], 57.0))
    assert (np.isclose(ni0['longitude'], 357.75))
    assert (np.isclose(ni0['distance'], 22.4505))
    assert (np.isclose(ni0['value'], 282.436))
    assert (ni0['index'] == 21597)
Example #18
def test_fieldset_slice():
    grib = mv.read(os.path.join(PATH, 't_for_xs.grib'))
    # slicing: grib[start:stop:step], where stop is exclusive
    assert (mv.grib_get_long(grib[0:6:1],
                             'level') == [1000, 850, 700, 500, 400, 300])
    assert (mv.grib_get_long(grib[0:6:2], 'level') == [1000, 700, 400])
    assert (mv.grib_get_long(grib[4:6:1], 'level') == [400, 300])
    assert (mv.grib_get_long(grib[5:6:1], 'level') == 300)
    assert (mv.grib_get_long(grib[4:12:1], 'level') == [400, 300])
    assert (mv.grib_get_long(grib[1:6:3], 'level') == [850, 400])
    assert (mv.grib_get_long(grib[-1::], 'level') == 300)
    assert (mv.grib_get_long(grib[-1:-4:-1], 'level') == [300, 400, 500])

    grib = mv.read(os.path.join(PATH, 'tuv_pl.grib'))
    assert (mv.grib_get_long(grib[0:18:1], 'level') == [
        1000, 1000, 1000, 850, 850, 850, 700, 700, 700, 500, 500, 500, 400,
        400, 400, 300, 300, 300
    ])
Example #19
def test_fieldset_iterator():
    grib = mv.read(os.path.join(PATH, 't_for_xs.grib'))
    avg = mv.average(grib)
    assert (len(avg) == 6)
    iteravg = []
    for f in grib:
        iteravg.append(mv.average(f))
    assert (len(iteravg) == len(avg))
    for i in range(0, 6):
        assert np.isclose(avg[i], iteravg[i])
Example #20
    def from_path(cls, path: Union[Path, str]):
        if isinstance(path, Path):
            path = str(path)

        if not os.path.exists(path):
            raise IOError(f"File does not exist: {path}")

        obj = metview.read(path)
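        # re-tag the Metview object returned by read() so it is treated as an
        # instance of cls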
        obj.__class__ = cls
        return obj
Example #21
def run_mv(context, arg):
    print("args", arg)
    print("file=", arg["file"])
    fname = context.get_data(arg["file"])
    print("fname=", fname)
    #arg.pop("location")
    #arg.pop("contentLength")
    arg.pop("file")
    params = arg
    res = context.create_result("application/x-netcdf")
    print("res={}".format(res))

    f = mv.read(str(fname))
    params["data"] = f
    print("params=", params)
    nc = mv.mcross_sect(**params)
    mv.write(str(res.path), nc)

    nc_vars = mv.variables(nc)
    nc_param = None
    nc_level_param = None
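    # pick the first variable that is neither a coordinate ("time", "lat", "lon")
    # nor contains an underscore as the parameter to plot; the matching vertical
    # level variable is then expected to be named "<nc_param>_..."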
    for v in nc_vars:
        if v not in ["time", "lat", "lon"] and "_" not in v:
            nc_param = v
            break

    if nc_param:
        for v in nc_vars:
            if nc_param + "_" in v:
                nc_level_param = v
                break

    if not nc_param:
        raise Exception("Parameter to plot not found in resulting netcdf")

    if not nc_level_param:
        raise Exception("Vertical level param not found in resulting netcdf")

    #res["line"] = [0, -180, 0, 180]

    print("nc_param=", nc_param)
    print("nc_level_param=", nc_level_param)

    #NETCDF_MISSING_ATTRIBUTE = _FillValue,
    res["plotter"] = "magics"
    res["verb"] = "netcdf_xy_matrix"
    res["netcdf_value_variable"] = nc_param
    res["netcdf_y_variable"] = nc_level_param
    res["netcdf_dimension_setting_method"] = "index"
    res["netcdf_dimension_setting"] = "time:0"
    res["netcdf_x_variable"] = "lon"
    res["netcdf_x_auxiliary_variable"] = "lat"
    res["netcdf_x_geoline_convention"] = "lonlat"

    return res
Example #22
def test_temporary_file_deletion(file_name):
    g = mv.read(file_in_testdir(file_name))
    h = g + 1  # this will force Metview to write a new temporary file
    temp_filepath = h.url()
    assert (temp_filepath != "")  # file should exist right now
    assert (os.path.isfile(temp_filepath))  # file should exist right now
    h = 0  # rebinding h drops the last reference, which should trigger deletion of the temporary file
    # here we make the assumption that the system has not created
    # another temporary file with the same name between object
    # deletion and the following test for the file
    assert (not (os.path.isfile(temp_filepath)))
Example #23
def test_netcdf_multi_indexed_values_with_all():
    nc = mv.read(file_in_testdir('xs_date_mv5.nc'))
    mv.setcurrent(nc, 't')
    assert (mv.attributes(nc)['long_name'] == "Temperature")
    v = mv.values(nc, [0, 0, 'all'])
    assert (len(v) == 64)
    assert (np.isclose(v[0], 234.714))
    assert (np.isclose(v[63], 258.979))
    v = mv.values(nc, [0, 'all', 0])
    assert (len(v) == 5)
    assert (np.isclose(v[0], 234.714))
    assert (np.isclose(v[4], 260.484))
Example #24
def test_plot_2_pages():
    output_name = file_in_testdir('test_plot_2_pages')
    png = mv.png_output(output_name=output_name)
    degraded_field = mv.read(data=TEST_FIELDSET, grid=[4, 4])
    page1 = mv.plot_page(top=2.2, bottom=52.2, right=100)
    page2 = mv.plot_page(top=50)
    dw = mv.plot_superpage(pages=[page1, page2])
    mv.setoutput(png)
    mv.plot(dw[0], degraded_field, dw[1], degraded_field + 50)
    output_name_from_magics = output_name + '.1.png'
    assert (os.path.isfile(output_name_from_magics))
    os.remove(output_name_from_magics)
Example #25
def test_cross_section_data():
    grib = mv.read(os.path.join(PATH, 't_for_xs.grib'))
    xs_data = mv.mcross_sect(
        line=[59.9, -180, -13.5, 158.08],
        data=grib,
    )
    # the result of this should be a netCDF variable
    assert mv.type(xs_data) == 'netcdf'
    mv.setcurrent(xs_data, 't')
    assert mv.dimension_names(xs_data) == ['time', 'nlev', 'lon']
    assert np.isclose(mv.value(xs_data, 0), 230.39156)
    xs_data_x2 = xs_data * 2
    assert np.isclose(mv.value(xs_data_x2, 0), 460.7831)
Example #26
def load_dataset(filename, check_local=False):
    if check_local and os.path.exists(filename):
        return mv.read(filename)

    base_url = "http://download.ecmwf.org/test-data/metview/gallery/"
    try:
        d = mv.download(url=base_url + filename, target=filename)
        if filename.endswith(".zip"):
            with zipfile.ZipFile(filename, "r") as f:
                f.extractall()
        return d
    except Exception as exc:
        raise Exception("Could not download file " + filename +
                        " from the download server") from exc
Example #27
def test_obsfilter():
    bufr = mv.read(file_in_testdir('obs_3day.bufr'))

    # test two styles of passing parameters
    gpt1 = mv.obsfilter({
        'data': bufr,
        'parameter': '012004',
        'output': "geopoints"
    })
    gpt2 = mv.obsfilter(data=bufr, parameter='012004', output="geopoints")
    assert (mv.type(gpt1) == 'geopoints')
    assert (mv.count(gpt1) == 45)
    assert (mv.type(gpt2) == 'geopoints')
    assert (mv.count(gpt2) == 45)
Example #28
def test_fieldset_assignment_to_field_index():
    grib = mv.read(os.path.join(PATH, 't_for_xs.grib'))
    # check numbers before assignment
    assert (np.isclose(mv.integrate(grib[0]), 290.802))
    assert (np.isclose(mv.integrate(grib[3]), 260.601))
    assert (np.isclose(mv.integrate(grib[4]), 249.617))
    # change one field and check some fields
    grib[0] = grib[0] * 10
    assert (np.isclose(mv.integrate(grib[0]), 2908.02))
    assert (np.isclose(mv.integrate(grib[3]), 260.601))
    assert (np.isclose(mv.integrate(grib[4]), 249.617))
    # another assignment
    grib[4] = grib[3] * 10
    assert (np.isclose(mv.integrate(grib[0]), 2908.02))
    assert (np.isclose(mv.integrate(grib[3]), 260.601))
    assert (np.isclose(mv.integrate(grib[4]), 2606.01))
Example #29
def test_plot_2():
    output_name = file_in_testdir('test_plot_2')
    png = mv.png_output(output_name=output_name)
    grid_shade = {
        'legend': True,
        'contour': False,
        'contour_highlight': True,
        'contour_shade': True,
        'contour_shade_technique': 'grid_shading',
        'contour_shade_max_level_colour': 'olive',
        'contour_shade_min_level_colour': 'blue',
        'contour_shade_colour_direction': 'clockwise',
    }
    degraded_field = mv.read(data=TEST_FIELDSET, grid=[4, 4])
    mv.plot(png, degraded_field, mv.mcont(grid_shade))
    output_name_from_magics = output_name + '.1.png'
    assert (os.path.isfile(output_name_from_magics))
    os.remove(output_name_from_magics)
Example #30
def test_fieldset_append_from_empty():
    f = mv.Fieldset()
    g = mv.read(os.path.join(PATH, 'tuv_pl.grib'))
    f.append(g[0])
    assert (type(f) == mv.Fieldset)
    assert (len(f) == 1)
    shortnames = f.grib_get_string('shortName')
    assert (shortnames == 't')
    f.append(g[1])
    assert (type(f) == mv.Fieldset)
    assert (len(f) == 2)
    shortnames = f.grib_get_string('shortName')
    assert (shortnames == ['t', 'u'])
    f.append(g[15:18])
    assert (type(f) == mv.Fieldset)
    assert (len(f) == 5)
    shortnames = f.grib_get_string('shortName')
    assert (shortnames == ['t', 'u', 't', 'u', 'v'])