Code example #1
File: test_thermo.py Project: dodolooking/MetPy
def test_vertical_velocity_pressure_moist_air():
    """Test conversion of w to omega assuming moist air."""
    w = -1 * units('cm/s')
    omega_truth = 1.032100858 * units('microbar/second')
    omega_test = vertical_velocity_pressure(w, 850. * units.mbar, 280. * units.K,
                                            8 * units('g/kg'))
    assert_almost_equal(omega_test, omega_truth, 6)
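For reference, a minimal standalone sketch of the call this test exercises, with the import path assumed from MetPy's calc module:

# Hedged sketch based on the test above; the expected value comes from the test's omega_truth.
from metpy.calc import vertical_velocity_pressure
from metpy.units import units

w = -1 * units('cm/s')
omega = vertical_velocity_pressure(w, 850. * units.mbar, 280. * units.K, 8 * units('g/kg'))
print(omega.to('microbar/second'))  # approximately 1.03 microbar/s per the test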
Code example #2
File: test_kinematics.py Project: DBaryudin/MetPy
    def test_basic3(self):
        'Basic test of advection'
        u = np.array([1, 2, 3]) * units('m/s')
        s = np.array([1, 2, 3]) * units('Pa')
        a = advection(s, u, (1 * units.meter,))
        truth = np.array([-1, -2, -3]) * units('Pa/sec')
        assert_array_equal(a, truth)
Code example #3
File: test_station_plot.py Project: metpy/MetPy
def test_nws_layout():
    """Test metpy's NWS layout for station plots."""
    fig = plt.figure(figsize=(3, 3))

    # testing data
    x = np.array([1])
    y = np.array([2])
    data = dict()
    data["air_temperature"] = np.array([77]) * units.degF
    data["dew_point_temperature"] = np.array([71]) * units.degF
    data["air_pressure_at_sea_level"] = np.array([999.8]) * units("mbar")
    data["eastward_wind"] = np.array([15.0]) * units.knots
    data["northward_wind"] = np.array([15.0]) * units.knots
    data["cloud_coverage"] = [7]
    data["present_weather"] = [80]
    data["high_cloud_type"] = [1]
    data["medium_cloud_type"] = [3]
    data["low_cloud_type"] = [2]
    data["visibility_in_air"] = np.array([5.0]) * units.mile
    data["tendency_of_air_pressure"] = np.array([-0.3]) * units("mbar")
    data["tendency_of_air_pressure_symbol"] = [8]

    # Make the plot
    sp = StationPlot(fig.add_subplot(1, 1, 1), x, y, fontsize=12, spacing=16)
    nws_layout.plot(sp, data)

    sp.ax.set_xlim(0, 3)
    sp.ax.set_ylim(0, 3)

    return fig
Code example #4
File: test_station_plot.py Project: ajoros/MetPy
def test_nws_layout():
    'Test metpy\'s NWS layout for station plots'
    setup_font()
    fig = make_figure(figsize=(3, 3))

    # testing data
    x = np.array([1])
    y = np.array([2])
    data = dict()
    data['air_temperature'] = np.array([77]) * units.degF
    data['dew_point_temperature'] = np.array([71]) * units.degF
    data['air_pressure_at_sea_level'] = np.array([999.8]) * units('mbar')
    data['eastward_wind'] = np.array([15.]) * units.knots
    data['northward_wind'] = np.array([15.]) * units.knots
    data['cloud_coverage'] = [7]
    data['present_weather'] = [80]
    data['high_cloud_type'] = [1]
    data['medium_cloud_type'] = [3]
    data['low_cloud_type'] = [2]
    data['visibility_in_air'] = np.array([5.]) * units.mile
    data['tendency_of_air_pressure'] = np.array([-0.3]) * units('mbar')
    data['tendency_of_air_pressure_symbol'] = [8]

    # Make the plot
    sp = StationPlot(fig.add_subplot(1, 1, 1), x, y, fontsize=12, spacing=16)
    nws_layout.plot(sp, data)

    sp.ax.set_xlim(0, 3)
    sp.ax.set_ylim(0, 3)
    hide_tick_labels(sp.ax)

    return fig
Code example #5
File: test_kinematics.py Project: DBaryudin/MetPy
    def test_basic(self):
        'Basic braindead test of advection'
        u = np.ones((3,)) * units('m/s')
        s = np.ones_like(u) * units.kelvin
        a = advection(s, u, (1 * units.meter,))
        truth = np.zeros_like(u) * units('K/sec')
        assert_array_equal(a, truth)
Code example #6
File: test_kinematics.py Project: DBaryudin/MetPy
    def test_basic2(self):
        'Basic test of advection'
        u = np.ones((3,)) * units('m/s')
        s = np.array([1, 2, 3]) * units('kg')
        a = advection(s, u, (1 * units.meter,))
        truth = -np.ones_like(u) * units('kg/sec')
        assert_array_equal(a, truth)
Code example #7
File: test_calc_tools.py Project: dopplershift/MetPy
def test_grid_deltas_from_dataarray_xy(test_da_xy):
    """Test grid_deltas_from_dataarray with a xy grid."""
    dx, dy = grid_deltas_from_dataarray(test_da_xy)
    true_dx = np.array([[[[500] * 3]]]) * units('km')
    true_dy = np.array([[[[500]] * 3]]) * units('km')
    assert_array_almost_equal(dx, true_dx, 5)
    assert_array_almost_equal(dy, true_dy, 5)
Code example #8
File: test_kinematics.py Project: ahill818/MetPy
def test_advection_2d_uniform():
    """Test advection for uniform 2D field."""
    u = np.ones((3, 3)) * units('m/s')
    s = np.ones_like(u) * units.kelvin
    a = advection(s, [u, u], (1 * units.meter, 1 * units.meter))
    truth = np.zeros_like(u) * units('K/sec')
    assert_array_equal(a, truth)
Code example #9
File: test_kinematics.py Project: ahill818/MetPy
def test_advection_uniform():
    """Test advection calculation for a uniform 1D field."""
    u = np.ones((3,)) * units('m/s')
    s = np.ones_like(u) * units.kelvin
    a = advection(s, u, (1 * units.meter,))
    truth = np.zeros_like(u) * units('K/sec')
    assert_array_equal(a, truth)
Code example #10
File: test_kinematics.py Project: ahill818/MetPy
def test_advection_1d():
    """Test advection calculation with varying wind and field."""
    u = np.array([1, 2, 3]) * units('m/s')
    s = np.array([1, 2, 3]) * units('Pa')
    a = advection(s, u, (1 * units.meter,))
    truth = np.array([-1, -2, -3]) * units('Pa/sec')
    assert_array_equal(a, truth)
Code example #11
File: test_kinematics.py Project: ahill818/MetPy
def test_advection_1d_uniform_wind():
    """Test advection for simple 1D case with uniform wind."""
    u = np.ones((3,)) * units('m/s')
    s = np.array([1, 2, 3]) * units('kg')
    a = advection(s, u, (1 * units.meter,))
    truth = -np.ones_like(u) * units('kg/sec')
    assert_array_equal(a, truth)
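Pulling the pattern out of these advection tests, here is a minimal hedged sketch of the same call outside pytest; the positional argument order (scalar, wind, deltas) matches these tests, and later MetPy versions changed this signature:

# Sketch only: mirrors advection(scalar, wind, deltas) as used in the tests above.
import numpy as np
from metpy.calc import advection
from metpy.units import units

u = np.array([1, 2, 3]) * units('m/s')
s = np.array([1, 2, 3]) * units('Pa')
adv = advection(s, u, (1 * units.meter,))
print(adv)  # expected [-1, -2, -3] Pa/s, per test_advection_1d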
Code example #12
File: test_station_plot.py Project: akrherz/MetPy
def test_nws_layout():
    """Test metpy's NWS layout for station plots."""
    fig = plt.figure(figsize=(3, 3))

    # testing data
    x = np.array([1])
    y = np.array([2])
    data = {'air_temperature': np.array([77]) * units.degF,
            'dew_point_temperature': np.array([71]) * units.degF,
            'air_pressure_at_sea_level': np.array([999.8]) * units('mbar'),
            'eastward_wind': np.array([15.]) * units.knots,
            'northward_wind': np.array([15.]) * units.knots, 'cloud_coverage': [7],
            'present_weather': [80], 'high_cloud_type': [1], 'medium_cloud_type': [3],
            'low_cloud_type': [2], 'visibility_in_air': np.array([5.]) * units.mile,
            'tendency_of_air_pressure': np.array([-0.3]) * units('mbar'),
            'tendency_of_air_pressure_symbol': [8]}

    # Make the plot
    sp = StationPlot(fig.add_subplot(1, 1, 1), x, y, fontsize=12, spacing=16)
    nws_layout.plot(sp, data)

    sp.ax.set_xlim(0, 3)
    sp.ax.set_ylim(0, 3)

    return fig
Code example #13
File: compute_vpd.py Project: akrherz/DEV
def main():
    """Go Main Go"""
    pgconn = get_dbconn('scan')
    for station in ['S2004', 'S2196', 'S2002', 'S2072', 'S2068',
                    'S2031', 'S2001', 'S2047']:
        df = read_sql("""
        select extract(year from valid + '2 months'::interval) as wy,
        tmpf, dwpf from alldata where station = %s and tmpf is not null
        and dwpf is not null
        """, pgconn, params=(station, ), index_col=None)
        df['mixingratio'] = meteorology.mixing_ratio(
            temperature(df['dwpf'].values, 'F')).value('KG/KG')
        df['vapor_pressure'] = mcalc.vapor_pressure(
            1000. * units.mbar,
            df['mixingratio'].values * units('kg/kg')).to(units('kPa'))
        df['saturation_mixingratio'] = (
            meteorology.mixing_ratio(
                temperature(df['tmpf'].values, 'F')).value('KG/KG'))
        df['saturation_vapor_pressure'] = mcalc.vapor_pressure(
            1000. * units.mbar,
            df['saturation_mixingratio'].values * units('kg/kg')).to(units('kPa'))
        df['vpd'] = df['saturation_vapor_pressure'] - df['vapor_pressure']
        means = df.groupby('wy').mean()
        counts = df.groupby('wy').count()
        for yr, row in means.iterrows():
            print(("%s,%s,%.0f,%.3f"
                   ) % (yr, station, counts.at[yr, 'vpd'], row['vpd']))
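The script above relies on pyiem's meteorology.mixing_ratio; a rough MetPy-only sketch of the same vapor pressure deficit idea, using the older argument order for mixing_ratio_from_relative_humidity seen elsewhere on this page, might look like:

# Hedged sketch: VPD from temperature, relative humidity and pressure with MetPy only.
from metpy import calc as mcalc
from metpy.units import units

pressure = 1000. * units.mbar
temperature = 298. * units.K
rh = 0.55  # dimensionless fraction

mr = mcalc.mixing_ratio_from_relative_humidity(rh, temperature, pressure)
mr_sat = mcalc.mixing_ratio_from_relative_humidity(1.0, temperature, pressure)
vpd = (mcalc.vapor_pressure(pressure, mr_sat) - mcalc.vapor_pressure(pressure, mr)).to('kPa')
print(vpd)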
Code example #14
File: test_indices.py Project: dodolooking/MetPy
def test_supercell_composite_scalar():
    """Test supercell composite function with a single value."""
    mucape = 2000. * units('J/kg')
    esrh = 400. * units('m^2/s^2')
    ebwd = 30. * units('m/s')
    truth = 16.
    supercell_comp = supercell_composite(mucape, esrh, ebwd)
    assert_almost_equal(supercell_comp, truth, 6)
Code example #15
File: test_kinematics.py Project: ahill818/MetPy
def test_advection_2d():
    """Test advection in varying 2D field."""
    u = np.ones((3, 3)) * units('m/s')
    v = 2 * np.ones((3, 3)) * units('m/s')
    s = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) * units.kelvin
    a = advection(s, [u, v], (1 * units.meter, 1 * units.meter))
    truth = np.array([[-3, -2, 1], [-4, 0, 4], [-1, 2, 3]]) * units('K/sec')
    assert_array_equal(a, truth)
Code example #16
File: test_indices.py Project: dodolooking/MetPy
def test_supercell_composite():
    """Test supercell composite function."""
    mucape = [2000., 1000., 500., 2000.] * units('J/kg')
    esrh = [400., 150., 45., 45.] * units('m^2/s^2')
    ebwd = [30., 15., 5., 5.] * units('m/s')
    truth = [16., 2.25, 0., 0.]
    supercell_comp = supercell_composite(mucape, esrh, ebwd)
    assert_array_equal(supercell_comp, truth)
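A minimal standalone sketch of the same call, assuming supercell_composite is importable from metpy.calc as these tests imply:

from metpy.calc import supercell_composite
from metpy.units import units

mucape = 2000. * units('J/kg')
esrh = 400. * units('m^2/s^2')
ebwd = 30. * units('m/s')
print(supercell_composite(mucape, esrh, ebwd))  # approximately 16, per test_supercell_composite_scalar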
Code example #17
File: test_thermo.py Project: dodolooking/MetPy
def test_surface_based_cape_cin():
    """Test the surface-based CAPE and CIN calculation."""
    p = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
    temperature = np.array([22.2, 14.6, 12., 9.4, 7., -38.]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius
    cape, cin = surface_based_cape_cin(p, temperature, dewpoint)
    assert_almost_equal(cape, 58.0368212 * units('joule / kilogram'), 6)
    assert_almost_equal(cin, -89.8073512 * units('joule / kilogram'), 6)
Code example #18
File: test_kinematics.py Project: DBaryudin/MetPy
    def test_2dbasic2(self):
        'Basic 2D test of advection'
        u = np.ones((3, 3)) * units('m/s')
        v = 2 * np.ones((3, 3)) * units('m/s')
        s = np.array([[1, 2, 1], [2, 4, 2], [1, 2, 1]]) * units.kelvin
        a = advection(s, [u, v], (1 * units.meter, 1 * units.meter))
        truth = np.array([[-3, -2, 1], [-4, 0, 4], [-1, 2, 3]]) * units('K/sec')
        assert_array_equal(a, truth)
Code example #19
File: test_kinematics.py Project: DBaryudin/MetPy
    def test_geopotential(self):
        'Test of geostrophic wind calculation with geopotential'
        z = np.array([[48, 49, 48], [49, 50, 49], [48, 49, 48]]) * 100. * units('m^2/s^2')
        ug, vg = geostrophic_wind(z, 1 / units.sec, 100. * units.meter, 100. * units.meter)
        true_u = np.array([[-1, 0, 1]] * 3) * units('m/s')
        true_v = -true_u.T
        assert_array_equal(ug, true_u)
        assert_array_equal(vg, true_v)
Code example #20
File: test_thermo.py Project: dodolooking/MetPy
def bv_data():
    """Return height and potential temperature data for testing Brunt-Vaisala functions."""
    heights = [1000., 1500., 2000., 2500.] * units('m')
    potential_temperatures = [[290., 290., 290., 290.],
                              [292., 293., 293., 292.],
                              [294., 296., 293., 293.],
                              [296., 295., 293., 296.]] * units('K')
    return heights, potential_temperatures
Code example #21
File: test_thermo.py Project: dodolooking/MetPy
def test_most_unstable_cape_cin():
    """Test the most unstable CAPE/CIN calculation."""
    pressure = np.array([1000., 959., 867.9, 850., 825., 800.]) * units.mbar
    temperature = np.array([18.2, 22.2, 17.4, 10., 0., 15]) * units.celsius
    dewpoint = np.array([19., 19., 14.3, 0., -10., 0.]) * units.celsius
    mucape, mucin = most_unstable_cape_cin(pressure, temperature, dewpoint)
    assert_almost_equal(mucape, 157.07111 * units('joule / kilogram'), 4)
    assert_almost_equal(mucin, -15.74772 * units('joule / kilogram'), 4)
Code example #22
File: test_thermo.py Project: dodolooking/MetPy
def test_most_unstable_cape_cin_surface():
    """Test the most unstable CAPE/CIN calculation when surface is most unstable."""
    pressure = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
    temperature = np.array([22.2, 14.6, 12., 9.4, 7., -38.]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius
    mucape, mucin = most_unstable_cape_cin(pressure, temperature, dewpoint)
    assert_almost_equal(mucape, 58.0368212 * units('joule / kilogram'), 6)
    assert_almost_equal(mucin, -89.8073512 * units('joule / kilogram'), 6)
Code example #23
File: test_indices.py Project: dodolooking/MetPy
def test_critical_angle():
    """Test critical angle with observed sounding."""
    data = get_upper_air_data(datetime(2016, 5, 22, 0), 'DDC')
    ca = critical_angle(data['pressure'], data['u_wind'],
                        data['v_wind'], data['height'],
                        stormu=0 * units('m/s'), stormv=0 * units('m/s'))
    truth = [140.0626637513269] * units('degrees')
    assert_almost_equal(ca, truth, 8)
Code example #24
File: test_thermo.py Project: dodolooking/MetPy
def test_cape_cin_no_lfc():
    """Test that CAPE is zero with no LFC."""
    p = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
    temperature = np.array([22.2, 24.6, 22., 20.4, 18., -10.]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius
    parcel_prof = parcel_profile(p, temperature[0], dewpoint[0]).to('degC')
    cape, cin = cape_cin(p, temperature, dewpoint, parcel_prof)
    assert_almost_equal(cape, 0.0 * units('joule / kilogram'), 6)
    assert_almost_equal(cin, 0.0 * units('joule / kilogram'), 6)
Code example #25
File: test_thermo.py Project: dodolooking/MetPy
def test_cape_cin_custom_profile():
    """Test the CAPE and CIN calculation with a custom profile passed to LFC and EL."""
    p = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
    temperature = np.array([22.2, 14.6, 12., 9.4, 7., -38.]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius
    parcel_prof = parcel_profile(p, temperature[0], dewpoint[0]) + 5 * units.delta_degC
    cape, cin = cape_cin(p, temperature, dewpoint, parcel_prof)
    assert_almost_equal(cape, 1443.505086499895 * units('joule / kilogram'), 6)
    assert_almost_equal(cin, 0.0 * units('joule / kilogram'), 6)
Code example #26
File: test_indices.py Project: dodolooking/MetPy
def test_sigtor():
    """Test significant tornado parameter function."""
    sbcape = [2000., 2000., 2000., 2000., 3000, 4000] * units('J/kg')
    sblcl = [3000., 1500., 500., 1500., 1500, 800] * units('meter')
    srh1 = [200., 200., 200., 200., 300, 400] * units('m^2/s^2')
    shr6 = [20., 5., 20., 35., 20., 35] * units('m/s')
    truth = [0., 0, 1.777778, 1.333333, 2., 10.666667]
    sigtor = significant_tornado(sbcape, sblcl, srh1, shr6)
    assert_almost_equal(sigtor, truth, 6)
Code example #27
File: test_thermo.py Project: dodolooking/MetPy
def test_cape_cin():
    """Test the basic CAPE and CIN calculation."""
    p = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
    temperature = np.array([22.2, 14.6, 12., 9.4, 7., -38.]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius
    parcel_prof = parcel_profile(p, temperature[0], dewpoint[0])
    cape, cin = cape_cin(p, temperature, dewpoint, parcel_prof)
    assert_almost_equal(cape, 58.0368212 * units('joule / kilogram'), 6)
    assert_almost_equal(cin, -89.8073512 * units('joule / kilogram'), 6)
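Outside pytest, the CAPE/CIN workflow in these tests reduces to building a parcel profile and passing it to cape_cin; a hedged sketch reusing the sounding from test_cape_cin:

import numpy as np
from metpy.calc import cape_cin, parcel_profile
from metpy.units import units

p = np.array([959., 779.2, 751.3, 724.3, 700., 269.]) * units.mbar
temperature = np.array([22.2, 14.6, 12., 9.4, 7., -38.]) * units.celsius
dewpoint = np.array([19., -11.2, -10.8, -10.4, -10., -53.2]) * units.celsius

prof = parcel_profile(p, temperature[0], dewpoint[0])
cape, cin = cape_cin(p, temperature, dewpoint, prof)
print(cape, cin)  # roughly 58 J/kg and -90 J/kg, per the expected values above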
Code example #28
File: test_indices.py Project: dodolooking/MetPy
def test_sigtor_scalar():
    """Test significant tornado parameter function with a single value."""
    sbcape = 4000 * units('J/kg')
    sblcl = 800 * units('meter')
    srh1 = 400 * units('m^2/s^2')
    shr6 = 35 * units('m/s')
    truth = 10.666667
    sigtor = significant_tornado(sbcape, sblcl, srh1, shr6)
    assert_almost_equal(sigtor, truth, 6)
Code example #29
File: test_indices.py Project: dodolooking/MetPy
def test_bulk_shear():
    """Test bulk shear with observed sounding."""
    data = get_upper_air_data(datetime(2016, 5, 22, 0), 'DDC')
    u, v = bulk_shear(data['pressure'], data['u_wind'],
                      data['v_wind'], heights=data['height'],
                      depth=6000 * units('meter'))
    truth = [29.899581266946115, -14.389225800205509] * units('knots')
    assert_almost_equal(u.to('knots'), truth[0], 8)
    assert_almost_equal(v.to('knots'), truth[1], 8)
Code example #30
File: test_thermo.py Project: dodolooking/MetPy
def test_cape_cin_no_el():
    """Test that CAPE works with no EL."""
    p = np.array([959., 779.2, 751.3, 724.3]) * units.mbar
    temperature = np.array([22.2, 14.6, 12., 9.4]) * units.celsius
    dewpoint = np.array([19., -11.2, -10.8, -10.4]) * units.celsius
    parcel_prof = parcel_profile(p, temperature[0], dewpoint[0]).to('degC')
    cape, cin = cape_cin(p, temperature, dewpoint, parcel_prof)
    assert_almost_equal(cape, 0.08750805 * units('joule / kilogram'), 6)
    assert_almost_equal(cin, -89.8073512 * units('joule / kilogram'), 6)
Code example #31
File: test_basic.py Project: zhatin/MetPy
def test_get_wind_dir():
    """Test that get_wind_dir wrapper works (deprecated in 0.9)."""
    with pytest.warns(MetpyDeprecationWarning):
        d = get_wind_dir(3. * units('m/s'), 4. * units('m/s'))
    assert_almost_equal(d, 216.870 * units.deg, 3)
Code example #32
File: test_basic.py Project: zhatin/MetPy
def test_windchill_scalar():
    """Test wind chill with scalars."""
    wc = windchill(-5 * units.degC, 35 * units('m/s'))
    assert_almost_equal(wc, -18.9357 * units.degC, 0)
Code example #33
File: test_basic.py Project: zhatin/MetPy
def test_wind_comps_scalar():
    """Test wind components calculation with scalars."""
    u, v = wind_components(8 * units('m/s'), 150 * units.deg)
    assert_almost_equal(u, -4 * units('m/s'), 3)
    assert_almost_equal(v, 6.9282 * units('m/s'), 3)
Code example #34
date_list = [
    x.strftime('%Y-%m-%d %H:%M')
    for x in rrule.rrule(rrule.HOURLY, dtstart=date_1, until=date_2)
]

metar_file = main + '/wyoming/' + date[0:6] + '/AbuDhabi_surf_' + date[
    0:8] + '.csv'
outFile = main + '/wyoming/' + date[0:6] + '/AbuDhabi_surf_mr' + date[
    0:8] + '.csv'

metar_data = pd.read_csv(metar_file)

metar_data = metar_data[[
    'STN', 'TIME', 'ALTM', 'TMP', 'DEW', 'RH', 'DIR', 'SPD', 'VIS'
]]
#metar_data=metar_data.drop('Unnamed: 9',axis=1)
metar_data = metar_data.drop(metar_data.index[0])
metar_data['TIME'] = date_list

tmp = np.array((metar_data.iloc[:, 3]).apply(pd.to_numeric, errors='coerce') +
               273.15) * units('K')
rh = np.array(
    (metar_data.iloc[:, 5]).apply(pd.to_numeric, errors='coerce') / 100.0)
press = np.array((metar_data.iloc[:, 2]).apply(
    pd.to_numeric, errors='coerce')) * units('millibar')

mrio = mcalc.mixing_ratio_from_relative_humidity(rh, tmp, press)

metar_data['mrio'] = mrio

metar_data.to_csv(outFile)
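The snippet above applies the calculation to whole pandas columns; a smaller hedged sketch of the same mixing-ratio call on scalar quantities (same argument order as used here, which newer MetPy versions reordered):

from metpy import calc as mcalc
from metpy.units import units

tmp = 298.15 * units.K
rh = 0.60  # dimensionless fraction, as in the snippet above
press = 1013.25 * units('millibar')
print(mcalc.mixing_ratio_from_relative_humidity(rh, tmp, press))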
Code example #35
File: test_basic.py Project: zhatin/MetPy
def test_warning_dir():
    """Test that warning is raised wind direction > 2Pi."""
    with pytest.warns(UserWarning):
        wind_components(3. * units('m/s'), 270)
Code example #36
File: test_basic.py Project: zhatin/MetPy
def test_direction_dimensions():
    """Verify wind_direction returns degrees."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert str(d.units) == 'degree'
Code example #37
File: test_basic.py Project: zhatin/MetPy
def test_scalar_direction():
    """Test wind direction with scalars."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert_almost_equal(d, 216.870 * units.deg, 3)
Code example #38
File: test_xarray.py Project: khintz/MetPy
def test_assign_y_x_dataarray_outside_tolerance(test_coord_helper_da_latlon):
    """Test assign_y_x raises ValueError when tolerance is exceeded on DataArray."""
    with pytest.raises(ValueError) as exc:
        test_coord_helper_da_latlon.metpy.assign_y_x(tolerance=1 * units('um'))
    assert 'cannot be collapsed to 1D within tolerance' in str(exc)
Code example #39
File: test_xarray.py Project: khintz/MetPy
def test_units_percent():
    """Test that '%' is converted to 'percent'."""
    test_var_percent = xr.open_dataset(
        get_test_data('irma_gfs_example.nc',
                      as_file_obj=False))['Relative_humidity_isobaric']
    assert test_var_percent.metpy.units == units('percent')
Code example #40
            sl_vars_this = sl_vars.sel(time=this_time)
            vtime = pd.to_datetime(iso_vars_this.time.values)
            
            print("working on "+vtime.strftime("%H%M UTC %d %b %Y"))

            ## pull out needed variables
            lon = iso_vars_this.longitude.data
            lat = iso_vars_this.latitude.data

            ## get 500-mb data to start
            hght_500 = iso_vars_this.z.sel(level=500)/9.81  ## this was geopotential, convert to geopotential height
            uwnd_500 = iso_vars_this.u.sel(level=500)
            vwnd_500 = iso_vars_this.v.sel(level=500)
            
            # smooth the height field a bit
            hght_500 = ndimage.gaussian_filter(hght_500, sigma=1, order=0) * units('m')

            # Combine 1D latitude and longitudes into a 2D grid of locations
            lon_2d, lat_2d = np.meshgrid(lon, lat)
            # Gridshift for barbs
            lon_2d[lon_2d > 180] = lon_2d[lon_2d > 180] - 360

            if plot_500vort:
            
                #Begin Data Calculations
                dx, dy = mpcalc.lat_lon_grid_deltas(lon, lat)

                f = mpcalc.coriolis_parameter(np.deg2rad(lat_2d)).to(units('1/sec'))

                ## need to set the lats this way since GFS is 1-d
                #avor = mpcalc.absolute_vorticity(uwnd_500,vwnd_500,dx,dy,lat[None,:,None]*units('degrees'))
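As a self-contained illustration of the two MetPy calls in the block above (grid deltas and the Coriolis parameter), a hedged sketch on a toy lat/lon grid:

# Sketch only: mirrors mpcalc.lat_lon_grid_deltas and mpcalc.coriolis_parameter as used above.
import numpy as np
import metpy.calc as mpcalc
from metpy.units import units

lat = np.arange(30., 41., 1.)
lon = np.arange(-100., -89., 1.)
dx, dy = mpcalc.lat_lon_grid_deltas(lon, lat)
lon_2d, lat_2d = np.meshgrid(lon, lat)
f = mpcalc.coriolis_parameter(np.deg2rad(lat_2d)).to(units('1/sec'))
print(dx.shape, dy.shape, f.units)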
Code example #41
def test_streamfunc():
    """Test of Montgomery Streamfunction calculation."""
    t = 287. * units.kelvin
    hgt = 5000. * units.meter
    msf = montgomery_streamfunction(hgt, t)
    assert_almost_equal(msf, 337468.2500 * units('m^2 s^-2'), 4)
Code example #42
def test_default_order_warns():
    """Test that using the default array ordering issues a warning."""
    u = np.ones((3, 3)) * units('m/s')
    with pytest.warns(UserWarning):
        vorticity(u, u, 1 * units.meter, 1 * units.meter)
Code example #43
File: test_basic.py Project: zhatin/MetPy
def test_get_wind_speed():
    """Test that get_wind_speed wrapper works (deprecated in 0.9)."""
    with pytest.warns(MetpyDeprecationWarning):
        s = get_wind_speed(-3. * units('m/s'), -4. * units('m/s'))
    assert_almost_equal(s, 5. * units('m/s'), 3)
Code example #44
File: test_xarray.py Project: khintz/MetPy
def test_units(test_var):
    """Test the units property on the accessor."""
    assert test_var.metpy.units == units('kelvin')
Code example #45
File: test_basic.py Project: zhatin/MetPy
def test_get_wind_components():
    """Test that get_wind_components wrapper works (deprecated in 0.9)."""
    with pytest.warns(MetpyDeprecationWarning):
        u, v = get_wind_components(8 * units('m/s'), 150 * units.deg)
    assert_almost_equal(u, -4 * units('m/s'), 3)
    assert_almost_equal(v, 6.9282 * units('m/s'), 3)
Code example #46
def process(ncfn):
    """Process this file """
    pgconn = get_dbconn("iem")
    icursor = pgconn.cursor()
    xref = {}
    icursor.execute("""
        SELECT id, network from stations where
        network ~* 'ASOS' or network = 'AWOS' and country = 'US'
    """)
    for row in icursor:
        xref[row[0]] = row[1]
    icursor.close()
    nc = ncopen(ncfn)
    data = {}
    for vname in [
            "stationId",
            "observationTime",
            "temperature",
            "dewpoint",
            "altimeter",  # Pa
            "windDir",
            "windSpeed",  # mps
            "windGust",  # mps
            "visibility",  # m
            "precipAccum",
            "presWx",
            "skyCvr",
            "skyCovLayerBase",
            "autoRemark",
            "operatorRemark",
    ]:
        data[vname] = nc.variables[vname][:]
        for qc in ["QCR", "QCD"]:
            vname2 = vname + qc
            if vname2 in nc.variables:
                data[vname2] = nc.variables[vname2][:]
    for vname in ["temperature", "dewpoint"]:
        data[vname + "C"] = temperature(data[vname], "K").value("C")
        data[vname] = temperature(data[vname], "K").value("F")
    for vname in ["windSpeed", "windGust"]:
        data[vname] = (masked_array(data[vname], units("meter / second")).to(
            units("knots")).magnitude)

    data["altimeter"] = pressure(data["altimeter"], "PA").value("IN")
    data["skyCovLayerBase"] = distance(data["skyCovLayerBase"],
                                       "M").value("FT")
    data["visibility"] = distance(data["visibility"], "M").value("MI")
    data["precipAccum"] = distance(data["precipAccum"], "MM").value("IN")
    stations = chartostring(data["stationId"][:])
    presentwxs = chartostring(data["presWx"][:])
    skycs = chartostring(data["skyCvr"][:])
    autoremarks = chartostring(data["autoRemark"][:])
    opremarks = chartostring(data["operatorRemark"][:])

    def decision(i, fieldname, tolerance):
        """Our decision if we are going to take a HFMETAR value or not"""
        if data[fieldname][i] is np.ma.masked:
            return None
        if data["%sQCR" % (fieldname, )][i] == 0:
            return data[fieldname][i]
        # Now we have work to do
        departure = np.ma.max(np.ma.abs(data["%sQCD" % (fieldname, )][i, :]))
        # print("departure: %s tolerance: %s" % (departure, tolerance))
        if departure <= tolerance:
            return data[fieldname][i]
        return None

    for i, sid in tqdm(
            enumerate(stations),
            total=len(stations),
            disable=(not sys.stdout.isatty()),
    ):
        sid3 = sid[1:] if sid[0] == "K" else sid
        ts = datetime.datetime(1970, 1, 1) + datetime.timedelta(
            seconds=data["observationTime"][i])
        ts = ts.replace(tzinfo=pytz.UTC)

        mtr = "%s %sZ AUTO " % (sid, ts.strftime("%d%H%M"))
        network = xref.get(sid3, "ASOS")
        iem = Observation(sid3, network, ts)

        #  06019G23KT
        val = decision(i, "windDir", 15)
        if val is not None:
            iem.data["drct"] = int(val)
            mtr += "%03i" % (iem.data["drct"], )
        else:
            mtr += "///"

        val = decision(i, "windSpeed", 10)
        if val is not None:
            iem.data["sknt"] = int(val)
            mtr += "%02i" % (iem.data["sknt"], )
        else:
            mtr += "//"

        val = decision(i, "windGust", 10)
        if val is not None and val > 0:
            iem.data["gust"] = int(val)
            mtr += "G%02i" % (iem.data["gust"], )
        mtr += "KT "

        val = decision(i, "visibility", 4)
        if val is not None:
            iem.data["vsby"] = float(val)
            mtr += "%sSM " % (vsbyfmt(iem.data["vsby"]), )

        presentwx = presentwxs[i]
        if presentwx != "":
            # database storage is comma delimited
            iem.data["wxcodes"] = presentwx.split(" ")
            mtr += "%s " % (presentwx, )

        for _i, (skyc,
                 _l) in enumerate(zip(skycs[i], data["skyCovLayerBase"][i])):
            if skyc != "":
                iem.data["skyc%s" % (_i + 1, )] = skyc
                if skyc != "CLR":
                    iem.data["skyl%s" % (_i + 1, )] = int(_l)
                    mtr += "%s%03i " % (skyc, int(_l) / 100)
                else:
                    mtr += "CLR "

        t = ""
        tgroup = "T"
        val = decision(i, "temperature", 10)
        if val is not None:
            # Recall the pain enabling this
            # iem.data['tmpf'] = float(data['temperature'][i])
            tmpc = float(data["temperatureC"][i])
            t = "%s%02i/" % (
                "M" if tmpc < 0 else "",
                tmpc if tmpc > 0 else (0 - tmpc),
            )
            tgroup += "%s%03i" % (
                "1" if tmpc < 0 else "0",
                (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
            )
        val = decision(i, "dewpoint", 10)
        if val is not None:
            # iem.data['dwpf'] = float(data['dewpoint'][i])
            tmpc = float(data["dewpointC"][i])
            if t != "":
                t = "%s%s%02i " % (
                    t,
                    "M" if tmpc < 0 else "",
                    tmpc if tmpc > 0 else 0 - tmpc,
                )
                tgroup += "%s%03i" % (
                    "1" if tmpc < 0 else "0",
                    (tmpc if tmpc > 0 else (0 - tmpc)) * 10.0,
                )
        if len(t) > 4:
            mtr += t
        val = decision(i, "altimeter", 20)
        if val is not None:
            iem.data["alti"] = float(round(val, 2))
            mtr += "A%4i " % (iem.data["alti"] * 100.0, )

        mtr += "RMK "
        val = decision(i, "precipAccum", 25)
        if val is not None:
            if val >= 0.01:
                iem.data["phour"] = float(round(val, 2))
                mtr += "P%04i " % (iem.data["phour"] * 100.0, )
            elif val > 0:
                # Trace
                mtr += "P0000 "
                iem.data["phour"] = TRACE_VALUE

        if tgroup != "T":
            mtr += "%s " % (tgroup, )

        if autoremarks[i] != "" or opremarks[i] != "":
            mtr += "%s %s " % (autoremarks[i], opremarks[i])
        mtr += "MADISHF"
        # Eat our own dogfood
        try:
            Metar.Metar(mtr)
            iem.data["raw"] = mtr
        except Exception as exp:
            print("dogfooding extract_hfmetar %s resulted in %s" % (mtr, exp))
            continue

        for key in iem.data:
            if isinstance(iem.data[key], np.float32):
                print("key: %s type: %s" % (key, type(iem.data[key])))
        icursor = pgconn.cursor()
        if not iem.save(icursor, force_current_log=True, skip_current=True):
            print(("extract_hfmetar: unknown station? %s %s %s\n%s") %
                  (sid3, network, ts, mtr))

        icursor.close()
        pgconn.commit()
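The wind handling above leans on pint unit conversion through masked_array(...).to(...); a minimal hedged sketch of that conversion step on its own:

# Sketch of the m/s -> knots conversion applied to windSpeed/windGust above.
import numpy as np
from metpy.units import masked_array, units

speed_mps = np.ma.array([5.0, 10.0, 0.0], mask=[False, False, True])
speed_kt = masked_array(speed_mps, units("meter / second")).to(units("knots")).magnitude
print(speed_kt)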
Code example #47
File: test_basic.py Project: zhatin/MetPy
def test_scalar_speed():
    """Test wind speed with scalars."""
    s = wind_speed(-3. * units('m/s'), -4. * units('m/s'))
    assert_almost_equal(s, 5. * units('m/s'), 3)
Code example #48
def zdrarc(zdrc, ZDRmasked, CC, REF, grad_ffd, grad_mag, KDP, forest_loaded,
           ax, f, time_start, month, d_beg, h_beg, min_beg, sec_beg, d_end,
           h_end, min_end, sec_end, rlons, rlats, max_lons_c, max_lats_c,
           zdrlev, proj, storm_relative_dir, Outer_r, Inner_r, tracking_ind):
    #Inputs,
    #zdrc: Contour of differential reflectivity at zdrlev (typically 1.5 dB)
    #ZDRmasked: Masked array ZDRmasked1 in regions outside the forward flank (grad_ffd) and below 0.6 CC
    #CC: 1km Correlation Coefficient (CC) grid
    #REF: 1km Reflectivity grid
    #grad_ffd: Angle (degrees) used to indicate angular region of supercell containing the forward flank
    #grad_mag: Array of wind velocity magnitude along reflectivity gradient
    #KDP: 1km Specific Differential Phase (KDP) grid
    #forest_loaded: Random forest pickle file for Zdr arcs
    #ax: Subplot object to be built on with each contour
    #f: Placefile, edited throughout the program
    #time_start: Radar file date and time of scan
    #month: Month of case, supplied by user
    #d_beg,h_beg,min_beg,sec_beg,d_end,h_end,min_end,sec_end: Day, hour, minute, second of the beginning and end of a scan
    #rlons,rlats: Full volume geographic coordinates, longitude and latitude respectively
    #max_lons_c,max_lats_c: Centroid coordinates of storm objects
    #zdrlev: User defined value for Zdr contour of arcs
    #proj: Projection of Earth's surface to be used for accurate area and distance calculations
    #storm_relative_dir: Vector direction along the reflectivity gradient in the forward flank
    #Outer_r,Inner_r: Outer and Inner limit of radar range effective for analysis
    #tracking_ind: Index for storm of interest
    zdr_areas = []
    zdr_centroid_lon = []
    zdr_centroid_lat = []
    zdr_mean = []
    zdr_cc_mean = []
    zdr_max = []
    zdr_storm_lon = []
    zdr_storm_lat = []
    zdr_dist = []
    zdr_forw = []
    zdr_back = []
    zdr_masks = []
    zdr_outlines = []
    if np.max(ZDRmasked) > zdrlev:
        #Break contours into polygons using the same method as for reflectivity
        for level in zdrc.collections:
            for contour_poly in level.get_paths():
                for n_contour, contour in enumerate(
                        contour_poly.to_polygons()):
                    contour_a = np.asarray(contour[:])
                    xa = contour_a[:, 0]
                    ya = contour_a[:, 1]
                    polygon_new = geometry.Polygon([(i[0], i[1])
                                                    for i in zip(xa, ya)])
                    if n_contour == 0:
                        polygon = polygon_new
                    else:
                        polygon = polygon.difference(polygon_new)
                try:
                    pr_area = (transform(proj, polygon).area *
                               units('m^2')).to('km^2')
                except:
                    continue
                boundary = np.asarray(polygon.boundary.xy)
                polypath = Path(boundary.transpose())
                coord_map = np.vstack(
                    (rlons[0, :, :].flatten(), rlats[0, :, :].flatten())).T
                mask = polypath.contains_points(coord_map).reshape(
                    rlons[0, :, :].shape)
                mean = np.mean(ZDRmasked[mask])
                mean_cc = np.mean(CC[mask])
                mean_Z = np.mean(REF[mask])
                mean_graddir = np.mean(grad_ffd[mask])
                mean_grad = np.mean(grad_mag[mask])
                mean_kdp = np.mean(KDP[mask])
                #Only select out objects larger than 1 km^2 with high enough CC
                if pr_area > 1 * units(
                        'km^2') and mean > zdrlev[0] and mean_cc > .88:
                    g = Geod(ellps='sphere')
                    dist = np.zeros((np.asarray(max_lons_c).shape[0]))
                    forw = np.zeros((np.asarray(max_lons_c).shape[0]))
                    rawangle = np.zeros((np.asarray(max_lons_c).shape[0]))
                    back = np.zeros((np.asarray(max_lons_c).shape[0]))
                    zdr_polypath = polypath
                    #Assign ZDR arc objects to the nearest acceptable storm object
                    for i in range(dist.shape[0]):
                        distance_1 = g.inv(polygon.centroid.x,
                                           polygon.centroid.y, max_lons_c[i],
                                           max_lats_c[i])
                        back[i] = distance_1[1]
                        if distance_1[1] < 0:
                            back[i] = distance_1[1] + 360
                        forw[i] = np.abs(back[i] - storm_relative_dir)
                        rawangle[i] = back[i] - storm_relative_dir
                        #Account for weird angles
                        if forw[i] > 180:
                            forw[i] = 360 - forw[i]
                            rawangle[i] = (360 - forw[i]) * (-1)
                        dist[i] = distance_1[2] / 1000.
                        rawangle[i] = rawangle[i] * (-1)

                    #Pick out only ZDR arc objects with a reasonable probability of actually being in the FFD region
                    #using their location relative to the storm centroid
                    if (forw[np.where(dist == np.min(dist))[0][0]] < 180
                            and np.min(dist) < Outer_r
                        ) or (forw[np.where(dist == np.min(dist))[0][0]] < 140
                              and np.min(dist) < Inner_r):
                        #Use ML algorithm to eliminate non-arc objects
                        #Get x and y components
                        if (rawangle[np.where(dist == np.min(dist))[0][0]] >
                                0):
                            directions_raw = 360 - rawangle[np.where(
                                dist == np.min(dist))[0][0]]
                        else:
                            directions_raw = (-1) * rawangle[np.where(
                                dist == np.min(dist))[0][0]]

                        xc, yc = wind_components(
                            np.min(dist) * units('m/s'),
                            directions_raw * units('degree'))
                        ARC_X = np.zeros((1, 12))
                        ARC_X[:, 0] = pr_area.magnitude
                        ARC_X[:, 1] = np.min(dist)
                        ARC_X[:, 2] = np.max(ZDRmasked[mask]) / mean
                        ARC_X[:, 3] = (np.max(ZDRmasked[mask]) /
                                       mean) * pr_area.magnitude
                        ARC_X[:, 4] = mean_cc
                        ARC_X[:, 5] = mean_kdp
                        ARC_X[:, 6] = mean_Z
                        ARC_X[:, 7] = mean_graddir
                        ARC_X[:, 8] = mean_grad
                        ARC_X[:, 9] = rawangle[np.where(
                            dist == np.min(dist))[0][0]]
                        ARC_X[:, 10] = xc
                        ARC_X[:, 11] = yc
                        pred_zdr = forest_loaded.predict(ARC_X)
                        if (pred_zdr[0] == 1):
                            zdr_storm_lon.append((max_lons_c[np.where(
                                dist == np.min(dist))[0][0]]))
                            zdr_storm_lat.append((max_lats_c[np.where(
                                dist == np.min(dist))[0][0]]))
                            zdr_dist.append(np.min(dist))
                            zdr_forw.append(
                                forw[np.where(dist == np.min(dist))[0][0]])
                            zdr_back.append(
                                back[np.where(dist == np.min(dist))[0][0]])
                            zdr_areas.append((pr_area))
                            zdr_centroid_lon.append((polygon.centroid.x))
                            zdr_centroid_lat.append((polygon.centroid.y))
                            zdr_mean.append((mean))
                            zdr_cc_mean.append((mean_cc))
                            zdr_max.append((np.max(ZDRmasked[mask])))
                            zdr_masks.append(mask)
                            patch = PathPatch(polypath,
                                              facecolor='blue',
                                              alpha=.6,
                                              edgecolor='blue',
                                              linewidth=3)
                            ax.add_patch(patch)
                            #Add polygon to placefile
                            f.write('TimeRange: ' + str(time_start.year) +
                                    '-' + str(month) + '-' + str(d_beg) + 'T' +
                                    str(h_beg) + ':' + str(min_beg) + ':' +
                                    str(sec_beg) + 'Z ' +
                                    str(time_start.year) + '-' + str(month) +
                                    '-' + str(d_end) + 'T' + str(h_end) + ':' +
                                    str(min_end) + ':' + str(sec_end) + 'Z')
                            f.write('\n')
                            f.write("Color: 000 000 139 \n")
                            f.write('Line: 3, 0, "ZDR Arc Outline" \n')
                            for i in range(len(zdr_polypath.vertices)):
                                f.write("%.5f" % (zdr_polypath.vertices[i][1]))
                                f.write(", ")
                                f.write("%.5f" % (zdr_polypath.vertices[i][0]))
                                f.write('\n')

                            f.write("End: \n \n")
                            f.flush()
                            if (((max_lons_c[np.where(
                                    dist == np.min(dist))[0][0]])
                                 in max_lons_c[tracking_ind])
                                    and ((max_lats_c[np.where(
                                        dist == np.min(dist))[0][0]])
                                         in max_lats_c[tracking_ind])):
                                zdr_outlines.append(polypath)

    #Returning Variables,
    #zdr_storm_lon,zdr_storm_lat: Storm object centroids associated with Zdr arcs
    #zdr_dist: Zdr arc centroid distance from associated storm object centroid
    #zdr_forw,zdr_back: Forward and rear angles about the zdr arc region
    #zdr_areas: Zdr arc area
    #zdr_centroid_lon,zdr_centroid_lat: Zdr arc centroid coordinates
    #zdr_mean: Mean value of Zdr within arc
    #zdr_cc_mean: Mean value of CC within arc
    #zdr_max: Maximum value of Zdr within arc
    #zdr_masks: Mask array of zdr arc
    #zdr_outlines: Zdr arc contour array
    #ax: Subplot object to be built on with each contour
    #f: Placefile, edited throughout the program
    return zdr_storm_lon, zdr_storm_lat, zdr_dist, zdr_forw, zdr_back, zdr_areas, zdr_centroid_lon, zdr_centroid_lat, zdr_mean, zdr_cc_mean, zdr_max, zdr_masks, zdr_outlines, ax, f
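The area test in zdrarc attaches pint units to a projected polygon area before comparing it to a threshold; a tiny hedged sketch of just that unit handling, with the area value made up for illustration:

from metpy.units import units

area_m2 = 2.5e6  # hypothetical value; the code above gets this from transform(proj, polygon).area
pr_area = (area_m2 * units('m^2')).to('km^2')
print(pr_area, pr_area > 1 * units('km^2'))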
Code example #49
# Grab lat/lon values (GFS will be 1D)
lat = ds.lat.data
lon = ds.lon.data

# Set subset slice for the geographic extent of data to limit download
lon_slice = slice(400, 701)
lat_slice = slice(10, 160)

# Subset lat/lon values
lons = lon[lon_slice]
lats = lat[lat_slice]

# Grab the pressure levels and select the data to be imported
# Need all pressure levels for Temperatures, U and V Wind, and Rel. Humidity
# Smooth with the gaussian filter from scipy
pres = ds['isobaric3'].data[:] * units('Pa')

tmpk_var = ds['Temperature_isobaric'].data[0, :, lat_slice, lon_slice]
tmpk = gaussian_filter(tmpk_var, sigma=1.0) * units.K
thta = mpcalc.potential_temperature(pres[:, None, None], tmpk)

uwnd_var = ds['u-component_of_wind_isobaric'].data[0, :, lat_slice, lon_slice]
vwnd_var = ds['v-component_of_wind_isobaric'].data[0, :, lat_slice, lon_slice]
uwnd = gaussian_filter(uwnd_var, sigma=1.0) * units('m/s')
vwnd = gaussian_filter(vwnd_var, sigma=1.0) * units('m/s')

# Create a clean datetime object for plotting based on time of Geopotential heights
vtime = datetime.strptime(str(ds.time.data[0].astype('datetime64[ms]')),
                          '%Y-%m-%dT%H:%M:%S.%f')

######################################################################
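A small hedged sketch of the potential_temperature call above, applied to scalar quantities:

import metpy.calc as mpcalc
from metpy.units import units

theta = mpcalc.potential_temperature(850. * units.hPa, 283. * units.K)
print(theta)  # roughly 296 K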
Code example #50
# Remove all missing data from pressure
x_masked, y_masked, pres = remove_nan_observations(xp, yp, data['slp'].values)

###########################################
# Interpolate pressure using Cressman interpolation
slpgridx, slpgridy, slp = interpolate_to_grid(x_masked,
                                              y_masked,
                                              pres,
                                              interp_type='cressman',
                                              minimum_neighbors=1,
                                              search_radius=400000,
                                              hres=100000)

##########################################
# Get wind information and mask where either speed or direction is unavailable
wind_speed = (data['wind_speed'].values * units('m/s')).to('knots')
wind_dir = data['wind_dir'].values * units.degree

good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))

x_masked = xp[good_indices]
y_masked = yp[good_indices]
wind_speed = wind_speed[good_indices]
wind_dir = wind_dir[good_indices]

###########################################
# Calculate u and v components of wind and then interpolate both.
#
# Both will have the same underlying grid so throw away grid returned from v interpolation.
u, v = wind_components(wind_speed, wind_dir)
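A hedged sketch of how the interpolation step might continue for the wind components, reusing the interpolate_to_grid keywords shown above for pressure; this is not self-contained (it reuses x_masked, y_masked, u and v from the snippet) and the variable names are assumptions:

# Hypothetical continuation: interpolate u and v onto the same Cressman grid,
# keeping only one copy of the grid coordinates, as the comment above suggests.
windgridx, windgridy, uwind = interpolate_to_grid(x_masked, y_masked, u.magnitude,
                                                  interp_type='cressman',
                                                  minimum_neighbors=1,
                                                  search_radius=400000, hres=100000)
_, _, vwind = interpolate_to_grid(x_masked, y_masked, v.magnitude,
                                  interp_type='cressman',
                                  minimum_neighbors=1,
                                  search_radius=400000, hres=100000)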
Code example #51
File: pwpp.py Project: tjwixtrom/adaptive_WRF
#!/home/twixtrom/miniconda3/envs/wrfpost/bin/python
import sys
from PWPP import wrfpost
import numpy as np
from metpy.units import units
# import xarray as xr

infile = sys.argv[1]
outfile = sys.argv[2]
variables = [
    'temp', 'dewpt', 'uwnd', 'vwnd', 'wwnd', 'avor', 'height', 'temp_2m',
    'dewpt_2m', 'mslp', 'q_2m', 'u_10m', 'v_10m', 'timestep_pcp', 'UH', 'cape',
    'refl'
]

plevs = np.array([1000, 925, 850, 700, 500, 300, 250]) * units('hPa')
# chunks = {'Time': 1}
wrfpost(infile,
        outfile,
        variables,
        plevs=plevs,
        compression=True,
        complevel=4,
        format='NETCDF4')
Code example #52
interruptor = 'on'
if interruptor == 'on':
    for dd in range(0, 10):
        ######################## READING THE DATA #########################
        ## Load the data from the page
        datos = Dataset(enlace, 'r')
        lat = datos.variables[u'lat'][120:300]
        lon = datos.variables[u'lon'][1080:1320]
        hgt = datos.variables['hgtprs'][dd, 12, 120:300, 1080:1320]
        ugrd = datos.variables['ugrdprs'][dd, 12, 120:300, 1080:1320]
        vgrd = datos.variables['vgrdprs'][dd, 12, 120:300, 1080:1320]
        datos.close()

        ## compute the vorticity
        dx, dy = mpcalc.lat_lon_grid_deltas(lon, lat)
        f = mpcalc.coriolis_parameter(np.deg2rad(lat)).to(units('1/sec'))
        vor = mpcalc.vorticity(ugrd, vgrd, dx, dy, dim_order='yx')
        #vor = ndimage.gaussian_filter(vor, sigma=3, order=0) * units('1/sec')

        ####### PLOTTING ########
        fig = plt.figure(figsize=(15, 12))
        ax1 = plt.subplot(1, 1, 1, projection=ccrs.PlateCarree())
        ax1.set_extent([lon[0], lon[len(lon) - 1], lat[0], lat[len(lat) - 1]],
                       crs=ccrs.PlateCarree())
        # geopotential height in contours
        cs_1 = ax1.contour(lon,
                           lat,
                           hgt,
                           30,
                           colors='black',
                           transform=ccrs.PlateCarree())
Code example #53
data = fulldata[reduce_point_density(point_locs, 300000.)]

###########################################
# Now that we have the data we want, we need to perform some conversions:
#
# - Get a list of strings for the station IDs
# - Get wind components from speed and direction
# - Convert cloud fraction values to integer codes [0 - 8]
# - Map METAR weather codes to WMO codes for weather symbols

# Get all of the station IDs as a list of strings
stid = [s.decode('ascii') for s in data['stid']]

# Get the wind components, converting from m/s to knots, as is appropriate
# for the station plot
u, v = get_wind_components((data['wind_speed'] * units('m/s')).to('knots'),
                           data['wind_dir'] * units.degree)

# Convert the fraction value into a code of 0-8 and compensate for NaN values,
# which can be used to pull out the appropriate symbol
cloud_frac = (8 * data['cloud_fraction'])
cloud_frac[np.isnan(cloud_frac)] = 10
cloud_frac = cloud_frac.astype(int)

# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
wx_text = [s.decode('ascii') for s in data['weather']]
wx_codes = {
    '': 0,
    'HZ': 5,
    'BR': 10,
Code example #54
# Our import for numpy and metpy
import numpy as np
import matplotlib.pyplot as plt
from metpy.units import units

# Here's the "data"
np.random.seed(19990503)  # Make sure we all have the same data
temp = (20 * np.cos(np.linspace(0, 2 * np.pi, 100)) +
        80 + 2 * np.random.randn(100)) * units.degF
rh = (np.abs(20 * np.cos(np.linspace(0, 4 * np.pi, 100)) +
              50 + 5 * np.random.randn(100))) * units('percent')

# Create a mask for the two conditions described above
good_heat_index = (temp >= 80 * units.degF) & (rh >= 40 * units.percent)

# Use this mask to grab the temperature and relative humidity values that together
# will give good heat index values
hi_temps = temp[good_heat_index]
hi_rh = rh[good_heat_index]

# BONUS POINTS: Plot only the data where heat index is defined by
# inverting the mask (using `~mask`) and setting invalid values to np.nan
temp[~good_heat_index] = np.nan
rh[~good_heat_index] = np.nan

plt.plot(temp.m, color='tab:red')
plt.plot(rh.m, color='tab:green')
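Having masked down to the values where the heat index is defined, one could hand them to MetPy's heat index calculation; a hedged follow-up (mpcalc.heat_index is assumed here, it is not part of the snippet above, and this continues the snippet rather than standing alone):

# Hypothetical continuation: compute and plot the heat index for the masked values.
import metpy.calc as mpcalc

hi = mpcalc.heat_index(hi_temps, hi_rh)
plt.plot(hi.to('degF').m, color='tab:purple')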
Code example #55
def hail_objects(hailc, REF_Hail2, ax, f, time_start, month, d_beg, h_beg,
                 min_beg, sec_beg, d_end, h_end, min_end, sec_end, rlons,
                 rlats, max_lons_c, max_lats_c, proj):
    #Inputs,
    #REF_Hail2: REFmasked masked where Zdr and CC greater than 1.0
    #hailc: Contour of REF_Hail2 where reflectivity greater than 50.0 dBz
    #ax: Subplot object to be built on with each contour
    #f: Placefile, edited throughout the program
    #time_start: Radar file date and time of scan
    #month: Month of case, supplied by user
    #d_beg,h_beg,min_beg,sec_beg,d_end,h_end,min_end,sec_end: Day, hour, minute, second of the beginning and end of a scan
    #rlons,rlats: Full volume geographic coordinates, longitude and latitude respectively
    #max_lons_c,max_lats_c: Centroid coordinates of storm objects
    #proj: Projection of Earth's surface to be used for accurate area and distance calculations
    hail_areas = []
    hail_centroid_lon = []
    hail_centroid_lat = []
    hail_storm_lon = []
    hail_storm_lat = []
    if np.max(REF_Hail2) > 50.0:
        for level in hailc.collections:
            for contour_poly in level.get_paths():
                for n_contour, contour in enumerate(
                        contour_poly.to_polygons()):
                    contour_a = np.asarray(contour[:])
                    xa = contour_a[:, 0]
                    ya = contour_a[:, 1]
                    polygon_new = geometry.Polygon([(i[0], i[1])
                                                    for i in zip(xa, ya)])
                    if n_contour == 0:
                        polygon = polygon_new
                    else:
                        polygon = polygon.difference(polygon_new)

                pr_area = (transform(proj, polygon).area *
                           units('m^2')).to('km^2')
                boundary = np.asarray(polygon.boundary.xy)
                polypath = Path(boundary.transpose())
                coord_map = np.vstack(
                    (rlons[0, :, :].flatten(), rlats[0, :, :].flatten())).T
                #Create an Mx2 array listing all the coordinates in field
                mask_hail = polypath.contains_points(coord_map).reshape(
                    rlons[0, :, :].shape)

                if pr_area > 2 * units('km^2'):
                    g = Geod(ellps='sphere')
                    dist_hail = np.zeros((np.asarray(max_lons_c).shape[0]))
                    for i in range(dist_hail.shape[0]):
                        distance_hail = g.inv(polygon.centroid.x,
                                              polygon.centroid.y,
                                              max_lons_c[i], max_lats_c[i])
                        dist_hail[i] = distance_hail[2] / 1000.
                    if np.min(np.asarray(dist_hail)) < 15.0:
                        hail_path = polypath
                        hail_areas.append((pr_area))
                        hail_centroid_lon.append((polygon.centroid.x))
                        hail_centroid_lat.append((polygon.centroid.y))
                        hail_storm_lon.append((max_lons_c[np.where(
                            dist_hail == np.min(dist_hail))[0][0]]))
                        hail_storm_lat.append((max_lats_c[np.where(
                            dist_hail == np.min(dist_hail))[0][0]]))
                        patch = PathPatch(polypath,
                                          facecolor='gold',
                                          alpha=.7,
                                          edgecolor='gold',
                                          linewidth=4)
                        ax.add_patch(patch)
                        #Add polygon to placefile
                        f.write('TimeRange: ' + str(time_start.year) + '-' +
                                str(month) + '-' + str(d_beg) + 'T' +
                                str(h_beg) + ':' + str(min_beg) + ':' +
                                str(sec_beg) + 'Z ' + str(time_start.year) +
                                '-' + str(month) + '-' + str(d_end) + 'T' +
                                str(h_end) + ':' + str(min_end) + ':' +
                                str(sec_end) + 'Z')
                        f.write('\n')
                        f.write("Color: 245 242 066 \n")
                        f.write('Line: 3, 0, "Hail Core Outline" \n')
                        for i in range(len(hail_path.vertices)):
                            f.write("%.5f" % (hail_path.vertices[i][1]))
                            f.write(", ")
                            f.write("%.5f" % (hail_path.vertices[i][0]))
                            f.write('\n')
                        f.write("End: \n \n")
    #Returning Variables,
    #hail_areas: Hail core area
    #hail_centroid_lon,hail_centroid_lat: Hail core centroid coordinates
    #hail_storm_lon,hail_storm_lat: Storm object centroids associated with the hail core
    #ax: Subplot object to be built on with each contour
    #f: Placefile, edited throughout the program
    return hail_areas, hail_centroid_lon, hail_centroid_lat, hail_storm_lon, hail_storm_lat, ax, f
Code example #56
File: p78.py Project: stormchas4/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn("asos")
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    month = ctx["month"]

    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
    else:
        ts = datetime.datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
        # make sure it is length two for the trick below in SQL
        months = [ts.month, 999]

    df = read_sql(
        """
        SELECT tmpf::int as tmpf, dwpf, relh,
        coalesce(mslp, alti * 33.8639, 1013.25) as slp
        from alldata where station = %s
        and drct is not null and dwpf is not null and dwpf <= tmpf
        and sknt > 3 and drct::int %% 10 = 0
        and extract(month from valid) in %s
        and report_type = 2
    """,
        pgconn,
        params=(station, tuple(months)),
    )
    if df.empty:
        raise NoDataFound("No Data Found.")
    # Convert sea level pressure to station pressure
    df["pressure"] = mcalc.add_height_to_pressure(
        df["slp"].values * units("millibars"),
        ctx["_nt"].sts[station]["elevation"] * units("m"),
    ).to(units("millibar"))
    # compute mixing ratio
    df["mixingratio"] = mcalc.mixing_ratio_from_relative_humidity(
        df["relh"].values * units("percent"),
        df["tmpf"].values * units("degF"),
        df["pressure"].values * units("millibars"),
    )
    # compute pressure
    df["vapor_pressure"] = mcalc.vapor_pressure(
        df["pressure"].values * units("millibars"),
        df["mixingratio"].values * units("kg/kg"),
    ).to(units("kPa"))

    means = df.groupby("tmpf").mean().copy()
    # compute dewpoint now
    means["dwpf"] = (
        mcalc.dewpoint(means["vapor_pressure"].values * units("kPa"))
        .to(units("degF"))
        .m
    )
    means.reset_index(inplace=True)
    # compute RH again
    means["relh"] = (
        mcalc.relative_humidity_from_dewpoint(
            means["tmpf"].values * units("degF"),
            means["dwpf"].values * units("degF"),
        )
        * 100.0
    )

    (fig, ax) = plt.subplots(1, 1, figsize=(8, 6))
    ax.bar(
        means["tmpf"].values - 0.5,
        means["dwpf"].values - 0.5,
        ec="green",
        fc="green",
        width=1,
    )
    ax.grid(True, zorder=11)
    ab = ctx["_nt"].sts[station]["archive_begin"]
    if ab is None:
        raise NoDataFound("Unknown station metadata.")
    ax.set_title(
        (
            "%s [%s]\nAverage Dew Point by Air Temperature (month=%s) "
            "(%s-%s)\n"
            "(must have 3+ hourly observations at the given temperature)"
        )
        % (
            ctx["_nt"].sts[station]["name"],
            station,
            month.upper(),
            ab.year,
            datetime.datetime.now().year,
        ),
        size=10,
    )

    ax.plot([0, 140], [0, 140], color="b")
    ax.set_ylabel("Dew Point [F]")
    y2 = ax.twinx()
    y2.plot(means["tmpf"].values, means["relh"].values, color="k")
    y2.set_ylabel("Relative Humidity [%] (black line)")
    y2.set_yticks([0, 5, 10, 25, 50, 75, 90, 95, 100])
    y2.set_ylim(0, 100)
    ax.set_ylim(0, means["tmpf"].max() + 2)
    ax.set_xlim(0, means["tmpf"].max() + 2)
    ax.set_xlabel(r"Air Temperature $^\circ$F")

    return fig, means[["tmpf", "dwpf", "relh"]]
Code example #57
File: test_basic.py Project: zhatin/MetPy
def test_windchill_kelvin():
    """Test wind chill when given Kelvin temperatures."""
    wc = windchill(268.15 * units.kelvin, 35 * units('m/s'))
    assert_almost_equal(wc, -18.9357 * units.degC, 0)
Code example #58
File: test_basic.py Project: zhatin/MetPy
def test_coriolis_units():
    """Test that coriolis returns units of 1/second."""
    f = coriolis_parameter(50 * units.degrees)
    assert f.units == units('1/second')
Code example #59
simple_layout.names()

###########################################
# Next grab the simple variables out of the data we have (attaching correct units), and
# put them into a dictionary that we will hand the plotting function later:

# This is our container for the data
data = dict()

# Copy out to stage everything together. In an ideal world, this would happen on
# the data reading side of things, but we're not there yet.
data['longitude'] = data_arr['lon']
data['latitude'] = data_arr['lat']
data['air_temperature'] = data_arr['air_temperature'] * units.degC
data['dew_point_temperature'] = data_arr['dew_point_temperature'] * units.degC
data['air_pressure_at_sea_level'] = data_arr['slp'] * units('mbar')

###########################################
# Notice that the names (the keys) in the dictionary are the same as those that the
# layout is expecting.
#
# Now perform a few conversions:
#
# - Get wind components from speed and direction
# - Convert cloud fraction values to integer codes [0 - 8]
# - Map METAR weather codes to WMO codes for weather symbols

# Get the wind components, converting from m/s to knots, as is appropriate
# for the station plot
u, v = get_wind_components(data_arr['wind_speed'] * units('m/s'),
                           data_arr['wind_dir'] * units.degree)
Code example #60
File: Simple_Sounding.py Project: andyli386/MetPy
# Add the MetPy logo!
fig = plt.gcf()
add_metpy_logo(fig, 115, 100)

###########################################

# Example of defining your own vertical barb spacing
skew = SkewT()

# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')

# Set spacing interval--Every 50 mb from 1000 to 100 mb
my_interval = np.arange(100, 1000, 50) * units('mbar')

# Get indexes of values closest to defined interval
ix = mpcalc.resample_nn_1d(p, my_interval)

# Plot only values nearest to defined interval values
skew.plot_barbs(p[ix], u[ix], v[ix])

# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()
skew.ax.set_ylim(1000, 100)

# Add the MetPy logo!
fig = plt.gcf()