Example #1
File: testing.py Project: akrherz/MetPy
def get_upper_air_data(date, station):
    """Get upper air observations from the test data cache.

    Parameters
    ----------
    date : datetime
        The date and time of the desired observation.
    station : str
        The three letter ICAO identifier of the station for which data should be
        downloaded.

    Returns
    -------
    dict
        Upper air data

    """
    sounding_key = '{0:%Y-%m-%dT%HZ}_{1:}'.format(date, station)
    sounding_files = {'2016-05-22T00Z_DDC': 'may22_sounding.txt',
                      '2013-01-20T12Z_OUN': 'jan20_sounding.txt',
                      '1999-05-04T00Z_OUN': 'may4_sounding.txt',
                      '2002-11-11T00Z_BNA': 'nov11_sounding.txt',
                      '2010-12-09T12Z_BOI': 'dec9_sounding.txt'}

    fname = sounding_files[sounding_key]
    fobj = get_test_data(fname)

    def to_float(s):
        # Remove all whitespace and replace empty values with NaN
        if not s.strip():
            s = 'nan'
        return float(s)

    # Skip dashes, column names, units, and more dashes
    for _ in range(4):
        fobj.readline()

    # Initiate lists for variables
    arr_data = []

    # Read all lines of data and append to lists only if there is some data
    for row in fobj:
        level = to_float(row[0:7])
        values = (to_float(row[7:14]), to_float(row[14:21]), to_float(row[21:28]),
                  to_float(row[42:49]), to_float(row[49:56]))

        if any(np.invert(np.isnan(values[1:]))):
            arr_data.append((level,) + values)

    p, z, t, td, direc, spd = np.array(arr_data).T

    p = p * units.hPa
    z = z * units.meters
    t = t * units.degC
    td = td * units.degC
    direc = direc * units.degrees
    spd = spd * units.knots

    u, v = wind_components(spd, direc)

    return {'pressure': p, 'height': z, 'temperature': t,
            'dewpoint': td, 'direction': direc, 'speed': spd, 'u_wind': u, 'v_wind': v}
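
A quick usage sketch (hypothetical call; only the date/station pairs listed in sounding_files above are available in the test data cache):

from datetime import datetime

# '1999-05-04T00Z_OUN' is one of the cached soundings
data = get_upper_air_data(datetime(1999, 5, 4, 0), 'OUN')
print(data['pressure'][:5])
print(data['u_wind'][:5], data['v_wind'][:5])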
Example #2
File: test_basic.py Project: akrherz/MetPy
def test_wind_comps_with_north_and_calm():
    """Test that the wind component calculation handles northerly and calm winds."""
    speed = np.array([0, 5, 5]) * units.mph
    dirs = np.array([0, 360, 0]) * units.deg

    u, v = wind_components(speed, dirs)

    true_u = np.array([0, 0, 0]) * units.mph
    true_v = np.array([0, -5, -5]) * units.mph

    assert_array_almost_equal(true_u, u, 4)
    assert_array_almost_equal(true_v, v, 4)
Example #3
def test_wind_comps_with_north_and_calm(array_type):
    """Test that the wind component calculation handles northerly and calm winds."""
    mask = [False, True, False]
    speed = array_type([0, 5, 5], 'mph', mask=mask)
    dirs = array_type([0, 360, 0], 'deg', mask=mask)

    u, v = wind_components(speed, dirs)

    true_u = array_type([0, 0, 0], 'mph', mask=mask)
    true_v = array_type([0, -5, -5], 'mph', mask=mask)

    assert_array_almost_equal(true_u, u, 4)
    assert_array_almost_equal(true_v, v, 4)
Example #4
def test_wind_comps_basic():
    """Test the basic wind component calculation."""
    speed = np.array([4, 4, 4, 4, 25, 25, 25, 25, 10.]) * units.mph
    dirs = np.array([0, 45, 90, 135, 180, 225, 270, 315, 360]) * units.deg
    s2 = np.sqrt(2.)

    u, v = wind_components(speed, dirs)

    true_u = np.array([0, -4 / s2, -4, -4 / s2, 0, 25 / s2, 25, 25 / s2, 0]) * units.mph
    true_v = np.array([-4, -4 / s2, 0, 4 / s2, 25, 25 / s2, 0, -25 / s2, -10]) * units.mph

    assert_array_almost_equal(true_u, u, 4)
    assert_array_almost_equal(true_v, v, 4)
Example #5
def test_speed_direction_roundtrip():
    """Test round-tripping between speed/direction and components."""
    # Test each quadrant of the whole circle
    wspd = np.array([15., 5., 2., 10.]) * units.meters / units.seconds
    wdir = np.array([160., 30., 225., 350.]) * units.degrees

    u, v = wind_components(wspd, wdir)

    wdir_out = wind_direction(u, v)
    wspd_out = wind_speed(u, v)

    assert_array_almost_equal(wspd, wspd_out, 4)
    assert_array_almost_equal(wdir, wdir_out, 4)
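
For reference, a minimal NumPy sketch of the inverse relations this round trip relies on, assuming the usual meteorological convention (direction is the bearing the wind blows from, measured clockwise from north); this mirrors what wind_speed and wind_direction compute, calm-wind handling aside:

import numpy as np

u, v = -2.0, 3.0                                   # arbitrary components, m/s
speed = np.hypot(u, v)                             # sqrt(u**2 + v**2)
direction = np.degrees(np.arctan2(-u, -v)) % 360   # ~146.31 deg
print(speed, direction)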
Example #6
File: test_basic.py Project: akrherz/MetPy
def test_wind_comps_basic():
    """Test the basic wind component calculation."""
    speed = np.array([4, 4, 4, 4, 25, 25, 25, 25, 10.]) * units.mph
    dirs = np.array([0, 45, 90, 135, 180, 225, 270, 315, 360]) * units.deg
    s2 = np.sqrt(2.)

    u, v = wind_components(speed, dirs)

    true_u = np.array([0, -4 / s2, -4, -4 / s2, 0, 25 / s2, 25, 25 / s2, 0]) * units.mph
    true_v = np.array([-4, -4 / s2, 0, 4 / s2, 25, 25 / s2, 0, -25 / s2, -10]) * units.mph

    assert_array_almost_equal(true_u, u, 4)
    assert_array_almost_equal(true_v, v, 4)
Example #7
File: test_basic.py Project: akrherz/MetPy
def test_speed_direction_roundtrip():
    """Test round-tripping between speed/direction and components."""
    # Test each quadrant of the whole circle
    wspd = np.array([15., 5., 2., 10.]) * units.meters / units.seconds
    wdir = np.array([160., 30., 225., 350.]) * units.degrees

    u, v = wind_components(wspd, wdir)

    wdir_out = wind_direction(u, v)
    wspd_out = wind_speed(u, v)

    assert_array_almost_equal(wspd, wspd_out, 4)
    assert_array_almost_equal(wdir, wdir_out, 4)
Example #8
def main(argv):
    """Go Main Go."""
    year = utc().year if len(argv) == 1 else int(argv[1])
    dbconn = get_dbconn("postgis")
    cursor = dbconn.cursor()
    nt = NetworkTable("RAOB")
    df = read_sql(
        """
        select f.fid, f.station, pressure, dwpc, tmpc, drct, smps, height,
        levelcode from
        raob_profile_""" + str(year) + """ p JOIN raob_flights f
        on (p.fid = f.fid) WHERE not computed and height is not null
        and pressure is not null
        ORDER by pressure DESC
    """,
        dbconn,
    )
    if df.empty or pd.isnull(df["smps"].max()):
        return
    u, v = wind_components(
        df["smps"].values * units("m/s"),
        df["drct"].values * units("degrees_north"),
    )
    df["u"] = u.to(units("m/s")).m
    df["v"] = v.to(units("m/s")).m
    count = 0
    progress = tqdm(df.groupby("fid"), disable=not sys.stdout.isatty())
    for fid, gdf in progress:
        progress.set_description("%s %s" % (year, fid))
        try:
            do_profile(cursor, fid, gdf, nt)
        except (RuntimeError, ValueError, IndexError) as exp:
            LOG.debug(
                "Profile %s fid: %s failed calculation %s",
                gdf.iloc[0]["station"],
                fid,
                exp,
            )
            cursor.execute(
                """
                UPDATE raob_flights SET computed = 't' WHERE fid = %s
            """,
                (fid, ),
            )
        if count % 100 == 0:
            cursor.close()
            dbconn.commit()
            cursor = dbconn.cursor()
        count += 1
    cursor.close()
    dbconn.commit()
Example #9
def plot_skewt(df):
    # We will pull the data out of the example dataset into individual variables
    # and assign units.
    hght = df['height'].values * units.m  # heights are in meters
    p = df['pressure'].values * units.hPa
    T = df['temperature'].values * units.degC
    Td = df['dewpoint'].values * units.degC
    wind_speed = df['speed'].values * units.knots
    wind_dir = df['direction'].values * units.degrees
    u, v = mpcalc.wind_components(wind_speed, wind_dir)

    # Create a new figure. The dimensions here give a good aspect ratio.
    fig = plt.figure(figsize=(9, 12))
    skew = SkewT(fig, rotation=45)

    # Plot the data using normal plotting functions, in this case using
    # log scaling in Y, as dictated by the typical meteorological plot
    skew.plot(p, T, 'r')
    skew.plot(p, Td, 'g')
    skew.plot_barbs(p, u, v)
    skew.ax.set_ylim(1000, 100)
    skew.ax.set_xlim(-40, 60)

    # Calculate LCL height and plot as black dot
    lcl_pressure, lcl_temperature = mpcalc.lcl(p[0], T[0], Td[0])
    skew.plot(lcl_pressure, lcl_temperature, 'ko', markerfacecolor='black')

    # Calculate full parcel profile and add to plot as black line
    prof = mpcalc.parcel_profile(p, T[0], Td[0]).to('degC')
    skew.plot(p, prof, 'k', linewidth=2)

    # An example of a slanted line at constant T -- in this case the 0
    # isotherm
    skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)

    # Add the relevant special lines
    skew.plot_dry_adiabats()
    skew.plot_moist_adiabats()
    skew.plot_mixing_lines()

    # Create a hodograph
    ax_hod = inset_axes(skew.ax, '40%', '40%', loc=2)
    h = Hodograph(ax_hod, component_range=80.)
    h.add_grid(increment=20)
    h.plot_colormapped(u, v, hght)

    return skew
Example #10
def getDataFrame_UpperAir(model, station, init, hour):
    bufrData = BUFKIT.getBufkitData(model, station, init)

    if bufrData is not False:  # Verify data found
        modelSoundings = bufrData.SoundingParameters
        surfaceData = bufrData.SurfaceParameters[hour]
        soundingData = modelSoundings[hour + 1]

        z = [0 * units.meters]
        p = [surfaceData.pres]
        T = [surfaceData.t2ms]
        Td = [surfaceData.td2m]
        #Tw = [mpcalc.wet_bulb_temperature(surfaceData.pres, surfaceData.t2ms, surfaceData.td2m)]
        theta = [
            mpcalc.potential_temperature(surfaceData.pres, surfaceData.t2ms)
        ]
        theta_e = [
            mpcalc.equivalent_potential_temperature(surfaceData.pres,
                                                    surfaceData.t2ms,
                                                    surfaceData.td2m)
        ]
        u = [surfaceData.uwnd]
        v = [surfaceData.vwnd]

        for level in soundingData:
            z.append(level.hght)
            p.append(level.pres)
            T.append(level.tmpc)
            Td.append(level.dwpc)
            #Tw.append(mpcalc.wet_bulb_temperature(level.pres, level.tmpc, level.dwpc))
            theta.append(mpcalc.potential_temperature(level.pres, level.tmpc))
            theta_e.append(
                mpcalc.equivalent_potential_temperature(
                    level.pres, level.tmpc, level.dwpc))
            uv = mpcalc.wind_components(level.sknt, level.drct)
            u.append(uv[0])
            v.append(uv[1])

        return pd.DataFrame(list(zip(z, p, T, Td, theta, theta_e, u, v)),
                            columns=[
                                'height', 'pressure', 'temperature',
                                'dewpoint', 'theta', 'theta_e', 'u_wind',
                                'v_wind'
                            ])
    else:
        return False
Example #11
def plot_metpy(data, title: str = "", saveplot: Path = None):
    """ TODO DOC

    :param data: TODO DOC
    :param title: The title of the graph
    :param saveplot: The path of file to save the plot
    """

    # Convert data into a suitable format for metpy.
    _altitude = data[:, 0] * units('m')
    p = mpcalc.height_to_pressure_std(_altitude)
    T = data[:, 3] * units.degC
    Td = data[:, 4] * units.degC
    wind_speed = data[:, 1] * units('m/s')
    wind_direction = data[:, 2] * units.degrees
    u, v = mpcalc.wind_components(wind_speed, wind_direction)

    fig = plt.figure(figsize=(6, 8))
    skew = SkewT(fig=fig)
    skew.plot(p, T, 'r')
    skew.plot(p, Td, 'g')

    my_interval = np.arange(300, 1000, 50) * units('mbar')
    ix = mpcalc.resample_nn_1d(p, my_interval)
    skew.plot_barbs(p[ix], u[ix], v[ix])
    skew.ax.set_ylim(1000, 300)
    skew.ax.set_xlim(-40, 30)
    skew.plot_dry_adiabats()

    heights = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9]) * units.km
    std_pressures = mpcalc.height_to_pressure_std(heights)
    for height_tick, p_tick in zip(heights, std_pressures):
        trans, _, _ = skew.ax.get_yaxis_text1_transform(0)
        skew.ax.text(0.02,
                     p_tick,
                     '---{:~d}'.format(height_tick),
                     transform=trans)

    plt.title("Sounding: {}".format(title))

    if saveplot is not None:
        fig.savefig(str(saveplot), bbox_inches='tight')
Example #12
def download_alldata(startts=datetime.datetime(2012, 8, 1),
                     endts=datetime.datetime(2012, 9, 1)):
    """An alternative method that fetches all available data.
    The service supports up to 24 hours' worth of data at a time."""
    # timestamps in UTC to request data for
    #startts = datetime.datetime(2012, 8, 1)
    #endts = datetime.datetime(2012, 9, 1)
    interval = datetime.timedelta(hours=24)

    service = SERVICE + "data=all&tz=Etc/UTC&format=comma&latlon=yes&elev=yes&trace=0.0001&"

    now = startts
    while now < endts:
        thisurl = service
        thisurl += now.strftime("year1=%Y&month1=%m&day1=%d&")
        thisurl += (now + interval).strftime("year2=%Y&month2=%m&day2=%d&")
        print("Downloading: %s" % (now, ))
        data = download_data(thisurl)
        df = pd.read_csv(data,
                         skiprows=5,
                         header=0,
                         parse_dates=['valid'],
                         na_values=['M'],
                         low_memory=False)
        # Convert wx codes to numeric values for plotting purposes
        df['present_weather'] = wx_code_to_numeric(df['wxcodes'].fillna(''))
        # Convert cloud cover strings to numeric values for plotting purposes
        df['cloud_cover'] = df.skyc1.fillna('').apply(get_cloud_cover)
        # Compute the u (eastward_wind) and v(northward_wind) components
        df['eastward_wind'], df['northward_wind'] = mpcalc.wind_components(
            df.sknt.values * units.knots, df.drct.values * units.degree)
        # Compute the MSLP from the Altimeter reading using MetPy calculation
        df['air_pressure_at_sea_level'] = mpcalc.altimeter_to_sea_level_pressure(
            df.alti.values * units.inHg, df.elevation.values * units.meters,
            df.tmpf.values * units.degF).to('hPa')
        # Save CSV version of archive and new variables
        df.to_csv(f'surface_data_{now:%Y%m%d}.csv', index=False)
        #outfn = "surface_data_%s.txt" % (now.strftime("%Y%m%d"),)
        #with open(outfn, "w") as fh:
        #    fh.write(data)
        now += interval
Example #13
def test_wind_comps_basic(array_type):
    """Test the basic wind component calculation."""
    mask = [False, True, False, True, False, True, False, True, False]
    speed = array_type([4, 4, 4, 4, 25, 25, 25, 25, 10.], 'mph', mask=mask)
    dirs = array_type([0, 45, 90, 135, 180, 225, 270, 315, 360],
                      'deg',
                      mask=mask)
    s2 = np.sqrt(2.)

    u, v = wind_components(speed, dirs)

    true_u = array_type([0, -4 / s2, -4, -4 / s2, 0, 25 / s2, 25, 25 / s2, 0],
                        'mph',
                        mask=mask)
    true_v = array_type(
        [-4, -4 / s2, 0, 4 / s2, 25, 25 / s2, 0, -25 / s2, -10],
        'mph',
        mask=mask)

    assert_array_almost_equal(true_u, u, 4)
    assert_array_almost_equal(true_v, v, 4)
Example #14
    def reader(self, sounding=None, wdShift=0):
        if sounding is None:
            logger.error("sounding data empty")
            return

        ## basic variables
        Time_released = sounding[self.soundingMeta["Time"]]
        P_released    = sounding[self.soundingMeta["P"]][:].to_numpy(dtype=float)
        T_released    = sounding[self.soundingMeta["T"]["name"]][:].to_numpy(dtype=float)
        Z_released    = sounding[self.soundingMeta["Z"]][:].to_numpy(dtype=float)
        RH_released   = sounding[self.soundingMeta["RH"]][:].to_numpy(dtype=float)
        lat_released  = sounding[self.soundingMeta["Lat"]][:].to_numpy(dtype=float)
        lon_released  = sounding[self.soundingMeta["Lon"]][:].to_numpy(dtype=float)
        WS_released   = sounding[self.soundingMeta["WS"]["name"]][:].to_numpy(dtype=float)
        WD_released   = ((sounding[self.soundingMeta["WD"]][:] - wdShift)%360).to_numpy(dtype=float)
        ## metpy cal
        TD_released   = dewpoint_from_relative_humidity(T_released * units(f'{self.soundingMeta["T"]["unit"]}'), RH_released/100).m
        U_released, V_released = wind_components(WS_released * units(f'{self.soundingMeta["WS"]["unit"]}'), WD_released * units.degrees)

        # P, T, Z, TD, RH, U, V, lat, lon
        data      = np.asarray([P_released,T_released,Z_released,TD_released,RH_released,U_released.to('m/s').m,V_released.to('m/s').m,lat_released,lon_released])
        dataFrame = DataFrame(np.transpose(data),index=Time_released,columns=['P [hPa]','T [degC]','Z [m]','TD [degC]','RH [%]','U [m/s]','V [m/s]','Lat [o]','Lon [o]'])
        return dataFrame, {'P [hPa]':"hPa", 'T [degC]':"degC", 'Z [m]':'m', 'TD [degC]':"degC", 'RH [%]':"percent", 'U [m/s]':'m/s', 'V [m/s]':'m/s', 'Lat [o]':'deg', 'Lon [o]':'deg'}
Example #15
def add_wind_components_to_dataset(dataset):
    """
    Input :

        dataset : xarray dataset
    
    Output :

        dataset : xarray dataset
                  Original dataset, with added variables 'u_wind' and 'v_wind' calculated
                  from wind_speed and wind_direction of the given dataset
    
    Function to compute u and v components of wind, from wind speed and direction in the given dataset,
    and add them as variables to the dataset.                   
    """
    u, v = mpcalc.wind_components(
        dataset.wspd.values * units["m/s"],
        dataset.wdir.values * units.deg,
    )

    dataset["u"] = (dataset.p.dims, u.magnitude)
    dataset["v"] = (dataset.p.dims, v.magnitude)

    return dataset
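
A minimal usage sketch, assuming the helper above is importable and the dataset carries wspd, wdir, and p variables on a shared dimension (the values below are hypothetical):

import xarray as xr

ds = xr.Dataset({
    'p': ('level', [1000.0, 850.0, 500.0]),     # hPa
    'wspd': ('level', [5.0, 10.0, 20.0]),       # m/s
    'wdir': ('level', [180.0, 225.0, 270.0]),   # degrees
})
ds = add_wind_components_to_dataset(ds)
print(ds['u'].values, ds['v'].values)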
Example #16
def get_sounding(file=None):

    if file is None:
        file = './KGRB_2019072000_raob.txt'
        df = pd.read_fwf(file,
                         skiprows=1,
                         usecols=[1, 6, 7],
                         names=['height', 'direction', 'speed'])
        wind_dir = df['direction'].values * units.degrees
        wind_spd = df['speed'].values * 0.514 * units('m/s')  # knots -> m/s
        u, v = mpcalc.wind_components(wind_spd, wind_dir)

        for n in np.arange(u.shape[0]):
            print("%8.1f  %6.3f  %6.3f  %5.2f  %5.2f  %5.2f" %
                  (df['height'].values[n], df['direction'].values[n],
                   wind_spd[n].m, u[n].m, v[n].m))

        return Gridded_Field('sounding',
                             height=df['height'].values,
                             u_wind=u,
                             v_wind=v)
    else:
        print('No sounding found')
        return None
Example #17
        # Surface FLAG (null), tropopause FLAG (null), maximum-wind FLAG (null)
        df = pd.read_csv(dir_name + filename,
                         skiprows=1,
                         sep=',',
                         header=None,
                         index_col=0,
                         names=[
                             'site', 'time', 'pressure', 'height',
                             'temperature', 'dewpoint', 'direction', 'speed',
                             'FLAG1', 'FLAG2', 'FLAG3'
                         ],
                         skipfooter=0,
                         engine='python')

        df['u_wind'], df['v_wind'] = mpcalc.wind_components(
            df['speed'].values * units('m/s'),
            np.deg2rad(df['direction'].values * units.deg))

        # Drop any rows with all NaN values for T, Td, winds
        #df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed', 'u_wind', 'v_wind'),
        #how='all').reset_index(drop=True)
        df = df.dropna(subset=('temperature', 'dewpoint'),
                       how='all').reset_index(drop=True)
        #df = df.dropna(subset=('temperature', 'dewpoint', 'height'), how='all').reset_index(drop=True)

        # Rows that do not meet the condition alpha + num are eliminated
        s_start_date = datetime(obs_year, 1,
                                1)  #convert startdate to date type
        s_end_date = datetime(obs_year + 1, 1, 1)

        selected_times = []
Example #18
                                     dx,
                                     dy,
                                     dim_order='yx')

# Get the wind direction for each point
wdir = mpcalc.wind_direction(ugeo, vgeo)

# Compute the Gradient Wind via an approximation
dydx = mpcalc.first_derivative(Z, delta=dx, axis=1)
d2ydx2 = mpcalc.first_derivative(dydx, delta=dx, axis=1)
R = ((1 + dydx.m**2)**(3. / 2.)) / d2ydx2.m

geo_mag = mpcalc.wind_speed(ugeo, vgeo)
grad_mag = geo_mag.m - (geo_mag.m**2) / (f.magnitude * R)

ugrad, vgrad = mpcalc.wind_components(grad_mag * units('m/s'), wdir)

# Calculate Ageostrophic wind
uageo = ugrad - ugeo
vageo = vgrad - vgeo

# Compute QVectors
uqvect, vqvect = mpcalc.q_vector(ugeo, vgeo, T * units.degC, 500 * units.hPa,
                                 dx, dy)

# Calculate divergence of the ageostrophic wind
div = mpcalc.divergence(uageo, vageo, dx, dy, dim_order='yx')

# Calculate Relative Vorticity Advection
relvor = mpcalc.vorticity(ugeo, vgeo, dx, dy, dim_order='yx')
adv = mpcalc.advection(relvor, (ugeo, vgeo), (dx, dy), dim_order='yx')
Example #19
# Get wind information and mask where either speed or direction is unavailable
wind_speed = (data['wind_speed'].values * units('m/s')).to('knots')
wind_dir = data['wind_dir'].values * units.degree

good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))

x_masked = xp[good_indices]
y_masked = yp[good_indices]
wind_speed = wind_speed[good_indices]
wind_dir = wind_dir[good_indices]

###########################################
# Calculate u and v components of wind and then interpolate both.
#
# Both will have the same underlying grid so throw away grid returned from v interpolation.
u, v = wind_components(wind_speed, wind_dir)

windgridx, windgridy, uwind = interpolate_to_grid(x_masked,
                                                  y_masked,
                                                  np.array(u),
                                                  interp_type='cressman',
                                                  search_radius=400000,
                                                  hres=100000)

_, _, vwind = interpolate_to_grid(x_masked,
                                  y_masked,
                                  np.array(v),
                                  interp_type='cressman',
                                  search_radius=400000,
                                  hres=100000)
Example #20
# Use the cartopy map projection to transform station locations to the map and
# then refine the number of stations plotted by setting a 300km radius
point_locs = proj.transform_points(ccrs.PlateCarree(), data['lon'].values,
                                   data['lat'].values)
data = data[reduce_point_density(point_locs, 300000.)]

###########################################
# Now that we have the data we want, we need to perform some conversions:
#
# - Get wind components from speed and direction
# - Convert cloud fraction values to integer codes [0 - 8]
# - Map METAR weather codes to WMO codes for weather symbols

# Get the wind components, converting from m/s to knots, as is appropriate
# for the station plot.
u, v = wind_components((data['wind_speed'].values * units('m/s')).to('knots'),
                       data['wind_dir'].values * units.degree)

# Convert the fraction value into a code of 0-8 and compensate for NaN values,
# which can be used to pull out the appropriate symbol
cloud_frac = (8 * data['cloud_fraction'])
cloud_frac[np.isnan(cloud_frac)] = 10
cloud_frac = cloud_frac.astype(int)

# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
wx = [
    wx_code_map[s.split()[0] if ' ' in s else s]
    for s in data['weather'].fillna('')
]

###########################################
Example #21
def test_wind_comps_scalar():
    """Test wind components calculation with scalars."""
    u, v = wind_components(8 * units('m/s'), 150 * units.deg)
    assert_almost_equal(u, -4 * units('m/s'), 3)
    assert_almost_equal(v, 6.9282 * units('m/s'), 3)
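
The expected values follow directly from the convention u = -speed * sin(direction), v = -speed * cos(direction); a plain NumPy check of the scalar case above (no MetPy required):

import numpy as np

speed, wdir = 8.0, np.deg2rad(150.0)
u = -speed * np.sin(wdir)   # -8 * 0.5       -> -4.0 m/s
v = -speed * np.cos(wdir)   # -8 * (-0.8660) ->  6.9282 m/s
print(round(u, 4), round(v, 4))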
Example #22
def get_upper_air_data(date, station):
    """Get upper air observations from the test data cache.

    Parameters
    ----------
    date : datetime
        The date and time of the desired observation.
    station : str
        The three letter ICAO identifier of the station for which data should be
        downloaded.

    Returns
    -------
    dict
        Upper air data

    """
    sounding_key = '{:%Y-%m-%dT%HZ}_{}'.format(date, station)
    sounding_files = {
        '2016-05-22T00Z_DDC': 'may22_sounding.txt',
        '2013-01-20T12Z_OUN': 'jan20_sounding.txt',
        '1999-05-04T00Z_OUN': 'may4_sounding.txt',
        '2002-11-11T00Z_BNA': 'nov11_sounding.txt',
        '2010-12-09T12Z_BOI': 'dec9_sounding.txt'
    }

    fname = sounding_files[sounding_key]
    fobj = get_test_data(fname)

    def to_float(s):
        # Remove all whitespace and replace empty values with NaN
        if not s.strip():
            s = 'nan'
        return float(s)

    # Skip dashes, column names, units, and more dashes
    for _ in range(4):
        fobj.readline()

    # Initiate lists for variables
    arr_data = []

    # Read all lines of data and append to lists only if there is some data
    for row in fobj:
        level = to_float(row[0:7])
        values = (to_float(row[7:14]), to_float(row[14:21]),
                  to_float(row[21:28]), to_float(row[42:49]),
                  to_float(row[49:56]))

        if any(np.invert(np.isnan(values[1:]))):
            arr_data.append((level, ) + values)

    p, z, t, td, direc, spd = np.array(arr_data).T

    p = p * units.hPa
    z = z * units.meters
    t = t * units.degC
    td = td * units.degC
    direc = direc * units.degrees
    spd = spd * units.knots

    u, v = wind_components(spd, direc)

    return {
        'pressure': p,
        'height': z,
        'temperature': t,
        'dewpoint': td,
        'direction': direc,
        'speed': spd,
        'u_wind': u,
        'v_wind': v
    }
Example #23
def do(ts):
    """Process this date timestamp"""
    asos = get_dbconn("asos", user="******")
    iemaccess = get_dbconn("iem")
    icursor = iemaccess.cursor()
    table = "summary_%s" % (ts.year,)
    # Get what we currently know, just grab everything
    current = read_sql(
        """
        SELECT * from """
        + table
        + """ WHERE day = %s
    """,
        iemaccess,
        params=(ts.strftime("%Y-%m-%d"),),
        index_col="iemid",
    )
    df = read_sql(
        """
    select station, network, iemid, drct, sknt, gust,
    valid at time zone tzname as localvalid, valid,
    tmpf, dwpf, relh, feel,
    peak_wind_gust, peak_wind_drct, peak_wind_time,
    peak_wind_time at time zone tzname as local_peak_wind_time from
    alldata d JOIN stations t on (t.id = d.station)
    where (network ~* 'ASOS' or network = 'AWOS')
    and valid between %s and %s and t.tzname is not null
    and date(valid at time zone tzname) = %s
    ORDER by valid ASC
    """,
        asos,
        params=(
            ts - datetime.timedelta(days=2),
            ts + datetime.timedelta(days=2),
            ts.strftime("%Y-%m-%d"),
        ),
        index_col=None,
    )
    if df.empty:
        print("compute_daily no ASOS database entries for %s" % (ts,))
        return
    # derive some parameters
    df["u"], df["v"] = mcalc.wind_components(
        df["sknt"].values * munits.knots, df["drct"].values * munits.deg
    )
    df["localvalid_lag"] = df.groupby("iemid")["localvalid"].shift(1)
    df["timedelta"] = df["localvalid"] - df["localvalid_lag"]
    ndf = df[pd.isna(df["timedelta"])]
    df.loc[ndf.index.values, "timedelta"] = pd.to_timedelta(
        ndf["localvalid"].dt.hour * 3600.0
        + ndf["localvalid"].dt.minute * 60.0,
        unit="s",
    )
    df["timedelta"] = df["timedelta"] / np.timedelta64(1, "s")

    for iemid, gdf in df.groupby("iemid"):
        if len(gdf.index) < 6:
            # print(" Quorum not meet for %s" % (gdf.iloc[0]['station'], ))
            continue
        if iemid not in current.index:
            print(
                ("compute_daily Adding %s for %s %s %s")
                % (table, gdf.iloc[0]["station"], gdf.iloc[0]["network"], ts)
            )
            icursor.execute(
                """
                INSERT into """
                + table
                + """
                (iemid, day) values (%s, %s)
            """,
                (iemid, ts),
            )
            current.loc[iemid] = None
        newdata = {}
        currentrow = current.loc[iemid]
        compute_wind_gusts(gdf, currentrow, newdata)
        # take the nearest value
        ldf = gdf.copy().fillna(method="bfill").fillna(method="ffill")
        totsecs = ldf["timedelta"].sum()
        is_new(
            "avg_rh",
            clean((ldf["relh"] * ldf["timedelta"]).sum() / totsecs, 1, 100),
            currentrow,
            newdata,
        )
        is_new("min_rh", clean(ldf["relh"].min(), 1, 100), currentrow, newdata)
        is_new("max_rh", clean(ldf["relh"].max(), 1, 100), currentrow, newdata)

        uavg = (ldf["u"] * ldf["timedelta"]).sum() / totsecs
        vavg = (ldf["v"] * ldf["timedelta"]).sum() / totsecs
        is_new(
            "vector_avg_drct",
            clean(
                mcalc.wind_direction(uavg * munits.knots, vavg * munits.knots),
                0,
                360,
            ),
            currentrow,
            newdata,
        )
        is_new(
            "avg_sknt",
            clean((ldf["sknt"] * ldf["timedelta"]).sum() / totsecs, 0, 150),
            currentrow,
            newdata,
        )
        is_new(
            "max_feel",
            clean(ldf["feel"].max(), -150, 200),
            currentrow,
            newdata,
        )
        is_new(
            "avg_feel",
            clean((ldf["feel"] * ldf["timedelta"]).sum() / totsecs, -150, 200),
            currentrow,
            newdata,
        )
        is_new(
            "min_feel",
            clean(ldf["feel"].min(), -150, 200),
            currentrow,
            newdata,
        )
        if not newdata:
            continue
        cols = []
        args = []
        # print(gdf.iloc[0]['station'])
        for key, val in newdata.items():
            # print("  %s %s -> %s" % (key, currentrow[key], val))
            cols.append("%s = %%s" % (key,))
            args.append(val)
        args.extend([iemid, ts])

        sql = ", ".join(cols)

        icursor.execute(
            """
        UPDATE """
            + table
            + """
        SET """
            + sql
            + """
        WHERE
        iemid = %s and day = %s
        """,
            args,
        )
        if icursor.rowcount == 0:
            print(
                "compute_daily update of %s[%s] was 0"
                % (gdf.iloc[0]["station"], gdf.iloc[0]["network"])
            )

    icursor.close()
    iemaccess.commit()
    iemaccess.close()
Example #24
def main():
    ### START OF USER SETTINGS BLOCK ###
    # FILE/DATA SETTINGS
    # file path to input
    datafile = '/home/jgodwin/python/sfc_observations/surface_observations.txt'
    timefile = '/home/jgodwin/python/sfc_observations/validtime.txt'

    # MAP SETTINGS
    # map names (for tracking purposes)
    maps = ['CONUS','Texas','Floater 1']
    restart_domain = [True,False]
    # map boundaries
    west = [-120,-108,-108]
    east = [-70,-93,-85]
    south = [20,25,37]
    north = [50,38,52]

    # OUTPUT SETTINGS
    # save directory for output
    savedir = '/var/www/html/images/'
    # filenames ("_[variable].png" will be appended, so only a descriptor like "conus" is needed)
    savenames = ['conus','texas','floater1']

    # TEST MODE SETTINGS
    test = False
    testnum = 3

    ### END OF USER SETTINGS BLOCK ###

    for i in range(len(maps)):
        if test and i != testnum:
            continue
        print(maps[i])
        # create the map projection
        cenlon = (west[i] + east[i]) / 2.0
        cenlat = (south[i] + north[i]) / 2.0
        sparallel = cenlat
        if cenlat > 0:
            cutoff = -30
            flip = False
        elif cenlat < 0:
            cutoff = 30
            flip = True
        if restart_domain:
            to_proj = ccrs.LambertConformal(central_longitude=cenlon,central_latitude=cenlat,standard_parallels=[sparallel],cutoff=cutoff)
        # open the data
        vt = open(timefile).read()
        with open(datafile) as f:
            data = pd.read_csv(f,header=0,names=['siteID','lat','lon','elev','slp','temp','sky','dpt','wx','wdr',\
                'wsp'],na_values=-99999)

        # filter data by lat/lon
        data = data[(data['lat'] >= south[i]-2.0) & (data['lat'] <= north[i]+2.0) & (data['lon'] >= west[i]-2.0)\
            & (data['lon'] <= east[i]+2.0)]
        # remove questionable data
        data = data[(cToF(data['temp']) <= 120) & (cToF(data['dpt']) <= 80)]

        # project lat/lon onto final projection
        print("Creating map projection.")
        lon = data['lon'].values
        lat = data['lat'].values
        xp, yp, _ = to_proj.transform_points(ccrs.Geodetic(), lon, lat).T

        # remove missing data from pressure and interpolate
        # we'll give this a try and see if it can help with my CPU credit problem
        if restart_domain:
            print("Performing Cressman interpolation.")
            x_masked, y_masked, pres = remove_nan_observations(xp, yp, data['slp'].values)
            slpgridx, slpgridy, slp = interpolate_to_grid(x_masked, y_masked, pres, interp_type='cressman',
                                                          minimum_neighbors=1, search_radius=400000,
                                                          hres=100000)

            # get wind information and remove missing data
            wind_speed = (data['wsp'].values * units('knots'))
            wind_dir = data['wdr'].values * units.degree
            good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))
            x_masked = xp[good_indices]
            y_masked = yp[good_indices]
            wind_speed = wind_speed[good_indices]
            wind_dir = wind_dir[good_indices]
            u, v = wind_components(wind_speed, wind_dir)
            windgridx, windgridy, uwind = interpolate_to_grid(x_masked, y_masked, np.array(u),
                                                              interp_type='cressman', search_radius=400000,
                                                              hres=100000)
            _, _, vwind = interpolate_to_grid(x_masked, y_masked, np.array(v), interp_type='cressman',
                                              search_radius=400000, hres=100000)

            # get temperature information
            data['temp'] = cToF(data['temp'])
            x_masked, y_masked, t = remove_nan_observations(xp, yp, data['temp'].values)
            tempx, tempy, temp = interpolate_to_grid(x_masked, y_masked, t, interp_type='cressman',
                                                     minimum_neighbors=3, search_radius=200000, hres=18000)
            temp = np.ma.masked_where(np.isnan(temp), temp)

            # get dewpoint information
            data['dpt'] = cToF(data['dpt'])
            x_masked,y_masked,td = remove_nan_observations(xp,yp,data['dpt'].values)
            dptx,dpty,dewp = interpolate_to_grid(x_masked,y_masked,td,interp_type='cressman',\
                minimum_neighbors=3,search_radius=200000,hres=18000)
            dewp = np.ma.masked_where(np.isnan(dewp),dewp)

            # interpolate wind speed
            x_masked,y_masked,wspd = remove_nan_observations(xp,yp,data['wsp'].values)
            wspx,wspy,speed = interpolate_to_grid(x_masked,y_masked,wspd,interp_type='cressman',\
                minimum_neighbors=3,search_radius=200000,hres=18000)
            speed = np.ma.masked_where(np.isnan(speed),speed)

            # derived values
            # station pressure
            data['pres'] = stationPressure(data['slp'],data['elev'])
            # theta-E
            data['thetae'] = equivalent_potential_temperature(data['pres'].values*units.hPa,data['temp'].values*units.degF,data['dpt'].values*units.degF)
            x_masked,y_masked,thetae = remove_nan_observations(xp,yp,data['thetae'].values)
            thex,they,thte = interpolate_to_grid(x_masked,y_masked,thetae,interp_type='cressman',\
                minimum_neighbors=3,search_radius=200000,hres=18000)
            thte = np.ma.masked_where(np.isnan(thte),thte)

            # mixing ratio
            relh = relative_humidity_from_dewpoint(data['temp'].values*units.degF,data['dpt'].values*units.degF)
            mixr = mixing_ratio_from_relative_humidity(relh,data['temp'].values*units.degF,data['pres'].values*units.hPa) * 1000.0
            x_masked,y_masked,mixrat = remove_nan_observations(xp,yp,mixr)
            mrx,mry,mrat = interpolate_to_grid(x_masked,y_masked,mixrat,interp_type='cressman',\
                minimum_neighbors=3,search_radius=200000,hres=18000)
            mrat = np.ma.masked_where(np.isnan(mrat),mrat)

        # set up the state borders
        state_boundaries = cfeature.NaturalEarthFeature(category='cultural',\
            name='admin_1_states_provinces_lines',scale='50m',facecolor='none')

        # SCALAR VARIABLES TO PLOT
        # variable names (will appear in plot title)
        variables = ['Temperature','Dewpoint','Wind Speed','Theta-E','Mixing Ratio']
        # units (for colorbar label)
        unitlabels = ['F','F','kt','K','g/kg']
        # list of actual variables to plot
        vardata = [temp,dewp,speed,thte,mrat]
        # tag in output filename
        varplots = ['temp','dewp','wspd','thte','mrat']
        # levels: (lower,upper,step)
        levs = [[-20,105,5],[30,85,5],[0,70,5],[250,380,5],[0,22,2]]
        # colormaps
        colormaps = ['hsv_r','Greens','plasma','hsv_r','Greens']

        for j in range(len(variables)):
            print("\t%s" % variables[j])
            fig = plt.figure(figsize=(20, 10))
            view = fig.add_subplot(1, 1, 1, projection=to_proj)
            
            # set up the map and plot the interpolated grids
            levels = list(range(levs[j][0],levs[j][1],levs[j][2]))
            cmap = plt.get_cmap(colormaps[j])
            norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
            labels = variables[j] + " (" + unitlabels[j] + ")"

            # add map features
            view.set_extent([west[i],east[i],south[i],north[i]])
            view.add_feature(state_boundaries,edgecolor='black')
            view.add_feature(cfeature.OCEAN,zorder=-1)
            view.add_feature(cfeature.COASTLINE,zorder=2)
            view.add_feature(cfeature.BORDERS, linewidth=2,edgecolor='black')

            # plot the sea-level pressure
            cs = view.contour(slpgridx, slpgridy, slp, colors='k', levels=list(range(990, 1034, 4)))
            view.clabel(cs, inline=1, fontsize=12, fmt='%i')

            # plot the scalar background
            mmb = view.pcolormesh(tempx, tempy, vardata[j], cmap=cmap, norm=norm)
            fig.colorbar(mmb, shrink=.4, orientation='horizontal', pad=0.02, boundaries=levels, \
                extend='both',label=labels)

            # plot the wind barbs
            view.barbs(windgridx, windgridy, uwind, vwind, alpha=.4, length=5,flip_barb=flip)

            # plot title and save
            view.set_title('%s (shaded), SLP, and Wind (valid %s)' % (variables[j],vt))
            plt.savefig('/var/www/html/images/%s_%s.png' % (savenames[i],varplots[j]),bbox_inches='tight')

            # close everything
            fig.clear()
            view.clear()
            plt.close(fig)
            f.close()

    print("Script finished.")
Example #25
        f.write(item)
###############################################################################
############# Read WRF data, only the variable of interest  ###########
ncfiles = [Dataset(x) for x in wrf_file]  # read in all the wrfout files
# print ncfiles

print("vientos")
wind = getvar(ncfiles, 'uvmet10_wspd_wdir', timeidx=ALL_TIMES, method='cat')
ws = wind.sel(wspd_wdir='wspd')
wd = wind.sel(wspd_wdir='wdir')
###############################################################################
##################### convert from m/s to knots ###############################
ws10 = (ws.values * units('m/s')).to('knots')
wd10 = wd.values * units.degree
##################### compute the U and V components ##########################
u10, v10 = wind_components(ws10, wd10)

############################### AOD #######################################
tau1 = getvar(ncfiles, "TAUAER1", timeidx=ALL_TIMES,
              method='cat')  # AOD in 300nm
tau2 = getvar(ncfiles, "TAUAER2", timeidx=ALL_TIMES,
              method='cat')  # AOD in 400nm
tau3 = getvar(ncfiles, "TAUAER3", timeidx=ALL_TIMES,
              method='cat')  # AOD in 600nm
tau4 = getvar(ncfiles, "TAUAER4", timeidx=ALL_TIMES,
              method='cat')  # AOD in 1000nm

angstrom = np.zeros(
    (tau1.shape[0], tau1.shape[1], tau1.shape[2], tau1.shape[3]))
aod_550 = np.zeros(
    (tau1.shape[0], tau1.shape[1], tau1.shape[2], tau1.shape[3]))
Example #26
def plot_map_temperature(proj,
                         point_locs,
                         df_t,
                         area='EU',
                         west=-5.5,
                         east=32,
                         south=42,
                         north=62,
                         fonts=14,
                         cm='gist_ncar',
                         path=None,
                         SLP=False):
    if path is None:
        # set up the paths and test for existence
        path = expanduser('~') + '/Documents/Metar_plots'
        try:
            os.listdir(path)
        except FileNotFoundError:
            os.mkdir(path)
    else:
        path = path
    df = df_t
    plt.rcParams['savefig.dpi'] = 300
    # =========================================================================
    # Create the figure and an axes set to the projection.
    fig = plt.figure(figsize=(20, 16))
    ax = fig.add_subplot(1, 1, 1, projection=proj)
    if area == 'Antarctica':
        df = df.loc[df['latitude'] < north]
        ax.set_extent([-180, 180, -90, -60], ccrs.PlateCarree())
        theta = np.linspace(0, 2 * np.pi, 100)
        center, radius = [0.5, 0.5], 0.5
        verts = np.vstack([np.sin(theta), np.cos(theta)]).T
        circle = mpath.Path(verts * radius + center)
        ax.set_boundary(circle, transform=ax.transAxes)
    elif area == 'Arctic':
        df = df.loc[df['latitude'] > south]
        ax.set_extent([-180, 180, 60, 90], ccrs.PlateCarree())
        theta = np.linspace(0, 2 * np.pi, 100)
        center, radius = [0.5, 0.5], 0.5
        verts = np.vstack([np.sin(theta), np.cos(theta)]).T
        circle = mpath.Path(verts * radius + center)
        ax.set_boundary(circle, transform=ax.transAxes)

    else:
        ax.set_extent((west, east, south, north))
    # Set up a cartopy feature for state borders.
    state_boundaries = feat.NaturalEarthFeature(category='cultural',
                                                name='admin_0_countries',
                                                scale='10m',
                                                facecolor='#d8dcd6',
                                                alpha=0.5)
    ax.coastlines(resolution='10m', zorder=1, color='black')
    ax.add_feature(state_boundaries, zorder=1, edgecolor='black')
    # ax.add_feature(cartopy.feature.OCEAN, zorder=0)
    # Set plot bounds
    # reset index for easier loop
    df = df.dropna(how='any', subset=['TT'])
    df = df.reset_index()
    cmap = matplotlib.cm.get_cmap(cm)
    norm = matplotlib.colors.Normalize(vmin=-30.0, vmax=30.0)
    # Start the station plot by specifying the axes to draw on, as well as the
    # lon/lat of the stations (with transform). We also set the fontsize.
    index = 0
    a = np.arange(-30, 30, 1)
    for x in a:
        if index == 0:
            df_min = df.loc[df['TT'] < min(a)]
            df_max = df.loc[df['TT'] > max(a)]
            j = 0
            list_ex = [min(a) - 5, max(a) + 5]
            for arr in [df_min, df_max]:
                stationplot = StationPlot(ax,
                                          arr['longitude'],
                                          arr['latitude'],
                                          clip_on=True,
                                          transform=ccrs.PlateCarree(),
                                          fontsize=fonts)
                Temp = stationplot.plot_parameter('NW',
                                                  arr['TT'],
                                                  color=cmap(norm(list_ex[j])))
                try:
                    Temp.set_path_effects([
                        path_effects.Stroke(linewidth=1.5, foreground='black'),
                        path_effects.Normal()
                    ])
                except AttributeError:
                    pass
                j += 1
        # slice out values between x and x+1
        df_cur = df.loc[(df['TT'] < x + 1) & (df['TT'] >= x)]
        stationplot = StationPlot(ax,
                                  df_cur['longitude'],
                                  df_cur['latitude'],
                                  clip_on=True,
                                  transform=ccrs.PlateCarree(),
                                  fontsize=fonts)
        # plot the sliced values with a different color for each loop
        Temp = stationplot.plot_parameter('NW',
                                          df_cur['TT'],
                                          color=cmap(norm(x + 0.5)))
        try:
            Temp.set_path_effects([
                path_effects.Stroke(linewidth=1.5, foreground='black'),
                path_effects.Normal()
            ])
        except AttributeError:
            pass
        print('x={} done correctly '.format(x))
        index += 1
    # fontweight = 'bold'
    # A more complex example uses a custom formatter to control how sea-level
    # pressure values are plotted. This uses the standard trailing 3 digits of
    # the pressure value in tenths of millibars.
    stationplot = StationPlot(ax,
                              df['longitude'].values,
                              df['latitude'].values,
                              clip_on=True,
                              transform=ccrs.PlateCarree(),
                              fontsize=fonts)
    try:
        u, v = wind_components(((df['ff'].values) * units('knots')),
                               (df['dd'].values * units.degree))
        cloud_frac = df['cloud_cover']
        if area != 'Arctic':
            stationplot.plot_barb(u, v, zorder=1000, linewidth=2)
            stationplot.plot_symbol('C', cloud_frac, sky_cover)
            # stationplot.plot_text((2, 0), df['Station'])

        for val in range(0, 2):
            wx = df[['ww', 'StationType']]
            if val == 0:
                # mask all the unmanned stations
                wx['ww'].loc[wx['StationType'] > 3] = np.nan
                wx2 = wx['ww'].fillna(00).astype(int).values.tolist()
                stationplot.plot_symbol('W', wx2, current_weather, zorder=2000)
            else:
                # mask all the manned stations
                wx['ww'].loc[(wx['StationType'] <= 3)] = np.nan
                # mask all reports smaller than 9
                # =7 is an empty symbol!
                wx['ww'].loc[wx['ww'] <= 9] = 7
                wx2 = wx['ww'].fillna(7).astype(int).values.tolist()
                stationplot.plot_symbol('W',
                                        wx2,
                                        current_weather_auto,
                                        zorder=2000)
        # print(u, v)
    except (ValueError, TypeError) as error:
        pass

    if SLP is True:
        lon = df['longitude'].loc[(df.PressureDefId == 'mean sea level')
                                  & (df.Hp <= 750)].values
        lat = df['latitude'].loc[(df.PressureDefId == 'mean sea level')
                                 & (df.Hp <= 750)].values
        xp, yp, _ = proj.transform_points(ccrs.PlateCarree(), lon, lat).T
        sea_levelp = df['SLP'].loc[(df.PressureDefId == 'mean sea level')
                                   & (df.Hp <= 750)]
        x_masked, y_masked, pres = remove_nan_observations(
            xp, yp, sea_levelp.values)
        slpgridx, slpgridy, slp = interpolate_to_grid(x_masked,
                                                      y_masked,
                                                      pres,
                                                      interp_type='cressman',
                                                      search_radius=400000,
                                                      rbf_func='quintic',
                                                      minimum_neighbors=1,
                                                      hres=100000,
                                                      rbf_smooth=100000)
        Splot_main = ax.contour(slpgridx,
                                slpgridy,
                                slp,
                                colors='k',
                                linewidths=2,
                                extent=(west, east, south, north),
                                levels=list(range(950, 1050, 10)))
        plt.clabel(Splot_main, inline=1, fontsize=12, fmt='%i')

        Splot = ax.contour(slpgridx,
                           slpgridy,
                           slp,
                           colors='k',
                           linewidths=1,
                           linestyles='--',
                           extent=(west, east, south, north),
                           levels=[
                               x for x in range(950, 1050, 1)
                               if x not in list(range(950, 1050, 10))
                           ])
        plt.clabel(Splot, inline=1, fontsize=10, fmt='%i')

    # stationplot.plot_text((2, 0), df['Station'])
    # Also plot the actual text of the station id. Instead of cardinal
    # directions, plot further out by specifying a location of 2 increments
    # in x and 0 in y.stationplot.plot_text((2, 0), df['station'])

    if (area == 'Antarctica' or area == 'Arctic'):
        plt.savefig(path + '/CURR_SYNOP_color_' + area + '.png',
                    bbox_inches='tight',
                    pad_inches=0)
    else:
        plt.savefig(path + '/CURR_SYNOP_color_' + area + '.png',
                    bbox_inches='tight',
                    transparent="True",
                    pad_inches=0)
Example #27
from metpy.plots import Hodograph, SkewT
from metpy.units import units

#########################################################################
# Getting Data
# ------------
#
# Upper air data can be obtained using the siphon package, but for this tutorial we will use
# some of MetPy's sample data. This event is the Veterans Day tornado outbreak in 2002.

col_names = ['pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed']

df = pd.read_fwf(get_test_data('nov11_sounding.txt', as_file_obj=False),
                 skiprows=5, usecols=[0, 1, 2, 3, 6, 7], names=col_names)

df['u_wind'], df['v_wind'] = mpcalc.wind_components(df['speed'],
                                                    np.deg2rad(df['direction']))

# Drop any rows with all NaN values for T, Td, winds
df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed',
                       'u_wind', 'v_wind'), how='all').reset_index(drop=True)

##########################################################################

# We will pull the data out of the example dataset into individual variables and
# assign units.

p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
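
Note that the component calculation earlier in this snippet uses an older calling style (plain columns and np.deg2rad). With recent MetPy releases, wind_components is normally given unit-tagged speed and direction in degrees, so with the unit-aware arrays just defined the components would typically be computed as:

u, v = mpcalc.wind_components(wind_speed, wind_dir)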
Example #28
File: test_basic.py Project: akrherz/MetPy
def test_warning_dir():
    """Test that warning is raised wind direction > 2Pi."""
    with pytest.warns(UserWarning):
        wind_components(3. * units('m/s'), 270)
Example #29
def main():
    ### START OF USER SETTINGS BLOCK ###

    # FILE/DATA SETTINGS
    # file path to input file
    datafile = '/home/jgodwin/python/sfc_observations/surface_observations.txt'
    timefile = '/home/jgodwin/python/sfc_observations/validtime.txt'
    # file path to county shapefile
    ctyshppath = '/home/jgodwin/python/sfc_observations/shapefiles/counties/countyl010g.shp'
    # file path to ICAO list
    icaopath = '/home/jgodwin/python/sfc_observations/icao_list.csv'
    icaodf = pd.read_csv(icaopath, index_col='STATION')

    # MAP SETTINGS
    # map names (doesn't go anywhere (yet), just for tracking purposes)
    maps = ['CONUS', 'Texas', 'Tropical Atlantic']
    # minimum radius allowed between points (in km)
    radius = [100.0, 50.0, 75.0]
    # map boundaries (longitude/latitude degrees)
    west = [-122, -108, -100]
    east = [-73, -93, -60]
    south = [23, 25, 10]
    north = [50, 38, 35]
    restart_projection = [True, False, True]
    # use county map? (True/False): warning, counties load slow!
    usecounties = [False, False, False]

    # OUTPUT SETTINGS
    # save directory for output
    savedir = '/var/www/html/images/'
    # filenames for output
    savenames = ['conus.png', 'texas.png', 'atlantic.png']

    # TEST MODE SETTINGS
    test = False  # True/False
    testnum = 5  # which map are you testing? corresponds to index in "maps" above

    ### END OF USER SETTING SECTION ###

    # if there are any missing weather codes, add them here
    wx_code_map.update({
        '-RADZ': 59,
        '-TS': 17,
        'VCTSSH': 80,
        '-SGSN': 77,
        'SHUP': 76,
        'FZBR': 48,
        'FZUP': 76
    })

    ### READ IN DATA / SETUP MAP ###
    # read in the valid time file
    vt = open(timefile).read()
    # read in the data
    for i in range(len(maps)):
        if test and i != testnum:
            continue
        with open(datafile) as f:
            data = pd.read_csv(f,header=0,names=['siteID','lat','lon','elev','slp','temp','sky','dpt','wx','wdr',\
                'wsp'],na_values=-99999)
            # drop rows with missing winds
            data = data.dropna(how='any', subset=['wdr', 'wsp'])

        # remove data not within our domain

        data = data[(data['lat'] >= south[i]-2.0) & (data['lat'] <= north[i]+2.0) \
            & (data['lon'] >= west[i]-2.0) & (data['lon'] <= east[i]+2.0)]

        # filter data (there seems to be one site always reporting a really anomalous temperature)
        data = data[data['temp'] <= 50]

        print("Working on %s" % maps[i])
        # set up the map projection central longitude/latitude and the standard parallels
        cenlon = (west[i] + east[i]) / 2.0
        cenlat = (south[i] + north[i]) / 2.0
        sparallel = cenlat
        if cenlat > 0:
            cutoff = -30
            flip = False
        elif cenlat < 0:
            cutoff = 30
            flip = True
        # create the projection
        if restart_projection[i]:
            proj = ccrs.LambertConformal(central_longitude=cenlon,
                                         central_latitude=cenlat,
                                         standard_parallels=[sparallel],
                                         cutoff=cutoff)
            point_locs = proj.transform_points(ccrs.PlateCarree(),
                                               data['lon'].values,
                                               data['lat'].values)
        data = data[reduce_point_density(point_locs, radius[i] * 1000)]
        # state borders
        state_boundaries = cfeature.NaturalEarthFeature(category='cultural',\
            name='admin_1_states_provinces_lines',scale='50m',facecolor='none')
        # county boundaries
        if usecounties[i]:
            county_reader = shpreader.Reader(ctyshppath)
            counties = list(county_reader.geometries())
            COUNTIES = cfeature.ShapelyFeature(counties, ccrs.PlateCarree())
        ### DO SOME CONVERSIONS ###
        # get the wind components
        u, v = wind_components(data['wsp'].values * units('knots'),
                               data['wdr'].values * units.degree)
        # convert temperature from Celsius to Fahrenheit
        data['temp'] = cToF(data['temp'])
        data['dpt'] = cToF(data['dpt'])
        # convert the cloud fraction value into a code of 0-8 (oktas) and compensate for NaN values
        cloud_frac = (8 * data['sky'])
        cloud_frac[np.isnan(cloud_frac)] = 10
        cloud_frac = cloud_frac.astype(int)
        # map weather strings to WMO codes (only use first symbol if multiple are present)
        data['wx'] = data.wx.str.split('/').str[0] + ''
        wx = [
            wx_code_map[s.split()[0] if ' ' in s else s]
            for s in data['wx'].fillna('')
        ]

        # get the minimum and maximum temperatures in the domain
        searchdata = data[(data['lat'] >= south[i]) & (data['lat'] <= north[i])
                          & (data['lon'] >= west[i]) & (data['lon'] <= east[i])]
        min_temp = searchdata.loc[searchdata['temp'].idxmin()]
        max_temp = searchdata.loc[searchdata['temp'].idxmax()]
        max_dewp = searchdata.loc[searchdata['dpt'].idxmax()]

        # look up the site names for the min/max temp locations
        min_temp_loc = icaoLookup(min_temp['siteID'], icaodf)
        max_temp_loc = icaoLookup(max_temp['siteID'], icaodf)
        max_dewp_loc = icaoLookup(max_dewp['siteID'], icaodf)
        text_str = ("Min temp: %.0f F at %s (%s)\nMax temp: %.0f F at %s (%s)\n"
                    "Max dewpoint: %.0f F at %s (%s)"
                    % (min_temp['temp'], min_temp['siteID'], min_temp_loc,
                       max_temp['temp'], max_temp['siteID'], max_temp_loc,
                       max_dewp['dpt'], max_dewp['siteID'], max_dewp_loc))

        ### PLOTTING SECTION ###
        # change the DPI to increase the resolution
        plt.rcParams['savefig.dpi'] = 255
        # create the figure and an axes set to the projection
        fig = plt.figure(figsize=(20, 10))
        ax = fig.add_subplot(1, 1, 1, projection=proj)
        # add various map elements
        ax.add_feature(cfeature.LAND, zorder=-1)
        ax.add_feature(cfeature.OCEAN, zorder=-1)
        ax.add_feature(cfeature.LAKES, zorder=-1)
        ax.add_feature(cfeature.COASTLINE, zorder=2, edgecolor='black')
        ax.add_feature(state_boundaries, edgecolor='black')
        if usecounties[i]:
            ax.add_feature(COUNTIES,
                           facecolor='none',
                           edgecolor='gray',
                           zorder=-1)
        ax.add_feature(cfeature.BORDERS, linewidth=2, edgecolor='black')
        # set plot bounds
        ax.set_extent((west[i], east[i], south[i], north[i]))

        ### CREATE STATION PLOTS ###
        # create the station plot at each observation's lat/lon
        stationplot = StationPlot(ax, data['lon'].values, data['lat'].values,
                                  clip_on=True, transform=ccrs.PlateCarree(),
                                  fontsize=6)
        # plot the temperature and dewpoint
        stationplot.plot_parameter('NW', data['temp'], color='red')
        stationplot.plot_parameter('SW', data['dpt'], color='darkgreen')
        # plot the SLP using the standard trailing three digits
        stationplot.plot_parameter(
            'NE', data['slp'], formatter=lambda v: format(10 * v, '.0f')[-3:])
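        # e.g. an SLP of 1013.2 hPa becomes 10132 after multiplying by 10, and its
        # trailing three digits give the conventional station-plot label '132'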
        # plot the sky condition
        stationplot.plot_symbol('C', cloud_frac, sky_cover)
        # plot the present weather
        stationplot.plot_symbol('W', wx, current_weather)
        # plot the wind barbs
        stationplot.plot_barb(u, v, flip_barb=flip)
        # plot the text of the station ID
        stationplot.plot_text((2, 0), data['siteID'])
        # plot the valid time
        plt.title('Surface Observations valid %s' % vt)
        # plot the min/max temperature info and draw circles around the coldest,
        # warmest, and most humid obs
        props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
        plt.text(west[i],
                 south[i],
                 text_str,
                 fontsize=12,
                 verticalalignment='top',
                 bbox=props,
                 transform=ccrs.Geodetic())
        projx1, projy1 = proj.transform_point(min_temp['lon'], min_temp['lat'],
                                              ccrs.Geodetic())
        ax.add_patch(
            matplotlib.patches.Circle(xy=[projx1, projy1],
                                      radius=50000,
                                      facecolor="None",
                                      edgecolor='blue',
                                      linewidth=3,
                                      transform=proj))
        projx2, projy2 = proj.transform_point(max_temp['lon'], max_temp['lat'],
                                              ccrs.Geodetic())
        ax.add_patch(
            matplotlib.patches.Circle(xy=[projx2, projy2],
                                      radius=50000,
                                      facecolor="None",
                                      edgecolor='red',
                                      linewidth=3,
                                      transform=proj))
        projx3, projy3 = proj.transform_point(max_dewp['lon'], max_dewp['lat'],
                                              ccrs.Geodetic())
        ax.add_patch(
            matplotlib.patches.Circle(xy=[projx3, projy3],
                                      radius=30000,
                                      facecolor="None",
                                      edgecolor='green',
                                      linewidth=3,
                                      transform=proj))
        # save the figure
        outfile_name = savedir + savenames[i]
        plt.savefig(outfile_name, bbox_inches='tight')

        # clear and close everything (the data file was already closed by the with block)
        fig.clear()
        ax.clear()
        plt.close(fig)

    print("Script finished.")
Example #30
def preprocessDataResample(file, path, spatialResolution, lambda1, lambda2,
                           order):
    #delete the global data variable as soon as troubleshooting is complete
    """Prepare data for hodograph analysis.

        Non-numeric values and values > 999999 are removed, the Brunt-Vaisala frequency
        is calculated, and the background wind is removed.

        Several background removal techniques are used: rolling average, Savitzky-Golay
        filter, and nth order polynomial fits.
    """

    data = openFile(file, path)
    data = interpolateVertically(data)

    #change data container name, sounds silly but useful for troubleshooting data-cleaning bugs
    global df
    df = data
    #print(df)
    #make the following vars available outside of the function - convenient for the time being, but consider changing in the future
    """
    global Time 
    global Pres 
    global Temp 
    global Hu 
    global Wd 
    global Long 
    global Lat 
    global Alt 
    global potentialTemp
    global bv2
    global u, v 
    global uBackground 
    global vBackground
    global tempBackground

    #for comparing rolling ave to savitsky golay
    #global uBackgroundRolling
    #global vBackgroundRolling
    #global tempBackgroundRolling
    #global uRolling
    #global vRolling
    #global tRolling
    
    """
    #individual series for each variable, local
    Time = df['Time'].to_numpy()
    Pres = df['P'].to_numpy() * units.hPa
    Temp = df['T'].to_numpy() * units.degC
    Ws = df['Ws'].to_numpy() * units.m / units.second
    Wd = df['Wd'].to_numpy() * units.degree
    Long = df['Long.'].to_numpy()
    Lat = df['Lat.'].to_numpy()
    Alt = df['Alt'].to_numpy().astype(int) * units.meter

    #calculate the Brunt-Vaisala frequency squared (N^2)
    tempK = Temp.to('kelvin')
    potentialTemperature = tempK * (p_0 / Pres)**(
        2 / 7)  #https://glossary.ametsoc.org/wiki/Potential_temperature
    bv2 = mpcalc.brunt_vaisala_frequency_squared(
        Alt, potentialTemperature).magnitude  #N^2
    #bv2 = bruntViasalaFreqSquared(potentialTemperature, heightSamplingFreq)     #Maybe consider using metpy version of N^2 ? Height sampling is not used in hodo method, why allow it to affect bv ?

    #convert wind from polar to cartesian c.s.
    u, v = mpcalc.wind_components(
        Ws, Wd)  #raw u,v components - no different than using trig functions
    print("Size of u: ", len(u))
    #subtract nth order polynomials to find the perturbation profile
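    # A minimal sketch of that detrending step, added only for illustration (the names
    # altM/uFit/vFit/uPert/vPert are assumptions, not part of the original function, and
    # numpy is assumed to be imported as np at module scope): fit an nth order polynomial
    # of each wind component against altitude and subtract it from the raw winds.
    altM = Alt.magnitude
    uFit = np.polyval(np.polyfit(altM, u.magnitude, order), altM)
    vFit = np.polyval(np.polyfit(altM, v.magnitude, order), altM)
    uPert = u.magnitude - uFit
    vPert = v.magnitude - vFit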

    return
Example #31
# - Convert cardinal wind direction to degrees
# - Get wind components from speed and direction

# Read in the data and assign units as defined by the Mesonet
temperature = data['TAIR'].values * units.degF
dewpoint = data['TDEW'].values * units.degF
pressure = data['PRES'].values * units.hPa
wind_speed = data['WSPD'].values * units.mph
wind_direction = data['WDIR']
latitude = data['LAT']
longitude = data['LON']
station_id = data['STID']

# Take cardinal direction and convert to degrees, then convert to components
wind_direction = mpcalc.parse_angle(list(wind_direction))
u, v = mpcalc.wind_components(wind_speed.to('knots'), wind_direction)

###########################################
# Create the figure
# -----------------

# Create the figure and an axes set to the projection.
fig = plt.figure(figsize=(20, 8))
add_metpy_logo(fig, 70, 30, size='large')
ax = fig.add_subplot(1, 1, 1, projection=proj)

# Add some various map elements to the plot to make it recognizable.
ax.add_feature(cfeature.LAND)
ax.add_feature(cfeature.STATES.with_scale('50m'))

# Set plot bounds
Example #32
# Drop any rows with all NaN values for T, Td, winds
# df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed',
#                       'u_wind', 'v_wind'), how='all').reset_index(drop=True)

##########################################################################

# We will pull the data out of the example dataset into individual variables and
# assign units.

p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
u, v = mpcalc.wind_components(wind_speed, wind_dir)
u_wind = df['u_wind'].values * units(df.units['u_wind'])
v_wind = df['v_wind'].values * units(df.units['v_wind'])

##########################################################################
# Thermodynamic Calculations
# --------------------------
#
# Often we will want to calculate some thermodynamic parameters of a
# sounding. The MetPy calc module has many such calculations already implemented!
#
# * **Lifting Condensation Level (LCL)** - The level at which an air parcel's
#   relative humidity becomes 100% when lifted along a dry adiabatic path.
# * **Parcel Path** - Path followed by a hypothetical parcel of air, beginning
#   at the surface temperature/pressure and rising dry adiabatically until
#   reaching the LCL, then rising moist adiabatically.
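
##########################################################################
# As a quick illustration of those two calculations (a sketch added here, not part of
# the original walkthrough), both can be computed directly from the p, T, and Td
# arrays pulled out above:

lcl_pressure, lcl_temperature = mpcalc.lcl(p[0], T[0], Td[0])
parcel_prof = mpcalc.parcel_profile(p, T[0], Td[0]).to('degC')
print('LCL:', lcl_pressure, lcl_temperature)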
Example #33
def wind_rh_according_to_4D_data(
        initTime=None,
        fhour=6,
        day_back=0,
        model='ECMWF',
        sta_fcs={
            'lon': [101.82, 101.32, 101.84, 102.23, 102.2681],
            'lat': [28.35, 27.91, 28.32, 27.82, 27.8492],
            'altitude': [3600, 3034.62, 3240, 1669, 1941.5],
            'name': ['健美乡', '项脚乡', '\n锦屏镇', '\n马道镇', 'S9005  ']
        },
        draw_zd=True,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500],
        map_ratio=19 / 9,
        zoom_ratio=1,
        south_China_sea=False,
        area='全国',
        city=False,
        output_dir=None,
        bkgd_type='satellite',
        data_source='MICAPS'):

    # a named sub-area disables the South China Sea option
    if (area != '全国'):
        south_China_sea = False

    # prepare data
    if (area != '全国'):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    cntr_pnt = np.append(np.mean(sta_fcs['lon']), np.mean(sta_fcs['lat']))
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    bkgd_level = utl.cal_background_zoom_ratio(zoom_ratio)
    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='RH',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=''),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='u10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='v10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='Td2m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='T2m')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
            initTime = filename[0:8]

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1],
                                         filename=filename,
                                         levels=levels)
        if (gh is None):
            return
        # scale the height field by 10 (MICAPS appears to store heights in decameters)
        gh['data'].values = gh['data'].values * 10

        rh = MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1],
                                         filename=filename,
                                         levels=levels,
                                         allExists=False)
        if rh is None:
            return

        u = MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if u is None:
            return

        v = MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if v is None:
            return

        u10m = MICAPS_IO.get_model_grid(directory=data_dir[4],
                                        filename=filename)
        if u10m is None:
            return

        v10m = MICAPS_IO.get_model_grid(directory=data_dir[5],
                                        filename=filename)
        if v10m is None:
            return

        td2m = MICAPS_IO.get_model_grid(directory=data_dir[6],
                                        filename=filename)
        if td2m is None:
            return

        t2m = MICAPS_IO.get_model_grid(directory=data_dir[7],
                                       filename=filename)
        if t2m is None:
            return

        if (draw_zd == True):
            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            directory_obs = utl.Cassandra_dir(data_type='surface',
                                              data_source='OBS',
                                              var_name='PLOT_ALL')
            try:
                zd_sta = MICAPS_IO.get_station_data(filename=validtime +
                                                    '0000.000',
                                                    directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = True
            except Exception:
                zd_sta = MICAPS_IO.get_station_data(directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = False

            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alt'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['Wind_speed_2m_avg'].values * units('m/s'),
                zd_sta['Wind_angle_2m_avg'].values * units.deg)

            idx_zd = np.where((zd_lon > map_extent[0])
                              & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2])
                              & (zd_lat < map_extent[3]))

            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server

            gh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return

            rh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='RHU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="RHU",
                units='%')
            if rh is None:
                return

            u = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIU",
                units='m/s')
            if u is None:
                return

            v = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIV",
                units='m/s')
            if v is None:
                return

            if (model == 'ECMWF'):
                td2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='DPT'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="DPT",
                    units='K')
                if td2m is None:
                    return

                t2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='TEF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="TEF2",
                    units='K')
                if t2m is None:
                    return

                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return

                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return

            if (model == 'GRAPES_GFS'):
                rh2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='RHF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=2,
                    fcst_ele="RHF2",
                    units='%')
                if rh2m is None:
                    return

                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return

                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return
        except KeyError:
            raise ValueError('Can not find all data needed')

        if (draw_zd == True):
            if (initTime == None):
                initTime1 = CIMISS_IO.cimiss_get_obs_latest_time(
                    data_code="SURF_CHN_MUL_HOR")
                initTime = (datetime.strptime('20' + initTime1, '%Y%m%d%H') -
                            timedelta(days=day_back)).strftime("%Y%m%d%H")[2:]

            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            data_code = utl.CMISS_data_code(data_source='OBS',
                                            var_name='PLOT_sfc')
            zd_sta = CIMISS_IO.cimiss_obs_by_time(
                times=validtime + '0000',
                data_code=data_code,
                sta_levels="011,012,013,014",
                elements=
                "Station_Id_C,Station_Id_d,lat,lon,Alti,TEM,WIN_D_Avg_2mi,WIN_S_Avg_2mi,RHU"
            )
            obs_valid = True
            if (zd_sta is None):
                CIMISS_IO.cimiss_get_obs_latest_time(data_code=data_code,
                                                     latestTime=6)
                zd_sta = CIMISS_IO.cimiss_obs_by_time(directory=directory_obs,
                                                      dropna=True,
                                                      cache=False)
                obs_valid = False

            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alti'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['WIN_S_Avg_2mi'].values * units('m/s'),
                zd_sta['WIN_D_Avg_2mi'].values * units.deg)

            idx_zd = np.where((zd_lon > map_extent[0])
                              & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2])
                              & (zd_lat < map_extent[3])
                              & (zd_sta['WIN_S_Avg_2mi'].values < 1000))

            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    # mask out data outside the map area
    delt_xy = rh['lon'].values[1] - rh['lon'].values[0]
    # pad the mask by one grid spacing so contour labels are not clipped at the map edge
    mask1 = (rh['lon'] > map_extent[0] - delt_xy) & (
        rh['lon'] < map_extent[1] + delt_xy) & (
            rh['lat'] > map_extent[2] - delt_xy) & (rh['lat'] <
                                                    map_extent[3] + delt_xy)
    mask2 = (u10m['lon'] > map_extent[0] - delt_xy) & (
        u10m['lon'] < map_extent[1] + delt_xy) & (
            u10m['lat'] > map_extent[2] - delt_xy) & (u10m['lat'] <
                                                      map_extent[3] + delt_xy)
    # apply the masks, keeping one extra grid point beyond the map extent
    rh = rh.where(mask1, drop=True)
    u = u.where(mask1, drop=True)
    v = v.where(mask1, drop=True)
    gh = gh.where(mask1, drop=True)
    u10m = u10m.where(mask2, drop=True)
    v10m = v10m.where(mask2, drop=True)
    #prepare interpolator
    Ex1 = np.squeeze(u['data'].values).flatten()
    Ey1 = np.squeeze(v['data'].values).flatten()
    Ez1 = np.squeeze(rh['data'].values).flatten()
    z = (np.squeeze(gh['data'].values)).flatten()

    coords = np.zeros((np.size(levels), u['lat'].size, u['lon'].size, 3))
    coords[..., 1] = u['lat'].values.reshape((1, u['lat'].size, 1))
    coords[..., 2] = u['lon'].values.reshape((1, 1, u['lon'].size))
    coords = coords.reshape((Ex1.size, 3))
    coords[:, 0] = z

    interpolator_U = LinearNDInterpolator(coords, Ex1, rescale=True)
    interpolator_V = LinearNDInterpolator(coords, Ey1, rescale=True)
    interpolator_RH = LinearNDInterpolator(coords, Ez1, rescale=True)

    #process sta_fcs 10m wind
    coords2 = np.zeros((np.size(sta_fcs['lon']), 3))
    coords2[:, 0] = sta_fcs['altitude']
    coords2[:, 1] = sta_fcs['lat']
    coords2[:, 2] = sta_fcs['lon']
    u_sta = interpolator_U(coords2)
    v_sta = interpolator_V(coords2)
    RH_sta = interpolator_RH(coords2)
    wsp_sta = (u_sta**2 + v_sta**2)**0.5
    u10m_2D = u10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    v10m_2D = v10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    if (model == 'GRAPES_GFS' and data_source == 'CIMISS'):
        rh2m_2D = rh2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))['data'].values
    else:
        td2m_2D = td2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))
        t2m_2D = t2m.interp(lon=('points', sta_fcs['lon']),
                            lat=('points', sta_fcs['lat']))
        if (data_source == 'MICAPS'):
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('degC'),
                td2m_2D['data'].values * units('degC')) * 100
        else:
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('kelvin'),
                td2m_2D['data'].values * units('kelvin')) * 100

    wsp10m_2D = (u10m_2D['data'].values**2 + v10m_2D['data'].values**2)**0.5
    winddir10m = mpcalc.wind_direction(u10m_2D['data'].values * units('m/s'),
                                       v10m_2D['data'].values * units('m/s'))
    if (np.isnan(wsp_sta).any()):
        if (wsp_sta.size == 1):
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(
                wsp10m_2D[np.isnan(wsp_sta)])
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D)[np.isnan(RH_sta)])
        else:
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(wsp10m_2D)[np.isnan(
                wsp_sta)]
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D))[np.isnan(RH_sta)]
    u_sta, v_sta = mpcalc.wind_components(wsp_sta * units('m/s'), winddir10m)

    #process zd_sta 10m wind
    zd_fcst_obs = None
    if (draw_zd is True):
        coords3 = np.zeros((np.size(zd_sm_alt), 3))
        coords3[:, 0] = zd_sm_alt
        coords3[:, 1] = zd_sm_lat
        coords3[:, 2] = zd_sm_lon
        u_sm_sta = interpolator_U(coords3)
        v_sm_sta = interpolator_V(coords3)
        wsp_sm_sta = (u_sm_sta**2 + v_sm_sta**2)**0.5
        u10m_sm = u10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        v10m_sm = v10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        wsp10m_sta = np.squeeze(
            (u10m_sm['data'].values**2 + v10m_sm['data'].values**2)**0.5)
        winddir10m_sm = mpcalc.wind_direction(
            u10m_sm['data'].values * units('m/s'),
            v10m_sm['data'].values * units('m/s'))
        if (np.isnan(wsp_sm_sta).any()):
            wsp_sm_sta[np.isnan(wsp_sm_sta)] = wsp10m_sta[np.isnan(wsp_sm_sta)]
        u_sm_sta, v_sm_sta = mpcalc.wind_components(wsp_sm_sta * units('m/s'),
                                                    winddir10m_sm)

        zd_fcst_obs = {
            'lon': zd_sm_lon,
            'lat': zd_sm_lat,
            'altitude': zd_sm_alt,
            'U': np.squeeze(np.array(u_sm_sta)),
            'V': np.squeeze(np.array(v_sm_sta)),
            'obs_valid': obs_valid,
            'U_obs': np.squeeze(np.array(zd_sm_u)),
            'V_obs': np.squeeze(np.array(zd_sm_v))
        }
    # prepare data for graphics
    sta_fcs_fcst = {
        'lon': sta_fcs['lon'],
        'lat': sta_fcs['lat'],
        'altitude': sta_fcs['altitude'],
        'name': sta_fcs['name'],
        'RH': np.array(RH_sta),
        'U': np.squeeze(np.array(u_sta)),
        'V': np.squeeze(np.array(v_sta))
    }

    fcst_info = gh.coords

    local_scale_graphics.draw_wind_rh_according_to_4D_data(
        sta_fcs_fcst=sta_fcs_fcst,
        zd_fcst_obs=zd_fcst_obs,
        fcst_info=fcst_info,
        map_extent=map_extent,
        draw_zd=draw_zd,
        bkgd_type=bkgd_type,
        bkgd_level=bkgd_level,
        output_dir=output_dir)
Example #34
def test_warning_dir():
    """Test that a warning is raised when wind direction is greater than 2*pi."""
    with pytest.warns(UserWarning):
        wind_components(3. * units('m/s'), 270)
Example #35
    'dew_point_temperature'].values * units.degC
data['air_pressure_at_sea_level'] = data_arr['slp'].values * units('mbar')

###########################################
# Notice that the names (the keys) in the dictionary are the same as those that the
# layout is expecting.
#
# Now perform a few conversions:
#
# - Get wind components from speed and direction
# - Convert cloud fraction values to integer codes [0 - 8]
# - Map METAR weather codes to WMO codes for weather symbols

# Get the wind components from the speed and direction
u, v = wind_components(data_arr['wind_speed'].values * units('m/s'),
                       data_arr['wind_dir'].values * units.degree)
data['eastward_wind'], data['northward_wind'] = u, v

# Convert the fraction value into a code of 0-8, which can be used to pull out
# the appropriate symbol
data['cloud_coverage'] = (
    8 * data_arr['cloud_fraction']).fillna(10).values.astype(int)

# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
wx_text = data_arr['weather'].fillna('')
data['current_wx1_symbol'] = [
    wx_code_map[s.split()[0] if ' ' in s else s] for s in wx_text
]

###########################################
    print(dir_name + save_dir_name, 'already exists')

filename = 'UPPER_SONDE_47185_ALL_2014_2014_2015.csv'
fullname = dir_name + filename

fullname_el = fullname.split('\\')
#filename = fullname_el[-1]
filename_el = filename.split('_')
obs_year = int(filename_el[-3])

# columns: station, time(UTC), pressure(hPa), height(gpm), temperature(°C), dewpoint(°C), wind direction(deg), wind speed(knot), surface FLAG(null), tropopause FLAG(null), max wind FLAG(null)
df = pd.read_csv(dir_name+filename, skiprows=1, sep=',', header=None, index_col=0,
                   names = ['site', 'time', 'pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed', 'FLAG1', 'FLAG2', 'FLAG3'],
                   skipfooter=1, engine='python')

df['u_wind'], df['v_wind'] = mpcalc.wind_components(df['speed'].values * units.knots,
                                                    df['direction'].values * units.degrees)
#%%
# Drop any rows with all NaN values for T, Td, winds
#df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed', 'u_wind', 'v_wind'), how='all').reset_index(drop=True)
#df = df.dropna(subset=('temperature', 'dewpoint'), how='all').reset_index(drop=True)
df = df.dropna(subset=('temperature', 'dewpoint', 'height'), how='all').reset_index(drop=True)

# Rows that do not meet the condition alpha + num are eliminated
s_start_date = datetime(obs_year, 1, 1) #convert startdate to date type
s_end_date = datetime(obs_year+1, 1, 1)

date1 = s_start_date
selected_times = []
while date1 < s_end_date : 
    date1_strf = date1.strftime('%Y-%m-%d %H:%M')
    selected_times.append(date1_strf)
Example #37
# Get wind information and mask where either speed or direction is unavailable
wind_speed = (data['wind_speed'].values * units('m/s')).to('knots')
wind_dir = data['wind_dir'].values * units.degree

good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))

x_masked = xp[good_indices]
y_masked = yp[good_indices]
wind_speed = wind_speed[good_indices]
wind_dir = wind_dir[good_indices]

###########################################
# Calculate u and v components of wind and then interpolate both.
#
# Both will have the same underlying grid so throw away grid returned from v interpolation.
u, v = wind_components(wind_speed, wind_dir)

windgridx, windgridy, uwind = interpolate_to_grid(x_masked, y_masked, np.array(u),
                                                  interp_type='cressman', search_radius=400000,
                                                  hres=100000)

_, _, vwind = interpolate_to_grid(x_masked, y_masked, np.array(v), interp_type='cressman',
                                  search_radius=400000, hres=100000)

###########################################
# Get temperature information
x_masked, y_masked, t = remove_nan_observations(xp, yp, data['temperature'].values)
tempx, tempy, temp = interpolate_to_grid(x_masked, y_masked, t, interp_type='cressman',
                                         minimum_neighbors=3, search_radius=400000, hres=35000)

temp = np.ma.masked_where(np.isnan(temp), temp)
Example #38
# Use the cartopy map projection to transform station locations to the map and
# then refine the number of stations plotted by setting a 300km radius
point_locs = proj.transform_points(ccrs.PlateCarree(), data['lon'].values, data['lat'].values)
data = data[reduce_point_density(point_locs, 300000.)]

###########################################
# Now that we have the data we want, we need to perform some conversions:
#
# - Get wind components from speed and direction
# - Convert cloud fraction values to integer codes [0 - 8]
# - Map METAR weather codes to WMO codes for weather symbols

# Get the wind components, converting from m/s to knots, as is appropriate
# for the station plot.
u, v = wind_components((data['wind_speed'].values * units('m/s')).to('knots'),
                       data['wind_dir'].values * units.degree)

# Convert the fraction value into a code of 0-8 and compensate for NaN values,
# which can be used to pull out the appropriate symbol
cloud_frac = (8 * data['cloud_fraction'])
cloud_frac[np.isnan(cloud_frac)] = 10
cloud_frac = cloud_frac.astype(int)

# Map weather strings to WMO codes, which we can use to convert to symbols
# Only use the first symbol if there are multiple
wx = [wx_code_map[s.split()[0] if ' ' in s else s] for s in data['weather'].fillna('')]


###########################################
# The payoff
# ----------
Example #39
File: test_basic.py Project: akrherz/MetPy
def test_wind_comps_scalar():
    """Test wind components calculation with scalars."""
    u, v = wind_components(8 * units('m/s'), 150 * units.deg)
    assert_almost_equal(u, -4 * units('m/s'), 3)
    assert_almost_equal(v, 6.9282 * units('m/s'), 3)