Example #1
    def _write_profile(self, csv_path):

        profiles = self.atmo_profiles  # dictionary
        pres = profiles.get('pres').get('data')
        u = profiles.get('u').get('data')
        v = profiles.get('v').get('data')
        temp = profiles.get('temp').get('data').to('degC')
        sphum = profiles.get('sphum').get('data')

        dewpt = np.array(
            mpcalc.dewpoint_from_specific_humidity(sphum, temp,
                                                   pres).to('degC'))
        wspd = np.array(mpcalc.wind_speed(u, v))
        wdir = np.array(mpcalc.wind_direction(u, v))

        pres = np.array(pres)
        temp = np.array(temp)

        profile = pd.DataFrame({
            'LEVEL': pres,
            'TEMP': temp,
            'DWPT': dewpt,
            'WDIR': wdir,
            'WSPD': wspd,
        })

        profile.to_csv(csv_path, index=False, float_format="%10.2f")
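
A minimal, self-contained sketch of the u/v-to-direction/speed step this method relies on; the numbers below are illustrative and not taken from the profile data:

import numpy as np
import pandas as pd
import metpy.calc as mpcalc
from metpy.units import units

u = np.array([3.0, 0.0, -4.0]) * units('m/s')
v = np.array([4.0, 5.0, 0.0]) * units('m/s')

sketch = pd.DataFrame({
    'WDIR': np.array(mpcalc.wind_direction(u, v)),  # ~216.9, 180.0, 90.0 degrees
    'WSPD': np.array(mpcalc.wind_speed(u, v)),      # 5.0, 5.0, 4.0 m/s
})
sketch.to_csv('wind_sketch.csv', index=False, float_format="%10.2f")
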
Example #2
    def _process_feature(self):
        metero_var = config['data']['metero_var']
        metero_use = config['experiments']['metero_use']
        metero_idx = [metero_var.index(var) for var in metero_use]
        self.feature = self.feature[:, :, metero_idx]

        u = self.feature[:, :, -2] * units.meter / units.second
        v = self.feature[:, :, -1] * units.meter / units.second
        speed = 3.6 * mpcalc.wind_speed(u, v).magnitude  # 3.6 converts m/s to km/h
        direc = mpcalc.wind_direction(u, v).magnitude

        h_arr = []
        w_arr = []
        for i in self.time_arrow:
            h_arr.append(i.hour)
            w_arr.append(i.isoweekday())
        h_arr = np.stack(h_arr, axis=-1)
        w_arr = np.stack(w_arr, axis=-1)
        h_arr = np.repeat(h_arr[:, None], self.graph.node_num, axis=1)
        w_arr = np.repeat(w_arr[:, None], self.graph.node_num, axis=1)

        self.feature = np.concatenate([
            self.feature, h_arr[:, :, None], w_arr[:, :, None],
            speed[:, :, None], direc[:, :, None]
        ],
                                      axis=-1)
Example #3
    def _process_feature(self):
        metero_var = config['data']['metero_var']
        metero_use = config['experiments']['metero_use']
        metero_idx = [metero_var.index(var) for var in metero_use]
        self.feature = self.feature[:, :, metero_idx]

        if config['experiments']['use_wind_coordinates']:
            u = self.feature[:, :,
                             -2] * units.meter / units.second  # u_component_of_wind+950
            v = self.feature[:, :,
                             -1] * units.meter / units.second  # v_component_of_wind+950
            speed = 3.6 * mpcalc.wind_speed(u, v).magnitude  # 3.6 converts m/s to km/h
            direc = mpcalc.wind_direction(u, v).magnitude
        else:
            print("Not using wind coordinates, but speed/direction directly")
            speed = self.feature[:, :, -2]
            direc = self.feature[:, :, -1]

        h_arr = []
        w_arr = []
        for i in self.time_arrow:
            h_arr.append(i.hour)
            w_arr.append(i.isoweekday())
        h_arr = np.stack(h_arr, axis=-1)
        w_arr = np.stack(w_arr, axis=-1)
        h_arr = np.repeat(h_arr[:, None], self.graph.node_num, axis=1)
        w_arr = np.repeat(w_arr[:, None], self.graph.node_num, axis=1)

        self.feature = np.concatenate([
            self.feature, h_arr[:, :, None], w_arr[:, :, None],
            speed[:, :, None], direc[:, :, None]
        ],
                                      axis=-1)
Example #4
def calc_filter_wind_speed(u, v, filter=True):
    """
    """

    wind = mpcalc.wind_speed(u, v)
    if filter:
        wind = ndimage.gaussian_filter(wind, sigma=3, order=0)
    return wind
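
A brief usage sketch with synthetic unit-tagged arrays (shapes and values are illustrative only); note that scipy's gaussian_filter works on the underlying magnitudes, so the smoothed result typically comes back as a plain array:

import numpy as np
import metpy.calc as mpcalc
from metpy.units import units
from scipy import ndimage

u = np.full((8, 8), 3.0) * units('m/s')
v = np.full((8, 8), 4.0) * units('m/s')

raw = calc_filter_wind_speed(u, v, filter=False)  # pint quantity, 5 m/s everywhere
smooth = calc_filter_wind_speed(u, v)             # smoothed; still 5.0 everywhere for a constant field
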
Example #5
def uv_to_ws(u, v):
    """Return wind speed using u and v winds in m/s
    """
    u = u.values * units.meters / units.second
    v = v.values * units.meters / units.second
    ws = mpcalc.wind_speed(u, v)  # m/s
    ws = np.array(ws)
    return ws
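
A quick call sketch using small hand-built xarray DataArrays (the names and values are purely illustrative):

import numpy as np
import xarray as xr

u10 = xr.DataArray(np.array([3.0, 0.0, -4.0]), dims='time')
v10 = xr.DataArray(np.array([4.0, 5.0, 0.0]), dims='time')

print(uv_to_ws(u10, v10))  # [5. 5. 4.]
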
Example #6
def compute_wind_speed(dset, uvar='u', vvar='v'):
    wind = mpcalc.wind_speed(dset[uvar], dset[vvar]).to(units.kph)
    wind = xr.DataArray(wind,
                        coords=dset[uvar].coords,
                        attrs={
                            'standard_name': 'wind intensity',
                            'units': str(wind.units)
                        },
                        name='wind_speed')

    return xr.merge([dset, wind])
Example #7
def calculate_wspd(level="sfc", field_type="ltm"):
    # For this, we need to calculate from the UWND and VWND components.
    uwnd = load_data("uwnd", level=level, field_type=field_type)
    vwnd = load_data("vwnd", level=level, field_type=field_type)
    lat, lon = uwnd["lat"].values, uwnd["lon"].values
    speed = mcalc.wind_speed(uwnd['uwnd'].values * units('m/s'),
                             vwnd['vwnd'].values * units('m/s')).m
    data = xr.DataArray(speed, coords=[lat, lon],
                        dims=['lat', 'lon']).to_dataset(name='wspd')
    data.to_netcdf(DATA_DIRECTORY + "wspd" + '_' + str(level) + '_' + field_type + '.nc',
                   format='NETCDF4')
    return None
Example #8
def test_speed():
    """Test calculating wind speed."""
    u = np.array([4., 2., 0., 0.]) * units('m/s')
    v = np.array([0., 2., 4., 0.]) * units('m/s')

    speed = wind_speed(u, v)

    s2 = np.sqrt(2.)
    true_speed = np.array([4., 2 * s2, 4., 0.]) * units('m/s')

    assert_array_almost_equal(true_speed, speed, 4)
Example #10
 def jp_300_hw(self, path):  # 300 hPa height/wind (Japan region)
     path_fig = os.path.join(path, self.time_str1 + '.jpg')
     if (os.path.exists(path_fig)): return
     # Get the 300 hPa height, latitude and longitude
     height, lat, lon = self.grib2_select_jp('gh', 300)
     # Apply a Gaussian filter to the height field
     height = gaussian_filter(height, sigma=self.height_sigma)
     # Get the 300 hPa wind components and attach units
     wind_u, _, _ = self.grib2_select_jp('u', 300)
     wind_v, _, _ = self.grib2_select_jp('v', 300)
     wind_u = wind_u * units('m/s')
     wind_v = wind_v * units('m/s')
     wind_speed = mpcalc.wind_speed(wind_u, wind_v).to('kt')
     # Draw the base map
     fig, ax = self.draw_map(self.mapcrs_jp, self.extent_jp)
     # Draw isotachs as filled contours
     wind_constant = ax.contourf(lon,
                                 lat,
                                 wind_speed,
                                 np.arange(0, 220, 20),
                                 extend='max',
                                 cmap='YlGnBu',
                                 transform=self.datacrs,
                                 alpha=0.9)
     # Add a colorbar
     cbar = self.draw_jp_colorbar(wind_constant)
     cbar.set_label('ISOTACH(kt)', fontsize=self.fontsize)
     # Plot wind barbs
     wind_arrow = (slice(None, None, 10), slice(None, None, 10))
     ax.barbs(lon[wind_arrow],
              lat[wind_arrow],
              wind_u[wind_arrow].to('kt').m,
              wind_v[wind_arrow].to('kt').m,
              pivot='middle',
              color='black',
              alpha=0.5,
              transform=self.datacrs,
              length=10)
     # Draw height contours
     height_line = ax.contour(lon,
                              lat,
                              height,
                              np.arange(5400, 12000, 120),
                              colors='black',
                              transform=self.datacrs)
     plt.clabel(height_line, fmt='%d', fontsize=self.fontsize)
     # Add the title
     self.draw_title(ax, '300hPa: HEIGHT(M), ISOTACH(kt), WIND ARROW(kt)',
                     self.time_str2)
     # Adjust the figure margins
     plt.subplots_adjust(bottom=0.05, top=0.95, left=0, right=1.0)
     # Save the figure
     print(f'[{self.time_str2}] 300 hPa height/wind (Japan region)...{path_fig}')
     plt.savefig(path_fig)
     # Close the figure
     plt.close(fig=fig)
Example #11
def test_speed(array_type):
    """Test calculating wind speed."""
    mask = [False, True, False, True]
    u = array_type([4., 2., 0., 0.], 'm/s', mask=mask)
    v = array_type([0., 2., 4., 0.], 'm/s', mask=mask)

    speed = wind_speed(u, v)

    s2 = np.sqrt(2.)
    true_speed = array_type([4., 2 * s2, 4., 0.], 'm/s', mask=mask)

    assert_array_almost_equal(true_speed, speed, 4)
Example #12
def grad_mask(Zint, REFmasked, REF, storm_relative_dir, ZDRmasked1,
              ZDRrmasked1, CC, CCall):
    #Inputs,
    #Zint: 1km AFL grid level
    #REFmasked: REF masked below 20 dBz
    #REF: 1km Reflectivity grid
    #storm_relative_dir: Vector direction along the reflectivity gradient in the forward flank
    #ZDRmasked1: 1km Differential Reflectivity (Zdr) grid, masked below 20 dBz reflectivity
    #ZDRrmasked1: Full volume Zdr gridded, masked below 20 dBz reflectivity
    #CC: 1km Correlation Coefficient (CC) grid
    #CCall: Full volume CC gridded
    print('Gradient Analysis and Masking')
    #Determining gradient direction and masking some Zhh and Zdr grid fields

    smoothed_ref1 = ndi.gaussian_filter(REFmasked, sigma=2, order=0)
    REFgradient = np.asarray(np.gradient(smoothed_ref1))
    REFgradient[0, :, :] = ma.masked_where(REF < 20, REFgradient[0, :, :])
    REFgradient[1, :, :] = ma.masked_where(REF < 20, REFgradient[1, :, :])
    grad_dir1 = wind_direction(REFgradient[1, :, :] * units('m/s'),
                               REFgradient[0, :, :] * units('m/s'))
    grad_mag = wind_speed(REFgradient[1, :, :] * units('m/s'),
                          REFgradient[0, :, :] * units('m/s'))
    grad_dir = ma.masked_where(REF < 20, grad_dir1)

    #Get difference between the gradient direction and the FFD gradient direction calculated earlier
    srdir = storm_relative_dir
    srirad = np.copy(srdir) * units('degrees').to('radian')
    grad_dir = grad_dir * units('degrees').to('radian')
    grad_ffd = np.abs(
        np.arctan2(np.sin(grad_dir - srirad), np.cos(grad_dir - srirad)))
    grad_ffd = np.asarray(grad_ffd) * units('radian')
    grad_ex = np.copy(grad_ffd)
    grad_ffd = grad_ffd.to('degrees')

    #Mask out areas where the difference between the two is too large and the ZDR is likely not in the forward flank
    ZDRmasked2 = ma.masked_where(grad_ffd > 120 * units('degrees'), ZDRmasked1)
    ZDRmasked = ma.masked_where(CC < .60, ZDRmasked2)
    ZDRallmasked = ma.masked_where(CCall < .70, ZDRrmasked1)
    ZDRallmasked = ma.filled(ZDRallmasked, fill_value=-2)
    ZDRrmasked = ZDRallmasked[Zint, :, :]

    #Add a fill value for the ZDR mask so that contours will be closed
    ZDRmasked = ma.filled(ZDRmasked, fill_value=-2)
    ZDRrmasked = ma.filled(ZDRrmasked, fill_value=-2)

    #Returning variables,
    #grad_mag: Array of wind velocity magnitude along reflectivity gradient
    #grad_ffd: Angle (degrees) used to indicate angular region of supercell containing the forward flank
    #ZDRmasked: Masked array ZDRmasked1 in regions outside the forward flank (grad_ffd) and below 0.6 CC
    #ZDRallmasked: Masked volume array (ZDRrmasked1) below 0.7 CC and filled with -2.0 values
    #ZDRrmasked: ZDRallmasked slice at 1km above freezing level
    return grad_mag, grad_ffd, ZDRmasked, ZDRallmasked, ZDRrmasked
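
A compact sketch of the gradient-direction trick used above: feed the x- and y-derivatives of a field to wind_direction/wind_speed as if they were u/v wind components (the field here is synthetic, not radar data):

import numpy as np
from metpy.calc import wind_direction, wind_speed
from metpy.units import units

field = np.tile(np.arange(5.0), (5, 1))    # increases toward +x
gy, gx = np.gradient(field)                # row (y) and column (x) derivatives
grad_dir = wind_direction(gx * units('m/s'), gy * units('m/s'))
grad_mag = wind_speed(gx * units('m/s'), gy * units('m/s'))
print(grad_dir[0, 0], grad_mag[0, 0])      # 270 degrees, 1.0 m/s
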
Example #13
def test_speed_direction_roundtrip():
    """Test round-tripping between speed/direction and components."""
    # Test each quadrant of the whole circle
    wspd = np.array([15., 5., 2., 10.]) * units.meters / units.seconds
    wdir = np.array([160., 30., 225., 350.]) * units.degrees

    u, v = wind_components(wspd, wdir)

    wdir_out = wind_direction(u, v)
    wspd_out = wind_speed(u, v)

    assert_array_almost_equal(wspd, wspd_out, 4)
    assert_array_almost_equal(wdir, wdir_out, 4)
Example #15
def _write_to_sounding(data_cube, idx_locs, ids, date, fmt=None):
    """
    Writes data to sounding files. Added BUFKIT-readable output capabilities. 
    """
    if fmt is None:
        fmt = 'sharppy'

    knt = 0
    for idx in idx_locs:
        t_out = data_cube[0, :, idx[0], idx[1]] - 273.15
        td_out = data_cube[1, :, idx[0], idx[1]]
        u = data_cube[2, :, idx[0], idx[1]] * units('m/s')
        v = data_cube[3, :, idx[0], idx[1]] * units('m/s')
        wdir_out = mpcalc.wind_direction(u, v).magnitude
        wspd_out = mpcalc.wind_speed(u, v).magnitude
        hgt_out = data_cube[4, :, idx[0], idx[1]]

        out_time = "%s%s%s/%s00" % (date[2:4], date[4:6], date[6:8],
                                    date[8:10])
        if fmt == 'sharppy':
            out_file = "%s/%s.%s" % (SOUNDING_DIR, date, ids[knt])
            f = open(out_file, 'w')

            f.write("%TITLE%\n")
            f.write(" %s   %s" % (ids[knt], out_time))
            f.write("\n\n")
            f.write('   LEVEL     HGHT     TEMP     DWPT     WDIR     WSPD\n')
            f.write('------------------------------------------------------\n')
            f.write('%RAW%\n')

            # This is a weird one. At some point in the past, the GRIB files
            # were ordered differently. Need to check for monotonic increasing
            # or decreasing heights and adjust pressures accordingly
            if strictly_increasing(list(hgt_out)):
                pres_incr = 1
                start_, end_, inc_ = 0, t_out.shape[0], 1
            else:
                pres_incr = -1
                start_, end_, inc_ = t_out.shape[0] - 1, -1, -1

            print(levs)
            print(hgt_out)
            print(start_, end_, inc_, pres_incr)
            for row in range(start_, end_, inc_):
                if hgt_out[row] > 0:
                    out_line = "%s,%s,%s,%s,%s,%s" % (
                        levs[::pres_incr][row], hgt_out[row], t_out[row],
                        td_out[row], wdir_out[row], wspd_out[row])
                    f.write(out_line + '\n')
            f.write('%END%')
            f.close()
        knt += 1
Example #16
 def wind_speed(self, level):
     """
     Receives the integer value of the desired vertical pressure level
     to extract from data a tuple containing values of wind speed.
     In return you will have a pint.Quantity array for the desired time and level.
     """
     # Obtaining the index for the given pressure level
     index_level = np.where(np.array(self.data["isobaric"]) == level *
                            100)[0][0]
     # Extracting wind components data
     uwnd = self.data["u-component_of_wind_isobaric"][
         self.time_step][index_level]
     vwnd = self.data["v-component_of_wind_isobaric"][
         self.time_step][index_level]
     # Calculate wind speed using metpy functions
     wind_spd = mpcalc.wind_speed(uwnd, vwnd)
     return np.array(wind_spd)
Example #17
def readWeatherData(filepath):
    fullshape = (949, 739)
    use_keys = [
        'x_wind_gust_10m', 'y_wind_gust_10m'
    ]  #"U-momentum of gusts in 10m height"m/s, "V-momentum of gusts in 10m height"m/s
    uparam, vparam = use_keys
    dataset = xr.open_dataset(filepath)
    dataset = dataset.metpy.parse_cf()  #[uparam, vparam])
    dataset[uparam].metpy.convert_units('knots')
    dataset[vparam].metpy.convert_units('knots')
    data_crs = dataset[uparam].metpy.cartopy_crs
    wind_speed = mpcalc.wind_speed(dataset[uparam], dataset[vparam])
    wind_direction = mpcalc.wind_direction(dataset[uparam], dataset[vparam])
    dataset['wind_speed'] = xr.DataArray(wind_speed.magnitude,
                                         coords=dataset[uparam].coords,
                                         dims=dataset[uparam].dims)
    dataset['wind_speed'].attrs['units'] = wind_speed.units
    dataset['wind_direction'] = xr.DataArray(wind_direction.magnitude,
                                             coords=dataset[uparam].coords,
                                             dims=dataset[uparam].dims)
    dataset['wind_direction'].attrs['units'] = wind_direction.units
    return dataset
Example #18
def calculating_density_height(u, v, tmp, prs=100367.63, cp=0.59, R=286.7):
    """ Returns new arranged DataArray of density at desired height (related to in which height the values are given)
    
        u  = u wind speed at any level (can be Xarray Datarray) m/s
        v  = v wind speed at any level (can be Xarray Datarray) m/s
        prs = pressure at any level,  to be dafult 80 metres is choosen --> 100367.63 Pa (can be Xarray Datarray)
        tmp = temperature at any level (can be Xarray Datarray) K
        cp = efficiency parameter
        R = Characteristic Gas Constant of air 286.7 J/kgK
        ws = wind speed at any level m/s 
        dense_height = The density that calculated with given values m3/kg
        
        IMPORTANT:cut_in, cut_out, rated_wind_speed, rated_power values are unique to the turbine used, can differ.
                    For Default Values "Vestas V82-1.5"  model is used.
        
        """

    u = u.values * units.meters / units.second
    v = v.values * units.meters / units.second
    ws = mpcalc.wind_speed(u, v)  # m/s
    ws = np.array(ws)
    dense_height = (prs / (R * tmp))
    return np.array(dense_height)
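
A quick worked check of the ideal-gas relation the function actually returns, rho = prs / (R * tmp), using the default pressure and an assumed temperature of 288 K (illustrative, not from any dataset):

prs = 100367.63  # Pa, default 80 m pressure from the docstring
R = 286.7        # J/(kg K)
tmp = 288.0      # K, assumed for illustration
rho = prs / (R * tmp)
print(rho)       # ~1.216 kg/m3
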
Example #19
def scalardata(field, valid_time, targetdir=".", debug=False):
    # Get color map, levels, and netCDF variable name appropriate for requested variable (from fieldinfo module).
    info = fieldinfo[field]
    if debug:
        print("scalardata: found", field, "fieldinfo:", info)
    cmap = colors.ListedColormap(info['cmap'])
    levels = info['levels']
    fvar = info['fname'][0]

    # Get narr file and filename.
    ifile = get(valid_time, targetdir=targetdir, narrtype=info['filename'])

    if debug:
        print("About to open " + ifile)
    nc = xarray.open_dataset(ifile)
    # Tried to rename vars and dimensions so metpy.parse_cf() would not warn "Found latitude/longitude values, assuming latitude_longitude for projection grid_mapping variable"
    # It didn't help. Only commenting out the metpy.parse_cf() line helped.
    # It didn't help with MetpyDeprecationWarning: Multidimensional coordinate lat assigned for axis "y". This behavior has been deprecated and will be removed in v1.0 (only one-dimensional coordinates will be available for the "y" axis) either
    #nc = nc.rename_vars({"gridlat_221": "lat", "gridlon_221" : "lon"})
    #nc = nc.rename_dims({"gridx_221": "x", "gridy_221" : "y"})
    #nc = nc.metpy.parse_cf() # TODO: figure out why filled contour didn't have .metpy.parse_cf()

    if fvar not in nc.variables:
        print(fvar, "not in", ifile, '. Try', nc.var())
        sys.exit(1)

    # Define data array. Speed and shear derived differently.
    # Define 'long_name' attribute
    #
    if field[0:5] == "speed":
        u = nc[info['fname'][0]]
        v = nc[info['fname'][1]]
        data = u  # copy metadata/coordinates from u
        data.values = wind_speed(u, v)
        data.attrs['long_name'] = "wind speed"
    elif field[0:3] == 'shr' and '_' in field:
        du, dv = shear(field,
                       valid_time=valid_time,
                       targetdir=targetdir,
                       debug=debug)
        ws = wind_speed(du, dv)
        attrs = {
            'long_name': 'wind shear',
            'units': str(ws.units),
            'verttitle': du.attrs["verttitle"]
        }
        # Use .m magnitude because you can't transfer units of pint quantity to xarray numpy array (xarray.values)
        data = xarray.DataArray(data=ws.m,
                                dims=du.dims,
                                coords=du.coords,
                                name=field,
                                attrs=attrs)
    elif field == 'theta2':
        pres = nc[info['fname'][0]]
        temp = nc[info['fname'][1]]
        data = pres  # retain xarray metadata/coordinates
        theta = potential_temperature(pres, temp)
        data.values = theta
        data.attrs['units'] = str(theta.units)
        data.attrs['long_name'] = 'potential temperature'
    elif field == 'thetae2':
        pres = nc[info['fname'][0]]
        temp = nc[info['fname'][1]]
        dwpt = nc[info['fname'][2]]
        data = pres  # retain xarray metadata/coordinates
        thetae = equivalent_potential_temperature(pres, temp, dwpt)
        data.values = thetae
        data.attrs['units'] = str(thetae.units)
        data.attrs['long_name'] = 'equivalent potential temperature'
    elif field == 'scp' or field == 'stp' or field == 'tctp':
        cape = nc[info['fname'][0]]
        cin = nc[info['fname'][1]]
        ifile = get(valid_time, targetdir=targetdir, narrtype=narrFlx)
        ncFlx = xarray.open_dataset(ifile).metpy.parse_cf()
        srh = ncFlx[info['fname'][2]]
        shear_layer = info['fname'][3]
        bulk_shear = scalardata(shear_layer,
                                valid_time,
                                targetdir=targetdir,
                                debug=debug)
        lifted_condensation_level_height = scalardata('zlcl',
                                                      valid_time,
                                                      targetdir=targetdir,
                                                      debug=debug)

        if field == 'scp':
            # In SPC help, cin is positive in SCP formulation.
            cin_term = -40 / cin
            cin_term = cin_term.where(cin < -40, other=1)
            scp = supercell_composite(cape, srh,
                                      bulk_shear) * cin_term.metpy.unit_array
            attrs = {
                'units': str(scp.units),
                'long_name': 'supercell composite parameter'
            }
            data = xarray.DataArray(data=scp,
                                    dims=cape.dims,
                                    coords=cape.coords,
                                    name=field,
                                    attrs=attrs)
        if field == 'stp':
            cin_term = (200 + cin) / 150
            cin_term = cin_term.where(cin <= -50, other=1)
            cin_term = cin_term.where(cin >= -200, other=0)
            # CAPE, srh, bulk_shear, cin may be one vertical level, but LCL may be multiple heights.
            # xarray.broadcast() makes them all multiple heights with same shape, so significant_tornado doesn't
            # complain about expecting lat/lon 2 dimensions and getting 3 dimensions..
            (cape, lifted_condensation_level_height, srh, bulk_shear,
             cin_term) = xarray.broadcast(cape,
                                          lifted_condensation_level_height,
                                          srh, bulk_shear, cin_term)
            stp = significant_tornado(cape, lifted_condensation_level_height,
                                      srh,
                                      bulk_shear) * cin_term.metpy.unit_array
            attrs = {
                'units': str(stp.units),
                'long_name': 'significant tornado parameter',
                'verttitle':
                lifted_condensation_level_height.attrs['verttitle']
            }
            data = xarray.DataArray(data=stp,
                                    dims=cape.dims,
                                    coords=cape.coords,
                                    name=field,
                                    attrs=attrs)
        if field == 'tctp':
            tctp = srh / (40 * munits['m**2/s**2']) * bulk_shear / (
                12 * munits['m/s']) * (2000 - lifted_condensation_level_height
                                       ) / (1400 * munits.m)
            # But NARR storm relative helicity (srh) is 0-3 km AGL, while original TCTP expects 0-1 km AGL.
            # So the shear term is too large using the NARR srh. Normalize the srh term with a larger denominator.
            # In STP, srh is normalized by 150 m**2/s**2. Use that.
            tctp_0_3kmsrh = srh / (150 * munits['m**2/s**2']) * bulk_shear / (
                12 * munits['m/s']) * (2000 - lifted_condensation_level_height
                                       ) / (1400 * munits.m)
            attrs = {
                'units': 'dimensionless',
                'long_name': 'TC tornado parameter'
            }
            data = xarray.DataArray(data=tctp_0_3kmsrh,
                                    dims=cape.dims,
                                    coords=cape.coords,
                                    name=field,
                                    attrs=attrs)
    elif field == 'lcl':
        pres = nc[info['fname'][0]]
        temp = nc[info['fname'][1]]
        dwpt = nc[info['fname'][2]]
        LCL_pressure, LCL_temperature = lcl(pres.fillna(pres.mean()),
                                            temp.fillna(temp.mean()),
                                            dwpt.fillna(dwpt.mean()))
        # convert units to string or xarray.DataArray.metpy.unit_array dies with AttributeError: 'NoneType' object has no attribute 'evaluate'
        attrs = {
            "long_name": "lifted condensation level",
            "units": str(LCL_pressure.units),
            "from": "metpy.calc.lcl"
        }
        data = xarray.DataArray(data=LCL_pressure,
                                coords=pres.coords,
                                dims=pres.dims,
                                name='LCL',
                                attrs=attrs)
    elif field == 'zlcl':
        LCL_pressure = scalardata('lcl',
                                  valid_time,
                                  targetdir=targetdir,
                                  debug=debug)
        ifile = get(valid_time, targetdir=targetdir, narrtype=narr3D)
        nc3D = xarray.open_dataset(ifile).metpy.parse_cf()
        hgt3D = nc3D["HGT_221_ISBL"]
        data = pressure_to_height(LCL_pressure, hgt3D, targetdir=targetdir)
    else:
        data = nc[fvar]
    data = units(data, info, debug=debug)
    data = vertical(data, info, debug=debug)
    data = temporal(data, info, debug=debug)

    data.attrs['field'] = field
    data.attrs['ifile'] = os.path.realpath(ifile)
    data.attrs['levels'] = levels
    data.attrs['cmap'] = cmap

    if data.min() > levels[-1] or data.max() < levels[0]:
        print('levels', levels, 'out of range of data', data.min(), data.max())
        sys.exit(2)

    return data
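
A small sketch of the pattern used in the 'speed' branch above: reuse one wind component's xarray metadata and overwrite its values with the derived speed (the arrays and attributes below are illustrative, not NARR data):

import numpy as np
import xarray as xr
from metpy.calc import wind_speed
from metpy.units import units

u = xr.DataArray(np.array([[3.0, 0.0], [4.0, -3.0]]), dims=('y', 'x'))
v = xr.DataArray(np.array([[4.0, 5.0], [0.0, 4.0]]), dims=('y', 'x'))

data = u.copy()  # keep u's dims/coords/attrs for the derived field
data.values = wind_speed(u.values * units('m/s'), v.values * units('m/s')).m
data.attrs['long_name'] = 'wind speed'
data.attrs['units'] = 'm/s'
print(data.values)  # [[5. 5.] [4. 5.]]
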
Example #20
def plot_geop_wind(fig,
                   lon,
                   lat,
                   u,
                   v,
                   geop,
                   dom="d01",
                   ffmin=0,
                   ffmax=270,
                   ffinterval=2,
                   windunits="km/h"):
    """Plot geoptotential height + wind speed

	Author: Albenis Pérez Alarcón
	contact:[email protected]
	
	Parameters
	----------
	fig : `matplotlib.figure`
	The `figure` instance used for plotting
	lon : numpy 2d array
	array of longitude

	lat : numpy 2d array
	array of latitude

	u : numpy 2d array
	array of u wind component

	v : numpy 2d array
	array of v wind component

	geop : numpy 2d array
	array of geopotential height

	dom : str
	domain used by the file to plot: d01, d02 or d03

	ffmin : int
	min value for ff to colorbar

	ffmax : int
	max value for ff to colorbar

	ffinterval : int
	interval for colorbar

	windunits : str
	units for wind plot 

	barbs : str
	plot barbs  yes or no
	

	Returns
	-------
	`matplotlib.image.FigureImage`
	The `matplotlib.image.FigureImage` instance created

	"""

    crs = ccrs.PlateCarree()
    ax = plt.subplot(111, projection=ccrs.PlateCarree())
    ax.add_feature(cfeature.COASTLINE.with_scale('10m'), linewidth=0.6)
    ax.add_feature(cfeature.STATES, linewidth=0.6)

    if dom == "d01":
        ax.set_extent(
            [lon.min() + 10,
             lon.max() - 10,
             lat.min() + 7.5,
             lat.max() - 2],
            crs=ccrs.PlateCarree())
        paso_h = 5
        cbposition = 'vertical'
        s = 7
    if dom == "d02":
        ax.set_extent(
            [lon.min() + 0.5,
             lon.max() - 0.5,
             lat.min() + 2,
             lat.max() - 0.2],
            crs=ccrs.PlateCarree())
        paso_h = 2
        cbposition = 'vertical'
        s = 6

    ff = cl.wind_speed(u, v)
    cf = ax.contourf(lon,
                     lat,
                     ff,
                     range(int(ffmin), int(ffmax), int(ffinterval)),
                     cmap=colorbarwind(),
                     transform=ccrs.PlateCarree(),
                     extend='both')
    cs = ax.contour(lon,
                    lat,
                    geop,
                    transform=ccrs.PlateCarree(),
                    colors='k',
                    linewidths=1.5,
                    linestyles='solid')
    ax.clabel(cs,
              fontsize=14,
              inline=1,
              inline_spacing=5,
              fmt='%i',
              rightside_up=True,
              use_clabeltext=True)
    cb = fig.colorbar(cf,
                      orientation=cbposition,
                      extend='both',
                      aspect=40,
                      shrink=0.6,
                      pad=0.06)
    cb.set_label(windunits, size=17)
    cb.ax.tick_params(labelsize=17)
    gl = ax.gridlines(crs=ccrs.PlateCarree(),
                      draw_labels=True,
                      linewidth=1,
                      color='gray',
                      alpha=0.2,
                      linestyle='--')
    gl.xlabels_top = False
    gl.ylabels_left = True
    gl.ylabels_right = False
    gl.xlines = True

    lons = np.arange(ceil(lon.min()), ceil(lon.max()), paso_h)
    gl.xlocator = mticker.FixedLocator(lons)
    gl.xformatter = LONGITUDE_FORMATTER
    gl.yformatter = LATITUDE_FORMATTER
    gl.xlabel_style = {'size': 17, 'color': 'black'}
    gl.ylabel_style = {'size': 17, 'color': 'black'}
    return
Example #21
# Getting the cartopy coordinate reference system (CRS) of the projection of a DataArray
data_crs = data['u'].metpy.cartopy_crs

# Get multiple coordinates (for example, in just the x and y direction)
x, y = data['u'].metpy.coordinates('x', 'y')

# Or, we can just get a coordinate from the property
time = data['u'].metpy.time

# Select the data for this time
data_month = data.isel(time=0).squeeze('depth')
data_month['u'].attrs['units'] = 'm/s'
data_month['v'].attrs['units'] = 'm/s'

current_spd = mpcalc.wind_speed(data_month['u'], data_month['v'])
data_month = data_month.assign(cspd=(('latitude', 'longitude'), current_spd.m,
                                     {
                                         'units': str(current_spd.units)
                                     }))

# Create the matplotlib figure and axis
fig, ax = plt.subplots(1,
                       1,
                       figsize=(12, 8),
                       subplot_kw={'projection': data_crs})

# Add geographic features
ax.add_feature(cfeature.LAND.with_scale('50m'),
               facecolor=cfeature.COLORS['land'])
ax.add_feature(cfeature.OCEAN.with_scale('50m'),
Example #22
#Select starting hour
hour = 45

#Select variables from dataset
lon = data["lon"]
lat = data["lat"]
uwind = data["u-component_of_wind_isobaric"][hour].sel(isobaric5=20000)
vwind = data["v-component_of_wind_isobaric"][hour].sel(isobaric5=20000)
Geo_250 = data["Geopotential_height_isobaric"][hour].sel(isobaric3=25000) / 10

#assign time variables
time1 = datetime.strptime(str(data.time.data[hour].astype('datetime64[ms]')),
                          '%Y-%m-%dT%H:%M:%S.%f')
time2 = datetime.strftime(time1, "%Y-%m-%d %H00 UTC")

sped_250 = mpcalc.wind_speed(uwind, vwind).to('kt')

#Create figure
fig = plt.figure(figsize=(14, 9))
ax = fig.add_axes([1, 1, 1, 1], projection=ccrs.Miller())

#List map coordinates
Lon_west = -105
Lon_east = -55
Lat_south = 15
Lat_north = 38

#Plot extent
ax.set_extent([Lon_west, Lon_east, Lat_south, Lat_north])

#add landforms
Example #23
def test_scalar_speed():
    """Test wind speed with scalars."""
    s = wind_speed(-3. * units('m/s'), -4. * units('m/s'))
    assert_almost_equal(s, 5. * units('m/s'), 3)
Example #25
fnl_hght = ndimage.gaussian_filter(hght_vars, sigma=2, order=0)
fnl_uwnd = units('m/s') * ndimage.gaussian_filter(uwnd_vars, sigma=2, order=0)
fnl_vwnd = units('m/s') * ndimage.gaussian_filter(vwnd_vars, sigma=2, order=0)

lon = hght_data.variables['lon'][:]
lat = hght_data.variables['lat'][:]

time = hght_data.variables[
    hght_data.variables['Geopotential_height_isobaric'].dimensions[0]]
vtime = num2date(time[:], time.units)
ntime = vtime[0]
datatime = ntime.strftime("%H:%M" + "Z")

# Use MetPy to calculate the wind speed and convert to knots.
wndspeed = mpcalc.wind_speed(fnl_uwnd, fnl_vwnd).to('kt')

# Define the projection.
ax = plt.axes(projection=ccrs.LambertConformal(
    central_latitude=35, central_longitude=-101, standard_parallels=(30, 60)))

# Create the map figure.
fig = plt.figure(1, figsize=(10, 10))
ax.set_extent([-125, -89, 25, 50], ccrs.PlateCarree())

# Create the map features.
ax.add_feature(cfeature.OCEAN.with_scale('50m'),
               facecolor='#F2F2F2',
               edgecolor='black',
               zorder=0,
               linewidth=.5)
Example #26
def periodmean_gh_uv_pwat_ulj(initTimes=None,
                              fhours=[0],
                              day_back=0,
                              model='ECMWF',
                              gh_lev=500,
                              uv_lev=850,
                              ulj_lev=200,
                              map_ratio=14 / 9,
                              zoom_ratio=20,
                              cntr_pnt=[104, 34],
                              south_China_sea=True,
                              area=None,
                              city=False,
                              output_dir=None,
                              data_source='MICAPS',
                              Global=False,
                              **kwargs):

    if (area != None):
        south_China_sea = False

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=gh_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=ulj_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=ulj_lev),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='TCWV'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        filenames = []
        # get filename
        if (initTimes != None):
            for initTime in initTimes:
                for fhour in fhours:
                    filenames.append(utl.model_filename(initTime, fhour))
        else:
            filenames = utl.filename_day_back_model(day_back=day_back,
                                                    fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grids(data_dir[0], filenames=filenames)
        u = MICAPS_IO.get_model_grids(data_dir[1], filenames=filenames)
        v = MICAPS_IO.get_model_grids(data_dir[2], filenames=filenames)
        u2 = MICAPS_IO.get_model_grids(data_dir[3], filenames=filenames)
        v2 = MICAPS_IO.get_model_grids(data_dir[4], filenames=filenames)
        pwat = MICAPS_IO.get_model_grids(data_dir[5], filenames=filenames)
        psfc = MICAPS_IO.get_model_grids(data_dir[6], filenames=filenames)

    if (data_source == 'CIMISS'):
        # get filename
        filenames = []
        if (initTimes != None):
            for initTime in initTimes:
                for fhour in fhours:
                    filenames.append(
                        '20' + utl.model_filename(initTime, fhour, UTC=True))
        else:
            filenames = utl.filename_day_back_model(day_back=day_back,
                                                    fhour=fhour,
                                                    UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='GPH'),
                                            filenames=filenames,
                                            fcst_level=gh_lev,
                                            fcst_ele="GPH",
                                            units='gpm')
            gh['data'].values = gh['data'].values / 10.

            u = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='WIU'),
                                           filenames=filenames,
                                           fcst_level=uv_lev,
                                           fcst_ele="WIU",
                                           units='m/s')

            v = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='WIV'),
                                           filenames=filenames,
                                           fcst_level=uv_lev,
                                           fcst_ele="WIV",
                                           units='m/s')

            u2 = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='WIU'),
                                            filenames=filenames,
                                            fcst_level=ulj_lev,
                                            fcst_ele="WIU",
                                            units='m/s')

            v2 = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='WIV'),
                                            filenames=filenames,
                                            fcst_level=ulj_lev,
                                            fcst_ele="WIV",
                                            units='m/s')

            if (model == 'ECMWF'):
                pwat = utl.cimiss_model_ana_grids(
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='TCWV'),
                    filenames=filenames,
                    fcst_level=0,
                    fcst_ele="TCWV",
                    units='kg m-2')
            else:
                pwat = utl.cimiss_model_ana_grids(
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='TIWV'),
                    filenames=filenames,
                    fcst_level=0,
                    fcst_ele="TIWV",
                    units='kg m-2')

            psfc = utl.cimiss_model_ana_grids(data_code=utl.CMISS_data_code(
                data_source=model, var_name='PRS'),
                                              filenames=filenames,
                                              fcst_level=0,
                                              fcst_ele="PRS",
                                              units='Pa')
            psfc['data'] = psfc['data'] / 100.

        except KeyError:
            raise ValueError('Can not find all data needed')
    # prepare data

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent, delt_x, delt_y = utl.get_map_extent(cntr_pnt=cntr_pnt,
                                                    zoom_ratio=zoom_ratio,
                                                    map_ratio=map_ratio)

    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    u2 = utl.cut_xrdata(map_extent, u2, delt_x=delt_x, delt_y=delt_y)
    v2 = utl.cut_xrdata(map_extent, v2, delt_x=delt_x, delt_y=delt_y)
    pwat = utl.cut_xrdata(map_extent, pwat, delt_x=delt_x, delt_y=delt_y)

    gh = utl.mask_terrian(gh_lev, psfc, gh)
    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)
    u2 = utl.mask_terrian(ulj_lev, psfc, u2)
    v2 = utl.mask_terrian(ulj_lev, psfc, v2)

    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])
    ulj = mpcalc.wind_speed(u2['data'].values * units('m/s'),
                            v2['data'].values * units('m/s'))
    ulj_xr = u2.copy(deep=True)
    ulj_xr['data'].values = ulj.magnitude

    pwat_mean = pwat.mean('time')
    gh_mean = gh.mean('time')
    ulj_mean = ulj_xr.mean('time')
    uv_mean = uv.mean('time')

    gh_mean.attrs['model'] = model
    gh_mean.attrs['st_time'] = gh['time'].values[0]
    gh_mean.attrs['ed_time'] = gh['time'].values[-1]

    synoptic_graphics.draw_gh_uv_pwat_ulj(pwat=pwat_mean,
                                          gh=gh_mean,
                                          uv=uv_mean,
                                          ulj=ulj_mean,
                                          map_extent=map_extent,
                                          regrid_shape=20,
                                          city=city,
                                          south_China_sea=south_China_sea,
                                          output_dir=output_dir)
Example #27
dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)
ugeo, vgeo = mpcalc.geostrophic_wind(Z * units.meter,
                                     f,
                                     dx,
                                     dy,
                                     dim_order='yx')

# Get the wind direction for each point
wdir = mpcalc.wind_direction(ugeo, vgeo)

# Compute the Gradient Wind via an approximation
dydx = mpcalc.first_derivative(Z, delta=dx, axis=1)
d2ydx2 = mpcalc.first_derivative(dydx, delta=dx, axis=1)
R = ((1 + dydx.m**2)**(3. / 2.)) / d2ydx2.m

geo_mag = mpcalc.wind_speed(ugeo, vgeo)
grad_mag = geo_mag.m - (geo_mag.m**2) / (f.magnitude * R)

ugrad, vgrad = mpcalc.wind_components(grad_mag * units('m/s'), wdir)

# Calculate Ageostrophic wind
uageo = ugrad - ugeo
vageo = vgrad - vgeo

# Compute QVectors
uqvect, vqvect = mpcalc.q_vector(ugeo, vgeo, T * units.degC, 500 * units.hPa,
                                 dx, dy)

# Calculate divergence of the ageostrophic wind
div = mpcalc.divergence(uageo, vageo, dx, dy, dim_order='yx')
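
A tiny worked example of the gradient-wind approximation applied above, grad_mag ≈ geo_mag - geo_mag**2 / (f * R), with round illustrative numbers rather than values from the grid:

V_geo = 20.0   # geostrophic wind speed, m/s
f = 1.0e-4     # Coriolis parameter, 1/s
R = 1.0e6      # radius of curvature, m (positive for cyclonically curved flow)
V_grad = V_geo - V_geo**2 / (f * R)
print(V_grad)  # 16.0 m/s -- subgeostrophic, as expected for cyclonic curvature
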
Example #28
time_vals = num2date(time[:].squeeze(), time.units)
print(time_vals[:5])

##############################################
#### Now we can plot these up using matplotlib, which has ready-made support for `datetime`
#### objects.
###fig, ax = plt.subplots(1, 1, figsize=(9, 8))
###ax.plot(time_vals, temp[:].squeeze(), 'r', linewidth=2)
###ax.plot(time_vals, dp[:].squeeze(), 'g', linewidth=2)
###ax.set_ylabel('{} ({})'.format(temp.standard_name, temp.units))
###ax.set_xlabel('Forecast Time (UTC)')
###ax.grid(True)
###plt.show()

# Calculate wind speed and direction from UV wind
ws = wind_speed(np.array(uwind) * units('m/s'), np.array(vwind) * units('m/s'))
wd = wind_direction(np.array(uwind) * units('m/s'), np.array(vwind) * units('m/s'))

# ID For Plotting on Meteogram
probe_id = '0102A'

ddata = {'wind_speed': (np.array(ws).squeeze() * units('m/s')).to(units('knots')),
        #'wind_speed_max': (np.array(wsmax) * units('m/s')).to(units('knots')),
        'wind_direction': np.array(wd).squeeze() * units('degrees'),
        'dewpoint': dewpoint_from_relative_humidity((np.array(temp).squeeze() * units.K),
                                                    np.array(rh).squeeze() / 100.).to(units('degF')),
        'air_temperature': (np.array(temp).squeeze() * units('K')).to(units('degF')),
        'mean_slp': (np.array(pmsl).squeeze() * units('Pa')).to(units('hPa')),
        #'mean_slp': calc_mslp(np.array(temp), np.array(pres), hgt_example) * units('hPa'),
        'relative_humidity': np.array(rh).squeeze(), 
        'precipitation': np.array(prcip).squeeze(), 
Example #29
lons = ds.lon.data

# Select and grab data
hght = ds['Geopotential_height_isobaric']
uwnd = ds['u-component_of_wind_isobaric']
vwnd = ds['v-component_of_wind_isobaric']

# Select and grab 500-hPa geopotential heights and wind components, smooth with gaussian_filter
hght_500 = gaussian_filter(hght.sel(isobaric=500).data[0], sigma=3.0)
uwnd_500 = gaussian_filter(uwnd.sel(isobaric=500).data[0],
                           sigma=3.0) * units('m/s')
vwnd_500 = gaussian_filter(vwnd.sel(isobaric=500).data[0],
                           sigma=3.0) * units('m/s')

# Use MetPy to calculate the wind speed for colorfill plot, change units to knots from m/s
sped_500 = mpcalc.wind_speed(uwnd_500, vwnd_500).to('kt')

# Create a clean datetime object for plotting based on time of Geopotential heights
vtime = datetime.strptime(str(ds.time.data[0].astype('datetime64[ms]')),
                          '%Y-%m-%dT%H:%M:%S.%f')

######################################################################
# Map Creation
# ------------
#
# This next set of code creates the plot and draws contours on a Lambert
# Conformal map centered on -100 E longitude. The main view is over the
# CONUS with geopotential heights contoured every 60 m and wind speed in
# knots every 20 knots starting at 30 kt.
#
Example #30
cbar = plt.colorbar(cs,
                    cax=ax2,
                    shrink=0.75,
                    pad=0.01,
                    ticks=[20, 30, 40, 50, 60, 70])

print("Filled contours for PWAT")

#--------------------------------------------------------------------------------------------------------
# 250-hPa wind
#--------------------------------------------------------------------------------------------------------

#Get the data for this variable
u = data['u'].sel(lev=250)
v = data['v'].sel(lev=250)
wind = calc.wind_speed(u, v)

#Specify contour settings
clevs = [40, 50, 60, 70, 80, 90, 100, 110]
cmap = col.ListedColormap([
    '#99E3FB', '#47B6FB', '#0F77F7', '#AC97F5', '#A267F4', '#9126F5',
    '#E118F3', '#E118F3'
])
extend = "max"

#Contour fill this variable
norm = col.BoundaryNorm(clevs, cmap.N)
cs = ax.contourf(lon,
                 lat,
                 wind,
                 clevs,
Example #31
    coords = ght.coordinates.split()
    levels = dataset.variables[coords[2]]
    lindex = np.where(levels[:]==float(plev)*100.)[0][0]
    datal = ght[0,lindex,r0:r1,c0:c1]  # get CONUS subregion and desired level
if field == 'vort':
    ## compute relative vorticity from absolute vorticity
    absv = dataset.variables['Absolute_vorticity_isobaric']
    coords = absv.coordinates.split()
    levels = dataset.variables[coords[2]]
    lindex = np.where(levels[:]==float(plev)*100.)[0][0]
    absv = absv[0,lindex,r0:r1,c0:c1]  # this is ABSOLUTE vorticity
    planetary = 2.*(7.2921*10.**-5.)*np.sin(np.radians(glat))  # compute PLANETARY vorticity
    data = (absv - planetary) * 10**5.  # now compute RELATIVE vorticity
if field == 'wind' and int(plev) <= 500:
    ## only compute wind speed for 500- and 300-hPa 'wind' maps
    data = mpcalc.wind_speed(uwind, vwind).m
if plev == '700':
    ## get relative humidity field
    data = dataset.variables['Relative_humidity_isobaric']
    coords = data.coordinates.split()
    levels = dataset.variables[coords[2]]
    lindex = np.where(levels[:]==float(plev)*100.)[0][0]
    data = data[0,lindex,r0:r1,c0:c1]
    ## get temperature field
    datal = dataset.variables['Temperature_isobaric']
    coords = datal.coordinates.split()
    levels = dataset.variables[coords[2]]
    lindex = np.where(levels[:]==float(plev)*100.)[0][0]
    datal = datal[0,lindex,r0:r1,c0:c1] - 273.15  # convert from Kelvin to degrees Celsius
    bcolor = 'black'
if plev == 'surface':
#   Surface dewpoint
#
#   700-hPa dewpoint depression
#
#   12-hr surface pressure falls and 500-hPa height changes

# 500 hPa CVA
dx, dy = mpcalc.lat_lon_grid_deltas(lon, lat)
vort_adv_500 = mpcalc.advection(
    avor_500, [u_500.to('m/s'), v_500.to('m/s')],
    (dx, dy), dim_order='yx') * 1e9
vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

####################################
# For the jet axes, we will calculate the windspeed at each level, and plot the highest values
wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

################################
# 700-hPa dewpoint depression will be calculated from Temperature_isobaric and RH
Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

######################################
# 12-hr surface pressure falls and 500-hPa height changes
pmsl_change = pmsl - pmsl_00z
hgt_500_change = hgt_500 - hgt_500_00z

######################################
# To plot the jet axes, we will mask the wind fields below the upper 1/3 of windspeed.
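
A minimal sketch of the masking step described in the comment above, keeping only winds in the upper third of the speed range (synthetic values):

import numpy as np
import numpy.ma as ma

wspd = np.array([10., 20., 30., 45., 60.])
u = np.array([10., 20., 30., 45., 60.])

mask = ma.masked_less_equal(wspd, 0.66 * np.max(wspd)).mask
u[mask] = np.nan  # only the 45 and 60 m/s points survive for the jet axis
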
Example #33
def Miller_Composite_Chart(initial_time=None,
                           fhour=24,
                           day_back=0,
                           model='GRAPES_GFS',
                           map_ratio=19 / 9,
                           zoom_ratio=20,
                           cntr_pnt=[102, 34],
                           Global=False,
                           south_China_sea=True,
                           area='全国',
                           city=False,
                           output_dir=None):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='BLI'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='Td2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PRMSL')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
        filename2 = utl.model_filename(initial_time, fhour - 12)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        filename2 = utl.filename_day_back_model(day_back=day_back,
                                                fhour=fhour - 12)

    # retrieve data from micaps server
    rh_700 = get_model_grid(directory=data_dir[0], filename=filename)
    if rh_700 is None:
        return

    u_300 = get_model_grid(directory=data_dir[1], filename=filename)
    if u_300 is None:
        return

    v_300 = get_model_grid(directory=data_dir[2], filename=filename)
    if v_300 is None:
        return

    u_500 = get_model_grid(directory=data_dir[3], filename=filename)
    if u_500 is None:
        return

    v_500 = get_model_grid(directory=data_dir[4], filename=filename)
    if v_500 is None:
        return

    u_850 = get_model_grid(directory=data_dir[5], filename=filename)
    if u_850 is None:
        return

    v_850 = get_model_grid(directory=data_dir[6], filename=filename)
    if v_850 is None:
        return

    t_700 = get_model_grid(directory=data_dir[7], filename=filename)
    if t_700 is None:
        return

    hgt_500 = get_model_grid(directory=data_dir[8], filename=filename)
    if hgt_500 is None:
        return

    hgt_500_2 = get_model_grid(directory=data_dir[8], filename=filename2)
    if hgt_500_2 is None:
        return

    BLI = get_model_grid(directory=data_dir[9], filename=filename)
    if BLI is None:
        return

    Td2m = get_model_grid(directory=data_dir[10], filename=filename)
    if Td2m is None:
        return

    PRMSL = get_model_grid(directory=data_dir[11], filename=filename)
    if PRMSL is None:
        return

    PRMSL2 = get_model_grid(directory=data_dir[11], filename=filename2)
    if PRMSL2 is None:
        return

    lats = np.squeeze(rh_700['lat'].values)
    lons = np.squeeze(rh_700['lon'].values)
    x, y = np.meshgrid(rh_700['lon'], rh_700['lat'])

    tmp_700 = t_700['data'].values.squeeze() * units('degC')
    u_300 = (u_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_300 = (v_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_500 = (u_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_500 = (v_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_850 = (u_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_850 = (v_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    hgt_500 = (hgt_500['data'].values.squeeze()) * 10 / 9.8 * units.meter
    rh_700 = rh_700['data'].values.squeeze()
    lifted_index = BLI['data'].values.squeeze() * units.kelvin
    Td_sfc = Td2m['data'].values.squeeze() * units('degC')
    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    avor_500 = mpcalc.absolute_vorticity(u_500, v_500, dx, dy,
                                         y * units.degree)
    pmsl = PRMSL['data'].values.squeeze() * units('hPa')

    hgt_500_2 = (hgt_500_2['data'].values.squeeze()) * 10 / 9.8 * units.meter
    pmsl2 = PRMSL2['data'].values.squeeze() * units('hPa')

    # 500 hPa CVA
    vort_adv_500 = mpcalc.advection(
        avor_500, [u_500.to('m/s'), v_500.to('m/s')],
        (dx, dy), dim_order='yx') * 1e9
    vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

    wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
    wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
    wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

    Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

    pmsl_change = pmsl - pmsl2
    hgt_500_change = hgt_500 - hgt_500_2

    mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
    u_500[mask_500] = np.nan
    v_500[mask_500] = np.nan

    # 300 hPa
    mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
    u_300[mask_300] = np.nan
    v_300[mask_300] = np.nan

    # 850 hPa
    mask_850 = ma.masked_less_equal(wspd_850, 0.66 * np.max(wspd_850)).mask
    u_850[mask_850] = np.nan
    v_850[mask_850] = np.nan

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((lons > map_extent[0] - delt_x)
                      & (lons < map_extent[1] + delt_x))
    idx_y1 = np.where((lats > map_extent[2] - delt_y)
                      & (lats < map_extent[3] + delt_y))

    fcst_info = {
        'lon': lons,
        'lat': lats,
        'fhour': fhour,
        'model': model,
        'init_time': t_700.coords['forecast_reference_time'].values
    }

    synthetical_graphics.draw_Miller_Composite_Chart(
        fcst_info=fcst_info,
        u_300=u_300,
        v_300=v_300,
        u_500=u_500,
        v_500=v_500,
        u_850=u_850,
        v_850=v_850,
        pmsl_change=pmsl_change,
        hgt_500_change=hgt_500_change,
        Td_dep_700=Td_dep_700,
        Td_sfc=Td_sfc,
        pmsl=pmsl,
        lifted_index=lifted_index,
        vort_adv_500_smooth=vort_adv_500_smooth,
        map_extent=map_extent,
        add_china=True,
        city=False,
        south_China_sea=True,
        output_dir=None,
        Global=False)