Example #1
0
 def wind_composite(self, uv_file):
     ini_time = self.time['ini'].format('YYYYMMDDHH')
     shift_time = str(self.time['shift']).zfill(2)
     composite_name = 'ws_{}_{}.nc'.format(ini_time, shift_time)
     composite_file = self.data_path + self.base_path + composite_name
     try:
         os.makedirs(self.data_path + self.base_path)
     except OSError:
         pass
     finally:
         if not os.path.exists(composite_file):
             try:
                 u_cube = iris.load(uv_file['u'][0])[0][:, :, :, :]
                 v_cube = iris.load(uv_file['v'][0])[0][:, :, :, :]
             except IndexError:
                 Logger(_log, level='debug').logger.info(
                     'EC Ens Wind Composite Failed: no uv files in {}'.
                     format(self.data_path + self.base_path))
             else:
                 Logger(_log, level='debug').logger.info(
                     'EC Ens wind composite: {}'.format(composite_file))
                 ws = np.zeros(shape=(u_cube.shape[0], u_cube.shape[1] + 1,
                                      u_cube.shape[2], u_cube.shape[3]))
                 wd = np.zeros(shape=(u_cube.shape[0], u_cube.shape[1] + 1,
                                      u_cube.shape[2], u_cube.shape[3]))
                 for member in range(u_cube.shape[1] + 1):
                     if member != 0:
                         constraint = iris.Constraint(
                             ensemble_member=member)
                         u = u_cube.extract(constraint).data
                         v = v_cube.extract(constraint).data
                         ws[:, member, :, :] = (u**2 + v**2)**0.5
                         wd[:, member, :, :] = mpcalc.wind_direction(
                             u, v).magnitude
                     else:
                         u = iris.load(
                             uv_file['u_control'][0])[0][:, :, :].data
                         v = iris.load(
                             uv_file['v_control'][0])[0][:, :, :].data
                         ws[:, 0, :, :] = (u**2 + v**2)**0.5
                         wd[:, 0, :, :] = mpcalc.wind_direction(u,
                                                                v).magnitude
                 ws_cube = iris.cube.Cube(ws, 'wind_speed', units='m s**-1')
                 ws_cube.add_dim_coord(u_cube.coords('time')[0], 0)
                 ws_cube.add_dim_coord(u_cube.coords('latitude')[0], 2)
                 ws_cube.add_dim_coord(u_cube.coords('longitude')[0], 3)
                 number = iris.coords.DimCoord(np.arange(u_cube.shape[1] +
                                                         1,
                                                         dtype=np.int32),
                                               standard_name=None,
                                               long_name='number',
                                               var_name='number')
                 ws_cube.add_dim_coord(number, 1)
                 ws = xr.DataArray.from_iris(ws_cube)
                 wd = ws.copy(data=wd)
                 wind = xr.Dataset({'ws': ws, 'wd': wd})
                 wind.to_netcdf(composite_file)
Example #2
0
def wind_direction(ls):
    # depending on input dataset, the lev dimension is transposed or not. I want all levels above 990 hPa,
    # not the 1015-level below 990 hPa.
    if ls.u.sel(lev=slice(990, None)).lev.max() == 990:
        wind_dir = xr.full_like(ls.u.sel(lev=slice(990, None)), np.nan)
        wind_dir[:, :] = mpcalc.wind_direction(ls.u.sel(lev=slice(990, None)),
                                               ls.v.sel(lev=slice(990, None)))
    else:
        wind_dir = xr.full_like(ls.u.sel(lev=slice(None, 990)), np.nan)
        wind_dir[:, :] = mpcalc.wind_direction(ls.u.sel(lev=slice(None, 990)),
                                               ls.v.sel(lev=slice(None, 990)))
    wind_dir.attrs['long_name'] = 'wind direction'
    wind_dir.attrs['units'] = 'degrees'
    return xr.merge([ls, xr.Dataset({'wind_dir': wind_dir})])
Example #3
0
    def _process_feature(self):
        metero_var = config['data']['metero_var']
        metero_use = config['experiments']['metero_use']
        metero_idx = [metero_var.index(var) for var in metero_use]
        self.feature = self.feature[:, :, metero_idx]

        if config['experiments']['use_wind_coordinates']:
            u = self.feature[:, :,
                             -2] * units.meter / units.second  # u_component_of_wind+950
            v = self.feature[:, :,
                             -1] * units.meter / units.second  # v_component_of_wind+950
            speed = 3.6 * mpcalc.wind_speed(u, v).magnitude
            direc = mpcalc.wind_direction(u, v).magnitude
        else:
            print("Not using wind coordinates, but speed/direction directly")
            speed = self.feature[:, :, -2]
            direc = self.feature[:, :, -1]

        h_arr = []
        w_arr = []
        for i in self.time_arrow:
            h_arr.append(i.hour)
            w_arr.append(i.isoweekday())
        h_arr = np.stack(h_arr, axis=-1)
        w_arr = np.stack(w_arr, axis=-1)
        h_arr = np.repeat(h_arr[:, None], self.graph.node_num, axis=1)
        w_arr = np.repeat(w_arr[:, None], self.graph.node_num, axis=1)

        self.feature = np.concatenate([
            self.feature, h_arr[:, :, None], w_arr[:, :, None],
            speed[:, :, None], direc[:, :, None]
        ],
                                      axis=-1)
Example #4
0
    def _process_feature(self):
        metero_var = config['data']['metero_var']
        metero_use = config['experiments']['metero_use']
        metero_idx = [metero_var.index(var) for var in metero_use]
        self.feature = self.feature[:, :, metero_idx]

        u = self.feature[:, :, -2] * units.meter / units.second
        v = self.feature[:, :, -1] * units.meter / units.second
        speed = 3.6 * mpcalc.wind_speed(u, v).magnitude
        direc = mpcalc.wind_direction(u, v).magnitude

        h_arr = []
        w_arr = []
        for i in self.time_arrow:
            h_arr.append(i.hour)
            w_arr.append(i.isoweekday())
        h_arr = np.stack(h_arr, axis=-1)
        w_arr = np.stack(w_arr, axis=-1)
        h_arr = np.repeat(h_arr[:, None], self.graph.node_num, axis=1)
        w_arr = np.repeat(w_arr[:, None], self.graph.node_num, axis=1)

        self.feature = np.concatenate([
            self.feature, h_arr[:, :, None], w_arr[:, :, None],
            speed[:, :, None], direc[:, :, None]
        ],
                                      axis=-1)
Example #5
0
    def _write_profile(self, csv_path):

        profiles = self.atmo_profiles  # dictionary
        pres = profiles.get('pres').get('data')
        u = profiles.get('u').get('data')
        v = profiles.get('v').get('data')
        temp = profiles.get('temp').get('data').to('degC')
        sphum = profiles.get('sphum').get('data')

        dewpt = np.array(
            mpcalc.dewpoint_from_specific_humidity(sphum, temp,
                                                   pres).to('degC'))
        wspd = np.array(mpcalc.wind_speed(u, v))
        wdir = np.array(mpcalc.wind_direction(u, v))

        pres = np.array(pres)
        temp = np.array(temp)

        profile = pd.DataFrame({
            'LEVEL': pres,
            'TEMP': temp,
            'DWPT': dewpt,
            'WDIR': wdir,
            'WSPD': wspd,
        })

        profile.to_csv(csv_path, index=False, float_format="%10.2f")
Example #6
0
def test_oceanographic_direction(array_type):
    """Test oceanographic direction (to) convention."""
    mask = [False, True, False]
    u = array_type([5., 5., 0.], 'm/s', mask=mask)
    v = array_type([-5., 0., 5.], 'm/s', mask=mask)

    direc = wind_direction(u, v, convention='to')
    true_dir = array_type([135., 90., 360.], 'deg', mask=mask)
    assert_array_almost_equal(direc, true_dir, 4)
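
The 'to' values above are just the default 'from' directions rotated by 180 degrees (with due-north results reported as 360 rather than 0, and calm winds still 0). A minimal sketch of that relationship, assuming MetPy and NumPy are available, is:

import numpy as np
import metpy.calc as mpcalc
from metpy.units import units

u = np.array([5., 5.]) * units('m/s')
v = np.array([-5., 0.]) * units('m/s')

wdir_from = mpcalc.wind_direction(u, v)                  # default 'from' convention
wdir_to = mpcalc.wind_direction(u, v, convention='to')   # oceanographic 'to' convention

# away from the calm and due-north special cases, the two differ by 180 degrees
print(np.mod(wdir_from.m + 180., 360.), wdir_to.m)       # [135. 90.] [135. 90.]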
Example #7
0
def test_direction_with_north_and_calm():
    """Test how wind direction handles northerly and calm winds."""
    u = np.array([0., -0., 0.]) * units('m/s')
    v = np.array([0., 0., -5.]) * units('m/s')

    direc = wind_direction(u, v)

    true_dir = np.array([0., 0., 360.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
Example #8
0
def test_direction_without_units():
    """Test calculating wind direction without units."""
    u = np.array([0., -5., -4., -3.])
    v = np.array([0., 5., 0., -3.])

    direc = wind_direction(u, v)

    true_dir = np.array([0., 135., 90., 45.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
Example #9
0
def test_direction():
    """Test calculating wind direction."""
    u = np.array([4., 2., 0., 0.]) * units('m/s')
    v = np.array([0., 2., 4., 0.]) * units('m/s')

    direc = wind_direction(u, v)

    true_dir = np.array([270., 225., 180., 0.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
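
The expected values in these tests encode MetPy's meteorological 'from' convention: the compass angle the wind blows from, with calm winds reported as 0 degrees and northerly winds as 360 degrees. A minimal NumPy-only sketch of that mapping (wind_direction_deg is an illustrative helper, not part of MetPy; no pint units) is:

import numpy as np

def wind_direction_deg(u, v):
    # meteorological 'from' direction: compass angle of the vector (-u, -v)
    wdir = 90. - np.degrees(np.arctan2(-np.asarray(v), -np.asarray(u)))
    wdir = np.where(wdir <= 0., wdir + 360., wdir)         # wrap into (0, 360]
    calm = (np.asarray(u) == 0.) & (np.asarray(v) == 0.)
    return np.where(calm, 0., wdir)                        # calm winds report 0

print(wind_direction_deg([4., 2., 0., 0.], [0., 2., 4., 0.]))   # [270. 225. 180.   0.]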
Example #10
0
def test_direction_with_north_and_calm():
    """Test how wind direction handles northerly and calm winds."""
    u = np.array([0., -0., 0.]) * units('m/s')
    v = np.array([0., 0., -5.]) * units('m/s')

    direc = wind_direction(u, v)

    true_dir = np.array([0., 0., 360.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
Example #11
0
def test_direction_without_units():
    """Test calculating wind direction without units."""
    u = np.array([0., -5., -4., -3.])
    v = np.array([0., 5., 0., -3.])

    direc = wind_direction(u, v)

    true_dir = np.array([0., 135., 90., 45.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
Example #12
0
def test_direction():
    """Test calculating wind direction."""
    u = np.array([4., 2., 0., 0.]) * units('m/s')
    v = np.array([0., 2., 4., 0.]) * units('m/s')

    direc = wind_direction(u, v)

    true_dir = np.array([270., 225., 180., 0.]) * units.deg

    assert_array_almost_equal(true_dir, direc, 4)
Example #13
0
def sta_SkewT(model='ECMWF',points={'lon':[116.3833], 'lat':[39.9]},
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,250,200,150,100],
    fhour=3,output_dir=None):

    try:
        data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl='')]
    except KeyError:
        raise ValueError('Cannot find all directories needed')

    # read data
    initTime = get_latest_initTime(data_dir[0][0:-1]+"850")
    filename = initTime+'.'+str(fhour).zfill(3)
    TMP_4D=get_model_3D_grid(directory=data_dir[0][0:-1],filename=filename,levels=levels, allExists=False)
    TMP_2D=TMP_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    u_4D=get_model_3D_grid(directory=data_dir[1][0:-1],filename=filename,levels=levels, allExists=False)
    u_2D=u_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    v_4D=get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
    v_2D=v_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    HGT_4D=get_model_3D_grid(directory=data_dir[3][0:-1],filename=filename,levels=levels, allExists=False)
    HGT_2D=HGT_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    HGT_2D.attrs['model']=model
    HGT_2D.attrs['points']=points

    RH_4D=get_model_3D_grid(directory=data_dir[4][0:-1],filename=filename,levels=levels, allExists=False)
    RH_2D=RH_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    wind_dir_2D=mpcalc.wind_direction(u_2D['data'].values* units.meter / units.second,
        v_2D['data'].values* units.meter / units.second)
    wsp10m_2D=(u_2D['data']**2+v_2D['data']**2)**0.5
    Td2m=mpcalc.dewpoint_rh(TMP_2D['data'].values*units('degC'),RH_2D['data'].values/100.)

    p = np.squeeze(levels) * units.hPa
    T = np.squeeze(TMP_2D['data'].values) * units.degC
    Td = np.squeeze(np.array(Td2m)) * units.degC
    wind_speed = np.squeeze(wsp10m_2D.values) * units('m/s')
    wind_dir = np.squeeze(np.array(wind_dir_2D)) * units.degrees
    u = np.squeeze(u_2D['data'].values) * units('m/s')
    v = np.squeeze(v_2D['data'].values) * units('m/s')

    fcst_info= xr.DataArray(np.array(u_2D['data'].values),
                        coords=u_2D['data'].coords,
                        dims=u_2D['data'].dims,
                        attrs={'points': points,
                                'model': model})

    sta_graphics.draw_sta_skewT(
        p=p,T=T,Td=Td,wind_speed=wind_speed,wind_dir=wind_dir,u=u,v=v,
        fcst_info=fcst_info)
Example #14
0
def test_direction_with_north_and_calm(array_type):
    """Test how wind direction handles northerly and calm winds."""
    mask = [False, False, False, True]
    u = array_type([0., -0., 0., 1.], 'm/s', mask=mask)
    v = array_type([0., 0., -5., 1.], 'm/s', mask=mask)

    direc = wind_direction(u, v)

    true_dir = array_type([0., 0., 360., 225.], 'deg', mask=mask)

    assert_array_almost_equal(true_dir, direc, 4)
Example #15
0
def test_direction(array_type):
    """Test calculating wind direction."""
    # The last two (u, v) pairs and their masks test masking calm and negative directions
    mask = [False, True, False, True, True]
    u = array_type([4., 2., 0., 0., 1.], 'm/s', mask=mask)
    v = array_type([0., 2., 4., 0., -1], 'm/s', mask=mask)

    direc = wind_direction(u, v)

    true_dir = array_type([270., 225., 180., 0., 315.], 'degree', mask=mask)

    assert_array_almost_equal(true_dir, direc, 4)
Example #16
0
def grad_mask(Zint, REFmasked, REF, storm_relative_dir, ZDRmasked1,
              ZDRrmasked1, CC, CCall):
    #Inputs,
    #Zint: 1km AFL grid level
    #REFmasked: REF masked below 20 dBz
    #REF: 1km Reflectivity grid
    #storm_relative_dir: Vector direction along the reflectivity gradient in the forward flank
    #ZDRmasked1: 1km Differential Reflectivity (Zdr) grid, masked below 20 dBz reflectivity
    #ZDRrmasked1: Full volume Zdr gridded, masked below 20 dBz reflectivity
    #CC: 1km Correlation Coefficient (CC) grid
    #CCall: Full volume CC gridded
    print('Gradient Analysis and Masking')
    #Determining gradient direction and masking some Zhh and Zdr grid fields

    smoothed_ref1 = ndi.gaussian_filter(REFmasked, sigma=2, order=0)
    REFgradient = np.asarray(np.gradient(smoothed_ref1))
    REFgradient[0, :, :] = ma.masked_where(REF < 20, REFgradient[0, :, :])
    REFgradient[1, :, :] = ma.masked_where(REF < 20, REFgradient[1, :, :])
    grad_dir1 = wind_direction(REFgradient[1, :, :] * units('m/s'),
                               REFgradient[0, :, :] * units('m/s'))
    grad_mag = wind_speed(REFgradient[1, :, :] * units('m/s'),
                          REFgradient[0, :, :] * units('m/s'))
    grad_dir = ma.masked_where(REF < 20, grad_dir1)

    #Get difference between the gradient direction and the FFD gradient direction calculated earlier
    srdir = storm_relative_dir
    srirad = np.copy(srdir) * units('degrees').to('radian')
    grad_dir = grad_dir * units('degrees').to('radian')
    grad_ffd = np.abs(
        np.arctan2(np.sin(grad_dir - srirad), np.cos(grad_dir - srirad)))
    grad_ffd = np.asarray(grad_ffd) * units('radian')
    grad_ex = np.copy(grad_ffd)
    grad_ffd = grad_ffd.to('degrees')

    #Mask out areas where the difference between the two is too large and the ZDR is likely not in the forward flank
    ZDRmasked2 = ma.masked_where(grad_ffd > 120 * units('degrees'), ZDRmasked1)
    ZDRmasked = ma.masked_where(CC < .60, ZDRmasked2)
    ZDRallmasked = ma.masked_where(CCall < .70, ZDRrmasked1)
    ZDRallmasked = ma.filled(ZDRallmasked, fill_value=-2)
    ZDRrmasked = ZDRallmasked[Zint, :, :]

    #Add a fill value for the ZDR mask so that contours will be closed
    ZDRmasked = ma.filled(ZDRmasked, fill_value=-2)
    ZDRrmasked = ma.filled(ZDRrmasked, fill_value=-2)

    #Returning variables,
    #grad_mag: Array of wind velocity magnitude along reflectivity gradient
    #grad_ffd: Angle (degrees) used to indicate angular region of supercell containing the forward flank
    #ZDRmasked: Masked array ZDRmasked1 in regions outside the forward flank (grad_ffd) and below 0.6 CC
    #ZDRallmasked: Masked volume array (ZDRrmasked1) below 0.7 CC and filled with -2.0 values
    #ZDRrmasked: ZDRallmasked slice at 1km above freezing level
    return grad_mag, grad_ffd, ZDRmasked, ZDRallmasked, ZDRrmasked
Example #17
0
def test_speed_direction_roundtrip():
    """Test round-tripping between speed/direction and components."""
    # Test each quadrant of the whole circle
    wspd = np.array([15., 5., 2., 10.]) * units.meters / units.seconds
    wdir = np.array([160., 30., 225., 350.]) * units.degrees

    u, v = wind_components(wspd, wdir)

    wdir_out = wind_direction(u, v)
    wspd_out = wind_speed(u, v)

    assert_array_almost_equal(wspd, wspd_out, 4)
    assert_array_almost_equal(wdir, wdir_out, 4)
Example #18
0
def test_speed_direction_roundtrip():
    """Test round-tripping between speed/direction and components."""
    # Test each quadrant of the whole circle
    wspd = np.array([15., 5., 2., 10.]) * units.meters / units.seconds
    wdir = np.array([160., 30., 225., 350.]) * units.degrees

    u, v = wind_components(wspd, wdir)

    wdir_out = wind_direction(u, v)
    wspd_out = wind_speed(u, v)

    assert_array_almost_equal(wspd, wspd_out, 4)
    assert_array_almost_equal(wdir, wdir_out, 4)
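
The roundtrip closes because wind_components applies the inverse mapping, u = -speed * sin(direction) and v = -speed * cos(direction). A minimal plain-NumPy sketch of that forward step (wind_components_plain is an illustrative helper; 'from' convention and degrees assumed) is:

import numpy as np

def wind_components_plain(wspd, wdir_deg):
    # speed/direction -> (u, v) in the meteorological 'from' convention
    wdir = np.radians(np.asarray(wdir_deg))
    u = -np.asarray(wspd) * np.sin(wdir)   # eastward component
    v = -np.asarray(wspd) * np.cos(wdir)   # northward component
    return u, v

u, v = wind_components_plain([15., 5., 2., 10.], [160., 30., 225., 350.])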
Example #19
0
def _write_to_sounding(data_cube, idx_locs, ids, date, fmt=None):
    """
    Writes data to sounding files. Added BUFKIT-readable output capabilities. 
    """
    if fmt is None:
        fmt = 'sharppy'

    knt = 0
    for idx in idx_locs:
        t_out = data_cube[0, :, idx[0], idx[1]] - 273.15
        td_out = data_cube[1, :, idx[0], idx[1]]
        u = data_cube[2, :, idx[0], idx[1]] * units('m/s')
        v = data_cube[3, :, idx[0], idx[1]] * units('m/s')
        wdir_out = mpcalc.wind_direction(u, v).magnitude
        wspd_out = mpcalc.wind_speed(u, v).magnitude
        hgt_out = data_cube[4, :, idx[0], idx[1]]

        out_time = "%s%s%s/%s00" % (date[2:4], date[4:6], date[6:8],
                                    date[8:10])
        if fmt == 'sharppy':
            out_file = "%s/%s.%s" % (SOUNDING_DIR, date, ids[knt])
            f = open(out_file, 'w')

            f.write("%TITLE%\n")
            f.write(" %s   %s" % (ids[knt], out_time))
            f.write("\n\n")
            f.write('   LEVEL     HGHT     TEMP     DWPT     WDIR     WSPD\n')
            f.write('------------------------------------------------------\n')
            f.write('%RAW%\n')

            # This is a weird one. At some point in the past, the GRIB files
            # were ordered differently. Need to check for monotonic increasing
            # or decreasing heights and adjust pressures accordingly
            if strictly_increasing(list(hgt_out)):
                pres_incr = 1
                start_, end_, inc_ = 0, t_out.shape[0], 1
            else:
                pres_incr = -1
                start_, end_, inc_ = t_out.shape[0] - 1, -1, -1

            print(levs)
            print(hgt_out)
            print(start_, end_, inc_, pres_incr)
            for row in range(start_, end_, inc_):
                if hgt_out[row] > 0:
                    out_line = "%s,%s,%s,%s,%s,%s" % (
                        levs[::pres_incr][row], hgt_out[row], t_out[row],
                        td_out[row], wdir_out[row], wspd_out[row])
                    f.write(out_line + '\n')
            f.write('%END%')
        knt += 1
Example #20
0
def test_direction_masked():
    """Test calculating wind direction from masked wind components."""
    mask = np.array([True, False, True, False])
    u = np.array([4., 2., 0., 0.])
    v = np.array([0., 2., 4., 0.])

    u_masked = units.Quantity(np.ma.array(u, mask=mask), units('m/s'))
    v_masked = units.Quantity(np.ma.array(v, mask=mask), units('m/s'))

    direc = wind_direction(u_masked, v_masked)

    true_dir = np.array([270., 225., 180., 0.])
    true_dir_masked = units.Quantity(np.ma.array(true_dir, mask=mask), units.deg)

    assert_array_almost_equal(true_dir_masked, direc, 4)
Example #21
0
 def wind_direction(self, level):
     """
     Receive the integer value of the desired vertical pressure level and
     extract the wind direction at that level for the selected time step.
     Returns a plain numpy array of wind direction values in degrees.
     """
     # Obtaining the index for the given pressure level
     index_level = np.where(np.array(self.data["isobaric"]) == level *
                            100)[0][0]
     # Extracting wind components data
     uwnd = self.data["u-component_of_wind_isobaric"][
         self.time_step][index_level]
     vwnd = self.data["v-component_of_wind_isobaric"][
         self.time_step][index_level]
     # Calculate wind direction using metpy function
     wind_dir = mpcalc.wind_direction(uwnd, vwnd)
     return np.array(wind_dir)
Example #22
0
 def wind_composite(self, uv_files):
     ini_time = self.time['ini'].format('YYYYMMDDHH')
     shift_time = str(self.time['shift']).zfill(2)
     composite_name = 'ws_{}_{}.nc'.format(ini_time, shift_time)
     composite_file = self.data_path + self.base_path + composite_name
     # os.system('rm {}'.format(composite_file))
     dataset = []
     if not os.path.exists(composite_file):
         if len(uv_files) == 21:
             for uv_file in uv_files:
                 try:
                     uv = xr.open_dataset(uv_file,
                                          engine='cfgrib',
                                          backend_kwargs={
                                              'filter_by_keys': {
                                                  'typeOfLevel':
                                                  'heightAboveGround',
                                                  'level': 10
                                              }
                                          })
                 except Exception as e:
                     Logger(_log, level='debug').logger.warning(
                         'GEFS fcst Wind composite failed: uv files broken {} -> {}'
                         .format(uv_file, e))
                     return
                 else:
                     Logger(_log, level='debug').logger.info(
                         'GEFS fcst wind composite: {}'.format(uv_file))
                     ws = (uv["u10"]**2 + uv["v10"]**2)**0.5
                     wd = ws.copy(data=mpcalc.wind_direction(
                         uv["u10"], uv["v10"]).magnitude)
                     wind = xr.Dataset({'ws': ws, 'wd': wd})
                     wind = wind.expand_dims(['valid_time', 'number']).drop(
                         ['time', 'step']).rename({'valid_time': 'time'})
                     dataset.append(wind)
             wind_ens = xr.auto_combine(dataset)
             wind_ens.to_netcdf(composite_file)
         else:
             Logger(_log, level='debug').logger.info(
                 'GEFS fcst wind composite failed: not enough grb files in {}'
                 .format(self.gefs_path + self.base_path))
     idx_files = glob.glob(self.data_path + self.base_path + '*.idx')
     for file in idx_files:
         os.remove(file)
Example #23
0
    def _gen_edges(self):
        coords = []
        lonlat = {}
        for i in self.nodes:
            coords.append([self.nodes[i]['lon'], self.nodes[i]['lat']])
        # compute Euclidean distances between all pairs of node coordinates
        dist = distance.cdist(coords, coords, 'euclidean')
        adj = np.zeros((self.node_num, self.node_num), dtype=np.uint8)
        # adjacency matrix: 1 where the distance is below the threshold
        adj[dist <= self.dist_thres] = 1
        #print(adj)
        assert adj.shape == dist.shape
        dist = dist * adj
        edge_index, dist = dense_to_sparse(torch.tensor(dist))
        edge_index, dist = edge_index.numpy(), dist.numpy()
        direc_arr = []
        dist_kilometer = []
        for i in range(edge_index.shape[1]):
            src, dest = edge_index[0, i], edge_index[1, i]
            # src and dest are the indices of the edge's two endpoint nodes
            src_lat, src_lon = self.nodes[src]['lat'], self.nodes[src]['lon']
            dest_lat, dest_lon = self.nodes[dest]['lat'], self.nodes[dest][
                'lon']
            src_location = (src_lat, src_lon)
            dest_location = (dest_lat, dest_lon)
            dist_km = geodesic(src_location, dest_location).kilometers
            # geodesic distance between the two points
            v, u = src_lat - dest_lat, src_lon - dest_lon
            # treat the lat/lon differences as pseudo u/v wind components
            u = u * units.meter / units.second
            v = v * units.meter / units.second

            direc = mpcalc.wind_direction(u, v).magnitude
            # list of edge directions
            direc_arr.append(direc)
            # list of geodesic distances
            dist_kilometer.append(dist_km)

        direc_arr = np.stack(direc_arr)
        dist_arr = np.stack(dist_kilometer)
        # stack geodesic distance and edge direction as edge attributes
        attr = np.stack([dist_arr, direc_arr], axis=-1)
        return edge_index, attr
Example #24
0
def readWeatherData(filepath):
    fullshape = (949, 739)
    use_keys = [
        'x_wind_gust_10m', 'y_wind_gust_10m'
    ]  #"U-momentum of gusts in 10m height"m/s, "V-momentum of gusts in 10m height"m/s
    uparam, vparam = use_keys
    dataset = xr.open_dataset(filepath)
    dataset = dataset.metpy.parse_cf()  #[uparam, vparam])
    dataset[uparam].metpy.convert_units('knots')
    dataset[vparam].metpy.convert_units('knots')
    data_crs = dataset[uparam].metpy.cartopy_crs
    wind_speed = mpcalc.wind_speed(dataset[uparam], dataset[vparam])
    wind_direction = mpcalc.wind_direction(dataset[uparam], dataset[vparam])
    dataset['wind_speed'] = xr.DataArray(wind_speed.magnitude,
                                         coords=dataset[uparam].coords,
                                         dims=dataset[uparam].dims)
    dataset['wind_speed'].attrs['units'] = wind_speed.units
    dataset['wind_direction'] = xr.DataArray(wind_direction.magnitude,
                                             coords=dataset[uparam].coords,
                                             dims=dataset[uparam].dims)
    dataset['wind_direction'].attrs['units'] = wind_direction.units
    return dataset
Example #25
0
 def wind_composite(self, uv_file):
     ini_time = self.time['ini'].format('YYYYMMDDHH')
     shift_time = str(self.time['shift']).zfill(2)
     composite_name = 'ws_{}_{}.nc'.format(ini_time, shift_time)
     composite_file = os.path.dirname(uv_file) + '/' + composite_name
     # os.system('rm {}'.format(composite_file))
     if not os.path.exists(composite_file):
         try:
             uv = xr.open_dataset(uv_file)
         except Exception as e:
             Logger(_log, level='debug').logger.warning(
                 'GFS fcst Wind composite failed: {} -> {}'.format(
                     uv_file, e))
             return
         else:
             # print('GFS fcst wind composite: {}'.format(uv_file))
             uv = uv.sel(time=self.time['ini'].shift(
                 hours=self.time['shift']).datetime).expand_dims('time')
             ws = (uv["u10"]**2 + uv["v10"]**2)**0.5
             wd = ws.copy(
                 data=mpcalc.wind_direction(uv["u10"], uv["v10"]).magnitude)
             wind = xr.Dataset({'ws': ws, 'wd': wd}).squeeze('record')
             wind.to_netcdf(composite_file)
Example #26
0
def test_scalar_direction():
    """Test wind direction with scalars."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert_almost_equal(d, 216.870 * units.deg, 3)
Example #27
0
def test_direction_dimensions():
    """Verify wind_direction returns degrees."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert str(d.units) == 'degree'
Example #28
0
def test_direction_dimensions():
    """Verify wind_direction returns degrees."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert str(d.units) == 'degree'
Example #29
0
lats = np.linspace(35, 50, 101)
lons = np.linspace(260, 290, 101)
lon, lat = np.meshgrid(lons, lats)

# Calculate Geostrophic Wind from Analytic Heights
f = mpcalc.coriolis_parameter(lat * units('degrees'))
dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)
ugeo, vgeo = mpcalc.geostrophic_wind(Z * units.meter,
                                     f,
                                     dx,
                                     dy,
                                     dim_order='yx')

# Get the wind direction for each point
wdir = mpcalc.wind_direction(ugeo, vgeo)

# Compute the Gradient Wind via an approximation
dydx = mpcalc.first_derivative(Z, delta=dx, axis=1)
d2ydx2 = mpcalc.first_derivative(dydx, delta=dx, axis=1)
R = ((1 + dydx.m**2)**(3. / 2.)) / d2ydx2.m

geo_mag = mpcalc.wind_speed(ugeo, vgeo)
grad_mag = geo_mag.m - (geo_mag.m**2) / (f.magnitude * R)

ugrad, vgrad = mpcalc.wind_components(grad_mag * units('m/s'), wdir)

# Calculate Ageostrophic wind
uageo = ugrad - ugeo
vageo = vgrad - vgeo
Example #30
0
def do(ts):
    """Process this date timestamp"""
    asos = get_dbconn("asos", user="******")
    iemaccess = get_dbconn("iem")
    icursor = iemaccess.cursor()
    table = "summary_%s" % (ts.year,)
    # Get what we currently know, just grab everything
    current = read_sql(
        """
        SELECT * from """
        + table
        + """ WHERE day = %s
    """,
        iemaccess,
        params=(ts.strftime("%Y-%m-%d"),),
        index_col="iemid",
    )
    df = read_sql(
        """
    select station, network, iemid, drct, sknt, gust,
    valid at time zone tzname as localvalid, valid,
    tmpf, dwpf, relh, feel,
    peak_wind_gust, peak_wind_drct, peak_wind_time,
    peak_wind_time at time zone tzname as local_peak_wind_time from
    alldata d JOIN stations t on (t.id = d.station)
    where (network ~* 'ASOS' or network = 'AWOS')
    and valid between %s and %s and t.tzname is not null
    and date(valid at time zone tzname) = %s
    ORDER by valid ASC
    """,
        asos,
        params=(
            ts - datetime.timedelta(days=2),
            ts + datetime.timedelta(days=2),
            ts.strftime("%Y-%m-%d"),
        ),
        index_col=None,
    )
    if df.empty:
        print("compute_daily no ASOS database entries for %s" % (ts,))
        return
    # derive some parameters
    df["u"], df["v"] = mcalc.wind_components(
        df["sknt"].values * munits.knots, df["drct"].values * munits.deg
    )
    df["localvalid_lag"] = df.groupby("iemid")["localvalid"].shift(1)
    df["timedelta"] = df["localvalid"] - df["localvalid_lag"]
    ndf = df[pd.isna(df["timedelta"])]
    df.loc[ndf.index.values, "timedelta"] = pd.to_timedelta(
        ndf["localvalid"].dt.hour * 3600.0
        + ndf["localvalid"].dt.minute * 60.0,
        unit="s",
    )
    df["timedelta"] = df["timedelta"] / np.timedelta64(1, "s")

    for iemid, gdf in df.groupby("iemid"):
        if len(gdf.index) < 6:
            # print(" Quorum not meet for %s" % (gdf.iloc[0]['station'], ))
            continue
        if iemid not in current.index:
            print(
                ("compute_daily Adding %s for %s %s %s")
                % (table, gdf.iloc[0]["station"], gdf.iloc[0]["network"], ts)
            )
            icursor.execute(
                """
                INSERT into """
                + table
                + """
                (iemid, day) values (%s, %s)
            """,
                (iemid, ts),
            )
            current.loc[iemid] = None
        newdata = {}
        currentrow = current.loc[iemid]
        compute_wind_gusts(gdf, currentrow, newdata)
        # take the nearest value
        ldf = gdf.copy().fillna(method="bfill").fillna(method="ffill")
        totsecs = ldf["timedelta"].sum()
        is_new(
            "avg_rh",
            clean((ldf["relh"] * ldf["timedelta"]).sum() / totsecs, 1, 100),
            currentrow,
            newdata,
        )
        is_new("min_rh", clean(ldf["relh"].min(), 1, 100), currentrow, newdata)
        is_new("max_rh", clean(ldf["relh"].max(), 1, 100), currentrow, newdata)

        uavg = (ldf["u"] * ldf["timedelta"]).sum() / totsecs
        vavg = (ldf["v"] * ldf["timedelta"]).sum() / totsecs
        is_new(
            "vector_avg_drct",
            clean(
                mcalc.wind_direction(uavg * munits.knots, vavg * munits.knots),
                0,
                360,
            ),
            currentrow,
            newdata,
        )
        is_new(
            "avg_sknt",
            clean((ldf["sknt"] * ldf["timedelta"]).sum() / totsecs, 0, 150),
            currentrow,
            newdata,
        )
        is_new(
            "max_feel",
            clean(ldf["feel"].max(), -150, 200),
            currentrow,
            newdata,
        )
        is_new(
            "avg_feel",
            clean((ldf["feel"] * ldf["timedelta"]).sum() / totsecs, -150, 200),
            currentrow,
            newdata,
        )
        is_new(
            "min_feel",
            clean(ldf["feel"].min(), -150, 200),
            currentrow,
            newdata,
        )
        if not newdata:
            continue
        cols = []
        args = []
        # print(gdf.iloc[0]['station'])
        for key, val in newdata.items():
            # print("  %s %s -> %s" % (key, currentrow[key], val))
            cols.append("%s = %%s" % (key,))
            args.append(val)
        args.extend([iemid, ts])

        sql = ", ".join(cols)

        icursor.execute(
            """
        UPDATE """
            + table
            + """
        SET """
            + sql
            + """
        WHERE
        iemid = %s and day = %s
        """,
            args,
        )
        if icursor.rowcount == 0:
            print(
                "compute_daily update of %s[%s] was 0"
                % (gdf.iloc[0]["station"], gdf.iloc[0]["network"])
            )

    icursor.close()
    iemaccess.commit()
    iemaccess.close()
Example #31
0
def test_oceanographic_direction():
    """Test oceanographic direction (to) convention."""
    d = wind_direction(5 * units('m/s'), -5 * units('m/s'), convention='to')
    true_dir = 135 * units.deg
    assert_almost_equal(d, true_dir, 4)
Example #32
0
def point_fcst_uv_tmp_according_to_3D_field_vs_sounding(
        output_dir=None,
        obs_ID='55664',
        initTime=None,
        fhour=6,
        day_back=0,
        extra_info={
            'output_head_name':
            ' ',
            'output_tail_name':
            ' ',
            'point_name':
            ' ',
            'drw_thr':
            True,
            'levels_for_interp': [
                1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250,
                200, 150
            ]
        },
        **kwargs):

    model = 'GRAPES_GFS'
    try:
        dir_rqd = [
            utl.Cassandra_dir(data_type='high',
                              data_source='OBS',
                              var_name='TLOGP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl='')
        ]
    except KeyError:
        raise ValueError('Cannot find all required directories')

    if (initTime == None):
        initTime = get_latest_initTime(dir_rqd[1][0:-1] + '/850')

    filename_obs = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                    timedelta(hours=fhour)).strftime('%Y%m%d%H%M%S') + '.000'
    obs_pfl_all = MICAPS_IO.get_tlogp(dir_rqd[0][0:-1],
                                      filename=filename_obs,
                                      cache=False)
    if (obs_pfl_all is None):
        return
    obs_pfl_raw = obs_pfl_all[obs_pfl_all.ID == obs_ID]
    obs_pfl = obs_pfl_raw.replace(9999.0, np.nan).dropna(how='any')
    obs_pfl = obs_pfl[obs_pfl.p >= 200.]

    directory = dir_rqd[1][0:-1]
    filename = initTime + '.' + str(fhour).zfill(3)
    HGT_4D = get_model_3D_grid(directory=directory,
                               filename=filename,
                               levels=extra_info['levels_for_interp'],
                               allExists=False)
    directory = dir_rqd[2][0:-1]
    U_4D = get_model_3D_grid(directory=directory,
                             filename=filename,
                             levels=extra_info['levels_for_interp'],
                             allExists=False)
    directory = dir_rqd[3][0:-1]
    V_4D = get_model_3D_grid(directory=directory,
                             filename=filename,
                             levels=extra_info['levels_for_interp'],
                             allExists=False)

    directory = dir_rqd[4][0:-1]
    TMP_4D = get_model_3D_grid(directory=directory,
                               filename=filename,
                               levels=extra_info['levels_for_interp'],
                               allExists=False)

    points = {
        'lon': obs_pfl.lon.to_numpy(),
        'lat': obs_pfl.lat.to_numpy(),
        'altitude': obs_pfl.h.to_numpy() * 10
    }

    directory = dir_rqd[4][0:-1]

    delt_xy = HGT_4D['lon'].values[1] - HGT_4D['lon'].values[0]
    mask = (HGT_4D['lon'] < (points['lon'][0] + 2 * delt_xy)) & (
        HGT_4D['lon'] > (points['lon'][0] - 2 * delt_xy)
    ) & (HGT_4D['lat'] <
         (points['lat'][0] + 2 * delt_xy)) & (HGT_4D['lat'] >
                                              (points['lat'][0] - 2 * delt_xy))

    HGT_4D_sm = HGT_4D['data'].where(mask, drop=True)
    U_4D_sm = U_4D['data'].where(mask, drop=True)
    V_4D_sm = V_4D['data'].where(mask, drop=True)
    TMP_4D_sm = TMP_4D['data'].where(mask, drop=True)

    lon_md = np.squeeze(HGT_4D_sm['lon'].values)
    lat_md = np.squeeze(HGT_4D_sm['lat'].values)
    alt_md = np.squeeze(HGT_4D_sm.values * 10).flatten()
    time_md = HGT_4D_sm['forecast_period'].values

    coords = np.zeros((HGT_4D_sm.level.size, len(lat_md), len(lon_md), 3))
    coords[..., 1] = lat_md.reshape((1, len(lat_md), 1))
    coords[..., 2] = lon_md.reshape((1, 1, len(lon_md)))
    coords = coords.reshape((alt_md.size, 3))
    coords[:, 0] = alt_md

    interpolator_U = LinearNDInterpolator(coords,
                                          U_4D_sm.values.reshape(
                                              (U_4D_sm.values.size)),
                                          rescale=True)
    interpolator_V = LinearNDInterpolator(coords,
                                          V_4D_sm.values.reshape(
                                              (V_4D_sm.values.size)),
                                          rescale=True)
    interpolator_TMP = LinearNDInterpolator(coords,
                                            TMP_4D_sm.values.reshape(
                                                (TMP_4D_sm.values.size)),
                                            rescale=True)

    coords2 = np.zeros((np.size(points['lon']), 3))
    coords2[:, 0] = points['altitude']
    coords2[:, 1] = points['lat']
    coords2[:, 2] = points['lon']

    U_interped = np.squeeze(interpolator_U(coords2))
    V_interped = np.squeeze(interpolator_V(coords2))
    windsp_interped = (U_interped**2 + V_interped**2)**0.5
    winddir10m_interped = mpcalc.wind_direction(U_interped * units('m/s'),
                                                V_interped * units('m/s'))
    TMP_interped = np.squeeze(interpolator_TMP(coords2))

    fcst_pfl = obs_pfl.copy()
    fcst_pfl.wind_angle = np.array(winddir10m_interped)
    fcst_pfl.wind_speed = np.array(windsp_interped)
    fcst_pfl.t = TMP_interped

    fcst_info = xr.DataArray(np.array(U_4D_sm.values),
                             coords=U_4D_sm.coords,
                             dims=U_4D_sm.dims,
                             attrs={
                                 'points': points,
                                 'model': model
                             })

    sta_graphics.draw_sta_skewT_model_VS_obs(fcst_pfl=fcst_pfl,
                                             obs_pfl=obs_pfl,
                                             fcst_info=fcst_info,
                                             output_dir=output_dir)
Example #33
0
def wind_rh_according_to_4D_data(
        initTime=None,
        fhour=6,
        day_back=0,
        model='ECMWF',
        sta_fcs={
            'lon': [101.82, 101.32, 101.84, 102.23, 102.2681],
            'lat': [28.35, 27.91, 28.32, 27.82, 27.8492],
            'altitude': [3600, 3034.62, 3240, 1669, 1941.5],
            'name': ['健美乡', '项脚乡', '\n锦屏镇', '\n马道镇', 'S9005  ']
        },
        draw_zd=True,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500],
        map_ratio=19 / 9,
        zoom_ratio=1,
        south_China_sea=False,
        area='全国',
        city=False,
        output_dir=None,
        bkgd_type='satellite',
        data_source='MICAPS'):

    # micaps data directory
    if (area != '全国'):
        south_China_sea = False

    # prepare data
    if (area != '全国'):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    cntr_pnt = np.append(np.mean(sta_fcs['lon']), np.mean(sta_fcs['lat']))
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    bkgd_level = utl.cal_background_zoom_ratio(zoom_ratio)
    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='RH',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=''),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=''),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='u10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='v10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='Td2m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='T2m')
            ]
        except KeyError:
            raise ValueError('Cannot find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
            initTime = filename[0:8]

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1],
                                         filename=filename,
                                         levels=levels)
        if (gh is None):
            return
        gh['data'].values = gh['data'].values * 10

        rh = MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1],
                                         filename=filename,
                                         levels=levels,
                                         allExists=False)
        if rh is None:
            return

        u = MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if u is None:
            return

        v = MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if v is None:
            return

        u10m = MICAPS_IO.get_model_grid(directory=data_dir[4],
                                        filename=filename)
        if u10m is None:
            return

        v10m = MICAPS_IO.get_model_grid(directory=data_dir[5],
                                        filename=filename)
        if v10m is None:
            return

        td2m = MICAPS_IO.get_model_grid(directory=data_dir[6],
                                        filename=filename)
        if td2m is None:
            return

        t2m = MICAPS_IO.get_model_grid(directory=data_dir[7],
                                       filename=filename)
        if t2m is None:
            return

        if (draw_zd == True):
            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            directory_obs = utl.Cassandra_dir(data_type='surface',
                                              data_source='OBS',
                                              var_name='PLOT_ALL')
            try:
                zd_sta = MICAPS_IO.get_station_data(filename=validtime +
                                                    '0000.000',
                                                    directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = True
            except:
                zd_sta = MICAPS_IO.get_station_data(directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = False

            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alt'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['Wind_speed_2m_avg'].values * units('m/s'),
                zd_sta['Wind_angle_2m_avg'].values * units.deg)

            idx_zd = np.where((zd_lon > map_extent[0])
                              & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2])
                              & (zd_lat < map_extent[3]))

            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server

            gh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return

            rh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='RHU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="RHU",
                units='%')
            if rh is None:
                return

            u = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIU",
                units='m/s')
            if u is None:
                return

            v = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIV",
                units='m/s')
            if v is None:
                return

            if (model == 'ECMWF'):
                td2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='DPT'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="DPT",
                    units='K')
                if td2m is None:
                    return

                t2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='TEF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="TEF2",
                    units='K')
                if t2m is None:
                    return

                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return

                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return

            if (model == 'GRAPES_GFS'):
                rh2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='RHF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=2,
                    fcst_ele="RHF2",
                    units='%')
                if rh2m is None:
                    return

                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return

                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return
        except KeyError:
            raise ValueError('Cannot find all data needed')

        if (draw_zd == True):
            if (initTime == None):
                initTime1 = CIMISS_IO.cimiss_get_obs_latest_time(
                    data_code="SURF_CHN_MUL_HOR")
                initTime = (datetime.strptime('20' + initTime1, '%Y%m%d%H') -
                            timedelta(days=day_back)).strftime("%Y%m%d%H")[2:]

            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            data_code = utl.CMISS_data_code(data_source='OBS',
                                            var_name='PLOT_sfc')
            zd_sta = CIMISS_IO.cimiss_obs_by_time(
                times=validtime + '0000',
                data_code=data_code,
                sta_levels="011,012,013,014",
                elements=
                "Station_Id_C,Station_Id_d,lat,lon,Alti,TEM,WIN_D_Avg_2mi,WIN_S_Avg_2mi,RHU"
            )
            obs_valid = True
            if (zd_sta is None):
                CIMISS_IO.cimiss_get_obs_latest_time(data_code=data_code,
                                                     latestTime=6)
                zd_sta = CIMISS_IO.cimiss_obs_by_time(directory=directory_obs,
                                                      dropna=True,
                                                      cache=False)
                obs_valid = False

            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alti'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['WIN_S_Avg_2mi'].values * units('m/s'),
                zd_sta['WIN_D_Avg_2mi'].values * units.deg)

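            # keep stations inside the map extent and (presumably) drop missing wind
            # speeds, which appear to be coded as large values (hence the < 1000 check)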
            idx_zd = np.where((zd_lon > map_extent[0])
                              & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2])
                              & (zd_lat < map_extent[3])
                              & (zd_sta['WIN_S_Avg_2mi'].values < 1000))

            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    # mask out the area of interest
    delt_xy = rh['lon'].values[1] - rh['lon'].values[0]
    # extend the masks one grid spacing beyond the map extent so contour labels
    # near the map edges are not lost
    mask1 = (rh['lon'] > map_extent[0] - delt_xy) & (
        rh['lon'] < map_extent[1] + delt_xy) & (
            rh['lat'] > map_extent[2] - delt_xy) & (rh['lat'] <
                                                    map_extent[3] + delt_xy)
    mask2 = (u10m['lon'] > map_extent[0] - delt_xy) & (
        u10m['lon'] < map_extent[1] + delt_xy) & (
            u10m['lat'] > map_extent[2] - delt_xy) & (u10m['lat'] <
                                                      map_extent[3] + delt_xy)
    # apply the masks to each field
    rh = rh.where(mask1, drop=True)
    u = u.where(mask1, drop=True)
    v = v.where(mask1, drop=True)
    gh = gh.where(mask1, drop=True)
    u10m = u10m.where(mask2, drop=True)
    v10m = v10m.where(mask2, drop=True)
    # prepare 3-D (height, lat, lon) linear interpolators from the model fields
    Ex1 = np.squeeze(u['data'].values).flatten()
    Ey1 = np.squeeze(v['data'].values).flatten()
    Ez1 = np.squeeze(rh['data'].values).flatten()
    z = (np.squeeze(gh['data'].values)).flatten()

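    # build an (n_points, 3) array of (height, lat, lon) sample coordinates that
    # lines up with the flattened fields above, then hand it to LinearNDInterpolator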
    coords = np.zeros((np.size(levels), u['lat'].size, u['lon'].size, 3))
    coords[..., 1] = u['lat'].values.reshape((1, u['lat'].size, 1))
    coords[..., 2] = u['lon'].values.reshape((1, 1, u['lon'].size))
    coords = coords.reshape((Ex1.size, 3))
    coords[:, 0] = z

    interpolator_U = LinearNDInterpolator(coords, Ex1, rescale=True)
    interpolator_V = LinearNDInterpolator(coords, Ey1, rescale=True)
    interpolator_RH = LinearNDInterpolator(coords, Ez1, rescale=True)

    # interpolate the forecast wind and RH to the sta_fcs station locations
    coords2 = np.zeros((np.size(sta_fcs['lon']), 3))
    coords2[:, 0] = sta_fcs['altitude']
    coords2[:, 1] = sta_fcs['lat']
    coords2[:, 2] = sta_fcs['lon']
    u_sta = interpolator_U(coords2)
    v_sta = interpolator_V(coords2)
    RH_sta = interpolator_RH(coords2)
    wsp_sta = (u_sta**2 + v_sta**2)**0.5
    u10m_2D = u10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    v10m_2D = v10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    if (model == 'GRAPES_GFS' and data_source == 'CIMISS'):
        rh2m_2D = rh2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))['data'].values
    else:
        td2m_2D = td2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))
        t2m_2D = t2m.interp(lon=('points', sta_fcs['lon']),
                            lat=('points', sta_fcs['lat']))
        if (data_source == 'MICAPS'):
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('degC'),
                td2m_2D['data'].values * units('degC')) * 100
        else:
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('kelvin'),
                td2m_2D['data'].values * units('kelvin')) * 100

    wsp10m_2D = (u10m_2D['data'].values**2 + v10m_2D['data'].values**2)**0.5
    winddir10m = mpcalc.wind_direction(u10m_2D['data'].values * units('m/s'),
                                       v10m_2D['data'].values * units('m/s'))
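    # LinearNDInterpolator returns NaN for points outside the convex hull of the model
    # grid (e.g. station altitudes below the lowest level); fall back to the 10 m fields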
    if (np.isnan(wsp_sta).any()):
        if (wsp_sta.size == 1):
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(
                wsp10m_2D[np.isnan(wsp_sta)])
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D)[np.isnan(RH_sta)])
        else:
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(wsp10m_2D)[np.isnan(
                wsp_sta)]
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D))[np.isnan(RH_sta)]
    u_sta, v_sta = mpcalc.wind_components(wsp_sta * units('m/s'), winddir10m)

    # interpolate the forecast wind to the zd observation stations
    zd_fcst_obs = None
    if draw_zd:
        coords3 = np.zeros((np.size(zd_sm_alt), 3))
        coords3[:, 0] = zd_sm_alt
        coords3[:, 1] = zd_sm_lat
        coords3[:, 2] = zd_sm_lon
        u_sm_sta = interpolator_U(coords3)
        v_sm_sta = interpolator_V(coords3)
        wsp_sm_sta = (u_sm_sta**2 + v_sm_sta**2)**0.5
        u10m_sm = u10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        v10m_sm = v10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        wsp10m_sta = np.squeeze(
            (u10m_sm['data'].values**2 + v10m_sm['data'].values**2)**0.5)
        winddir10m_sm = mpcalc.wind_direction(
            u10m_sm['data'].values * units('m/s'),
            v10m_sm['data'].values * units('m/s'))
        if (np.isnan(wsp_sm_sta).any()):
            wsp_sm_sta[np.isnan(wsp_sm_sta)] = wsp10m_sta[np.isnan(wsp_sm_sta)]
        u_sm_sta, v_sm_sta = mpcalc.wind_components(wsp_sm_sta * units('m/s'),
                                                    winddir10m_sm)

        zd_fcst_obs = {
            'lon': zd_sm_lon,
            'lat': zd_sm_lat,
            'altitude': zd_sm_alt,
            'U': np.squeeze(np.array(u_sm_sta)),
            'V': np.squeeze(np.array(v_sm_sta)),
            'obs_valid': obs_valid,
            'U_obs': np.squeeze(np.array(zd_sm_u)),
            'V_obs': np.squeeze(np.array(zd_sm_v))
        }
    # package the station forecasts for the graphics call
    sta_fcs_fcst = {
        'lon': sta_fcs['lon'],
        'lat': sta_fcs['lat'],
        'altitude': sta_fcs['altitude'],
        'name': sta_fcs['name'],
        'RH': np.array(RH_sta),
        'U': np.squeeze(np.array(u_sta)),
        'V': np.squeeze(np.array(v_sta))
    }

    fcst_info = gh.coords

    local_scale_graphics.draw_wind_rh_according_to_4D_data(
        sta_fcs_fcst=sta_fcs_fcst,
        zd_fcst_obs=zd_fcst_obs,
        fcst_info=fcst_info,
        map_extent=map_extent,
        draw_zd=draw_zd,
        bkgd_type=bkgd_type,
        bkgd_level=bkgd_level,
        output_dir=None)
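The routine above combines two reusable techniques: scattered-data interpolation of model fields onto station points with scipy's LinearNDInterpolator over (height, lat, lon) coordinates, and the round trip between wind components and speed/direction with MetPy. The following is a minimal, self-contained sketch of that pattern on a synthetic grid and one hypothetical station; the names and values are illustrative, not the CIMISS fields used above.

import numpy as np
from scipy.interpolate import LinearNDInterpolator
import metpy.calc as mpcalc
from metpy.units import units

# synthetic 3-D grid: geopotential height (m), latitude, longitude
hgt = np.array([100., 500., 1500., 3000.])
lat = np.linspace(30., 40., 11)
lon = np.linspace(110., 120., 11)
H, LA, LO = np.meshgrid(hgt, lat, lon, indexing='ij')
u = 5. + 0.002 * H          # toy wind components that increase with height
v = 2. + 0.001 * H

# scattered-data interpolators over (height, lat, lon), as in the routine above
pts = np.column_stack([H.ravel(), LA.ravel(), LO.ravel()])
interp_u = LinearNDInterpolator(pts, u.ravel(), rescale=True)
interp_v = LinearNDInterpolator(pts, v.ravel(), rescale=True)

# one hypothetical station: (altitude in m, lat, lon)
sta = np.array([[850., 35.2, 114.7]])
u_sta = interp_u(sta)
v_sta = interp_v(sta)

# speed/direction round trip with MetPy, mirroring the wsp/winddir logic above
wsp = np.hypot(u_sta, v_sta) * units('m/s')
wdir = mpcalc.wind_direction(u_sta * units('m/s'), v_sta * units('m/s'))
u_back, v_back = mpcalc.wind_components(wsp, wdir)
print(wsp, wdir, u_back, v_back)

rescale=True matters here for the same reason it appears in the routine above: the vertical coordinate is in metres while latitude and longitude are in degrees, so the coordinates must be normalised before triangulation.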
Beispiel #34
0
def test_invalid_direction_convention():
    """Test the error that is returned if the convention kwarg is not valid."""
    with pytest.raises(KeyError):
        wind_direction(1 * units('m/s'), 5 * units('m/s'), convention='test')
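For context on the convention keyword exercised by this test: MetPy's wind_direction accepts 'from' (the meteorological default, the direction the wind blows from) and 'to'; any other value raises the KeyError the test expects. A quick illustration with the same component values, assuming MetPy is installed:

from metpy.calc import wind_direction
from metpy.units import units

u, v = 1 * units('m/s'), 5 * units('m/s')
print(wind_direction(u, v))                   # default 'from' convention, ~191.3 deg
print(wind_direction(u, v, convention='to'))  # reversed, ~11.3 deg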
Beispiel #35
0
def test_scalar_direction():
    """Test wind direction with scalars."""
    d = wind_direction(3. * units('m/s'), 4. * units('m/s'))
    assert_almost_equal(d, 216.870 * units.deg, 3)
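The expected value in the scalar test follows from the usual meteorological formula, wdir = (270 deg - atan2(v, u)) mod 360 deg: with u = 3 m/s and v = 4 m/s, atan2(4, 3) is about 53.13 deg, giving roughly 216.87 deg. A plain-NumPy cross-check of that arithmetic (not MetPy's implementation, just the formula):

import numpy as np

u, v = 3.0, 4.0
wdir = (270.0 - np.degrees(np.arctan2(v, u))) % 360.0
print(round(wdir, 3))  # 216.87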
Beispiel #36
0
####################################################
# Units can then be attached to the values from the dataframe.
pressure = df['pressure'].values * units(df.units['pressure'])
temperature = df['temperature'].values * units(df.units['temperature'])
wind_speed = df['speed'].values * units.knots
print(temperature)

dewpoint = df['dewpoint'].values * units(df.units['dewpoint'])
print(dewpoint)

u_wind = df['u_wind'].values * units(df.units['u_wind'])
print(u_wind)
v_wind = df['v_wind'].values * units(df.units['v_wind'])
heights = df['height'].values * units(df.units['height'])
print(mpcalc.wind_direction(u_wind, v_wind))

fig = plt.figure(figsize=(6, 6))
ax = fig.add_subplot(1, 1, 1)
h = Hodograph(ax, component_range=60.)
h.plot(u_wind, v_wind, linewidth=5)
h.add_grid(increment=10)
#h.add_grid(increment=20, color='tab:orange', linestyle='-')
h.plot_colormapped(u_wind, v_wind, wind_speed)  # Plot the trace colored by wind speed
plt.savefig('hodograph.png')
plt.show()
sys.exit()

#lcl_pressure, lcl_temperature = mpcalc.lcl(pressure[0], temperature[0], dewpoint[0])

#print(lcl_pressure, lcl_temperature)
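The commented-out lines above hint at the next step in the original example: computing the lifting condensation level from the surface values. A hedged sketch of that call, with made-up surface values standing in for pressure[0], temperature[0] and dewpoint[0]:

import metpy.calc as mpcalc
from metpy.units import units

# illustrative surface values, not taken from the dataframe above
p_sfc = 1000.0 * units.hPa
t_sfc = 25.0 * units.degC
td_sfc = 20.0 * units.degC

lcl_pressure, lcl_temperature = mpcalc.lcl(p_sfc, t_sfc, td_sfc)
print(lcl_pressure, lcl_temperature)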