Example #1
def test_potential_vorticity_baroclinic_non_unity_derivative(pv_data):
    """Test potential vorticity calculation with unity stability and height on axis 0."""
    u, v, lats, _, dx, dy = pv_data

    potential_temperature = np.ones((3, 4, 4)) * units.kelvin
    potential_temperature[0] = 200 * units.kelvin
    potential_temperature[1] = 300 * units.kelvin
    potential_temperature[2] = 400 * units.kelvin

    pressure = np.ones((3, 4, 4)) * units.hPa
    pressure[2] = 1000 * units.hPa
    pressure[1] = 999 * units.hPa
    pressure[0] = 998 * units.hPa

    pvor = potential_vorticity_baroclinic(potential_temperature, pressure, u, v, dx, dy, lats)

    abs_vorticity = absolute_vorticity(u, v, dx, dy, lats)

    vort_difference = pvor - (abs_vorticity * g * (-100 * (units.kelvin / units.hPa)))

    true_vort = np.zeros_like(u) * (units.kelvin * units.meter ** 2 /
                                    (units.second * units.kilogram))

    assert_almost_equal(vort_difference, true_vort, 10)

    # Now try for xy ordered
    pvor = potential_vorticity_baroclinic(potential_temperature, pressure, u.T, v.T, dx.T,
                                          dy.T, lats.T, dim_order='xy')
    abs_vorticity = absolute_vorticity(u.T, v.T, dx.T, dy.T, lats.T, dim_order='xy')
    vort_difference = pvor - (abs_vorticity * g * (-100 * (units.kelvin / units.hPa)))
    assert_almost_equal(vort_difference, true_vort, 10)
Example #2
def test_absolute_vorticity_asym():
    """Test absolute vorticity calculation with a complicated field."""
    u = np.array([[2, 4, 8], [0, 2, 2], [4, 6, 8]]) * units('m/s')
    v = np.array([[6, 4, 8], [2, 6, 0], [2, 2, 6]]) * units('m/s')
    lats = np.array([[30, 30, 30], [20, 20, 20], [10, 10, 10]]) * units.degrees
    vort = absolute_vorticity(u, v, 1 * units.meters, 2 * units.meters, lats, dim_order='yx')
    true_vort = np.array([[-2.499927, 3.500073, 13.00007],
                          [8.500050, -1.499950, -10.99995],
                          [-5.499975, -1.499975, 2.532525e-5]]) / units.sec
    assert_almost_equal(vort, true_vort, 5)

    # Now try for xy ordered
    vort = absolute_vorticity(u.T, v.T, 1 * units.meters, 2 * units.meters,
                              lats.T, dim_order='xy')
    assert_almost_equal(vort, true_vort.T, 5)
Example #3
def test_potential_vorticity_barotropic(pv_data):
    """Test the barotopic (Rossby) potential vorticity."""
    u, v, lats, _, dx, dy = pv_data

    heights = np.ones_like(u) * 3 * units.km
    pv = potential_vorticity_barotropic(heights, u, v, dx, dy, lats)
    avor = absolute_vorticity(u, v, dx, dy, lats)
    truth = avor / heights
    assert_almost_equal(pv, truth, 10)

    # Now try for xy ordered
    pv = potential_vorticity_barotropic(heights.T, u.T, v.T, dx.T, dy.T, lats.T,
                                        dim_order='xy')
    avor = absolute_vorticity(u.T, v.T, dx.T, dy.T, lats.T, dim_order='xy')
    truth = avor / heights.T
    assert_almost_equal(pv, truth, 10)
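Example #4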
    def getData(self, time, model_vars, mdl2stnd, previous_data=None):
        '''
    Name:
      awips_model_base
    Purpose:
      A function to get data from the NAM40 model to create HDWX products
    Inputs:
      time       : List of datetime(s) for data to grab
      model_vars : Dictionary with variables/levels to get
      mdl2stnd   : Dictionary to convert from model variable names
                    to standardized names
    Outputs:
      Returns a dictionary containing all data
    Keywords:
      previous_data : Dictionary with data from the previous time step
    Notes:
      The DataAccessLayer request object is taken from self._request
    '''
        log = logging.getLogger(__name__)
        # Set up logger for this function
        initTime, fcstTime = get_init_fcst_times(time[0])
        data = {
            'model': self._request.getLocationNames()[0],
            'initTime': initTime,
            'fcstTime': fcstTime
        }
        # Initialize the data dictionary with model name and forecast times

        log.info('Attempting to download {} data'.format(data['model']))

        for var in model_vars:  # Iterate over variables in the vars list
            log.debug('Getting: {}'.format(var))
            self._request.setParameters(*model_vars[var]['parameters'])
            # Set parameters for the download request
            self._request.setLevels(*model_vars[var]['levels'])
            # Set levels for the download request

            response = DAL.getGridData(self._request, time)  # Request the data

            for res in response:  # Iterate over all data request responses
                varName = res.getParameter()
                # Get name of the variable in the response
                varLvl = res.getLevel()
                # Get level of the variable in the response
                varName = mdl2stnd[varName]
                # Convert variable name to local standardized name
                if varName not in data:
                    data[varName] = {}
                    # If variable name NOT in data dictionary, initialize new dictionary under key
                data[varName][varLvl] = res.getRawData()
                # Add data under level name
                try:  # Try to
                    unit = units(res.getUnit())
                    # Get units and convert to MetPy units
                except Exception:  # On exception
                    unit = '?'
                    # Set units to ?
                else:  # If get units success
                    data[varName][varLvl] *= unit
                    # Get data and create MetPy quantity by multiplying by units

                log.debug(
                    'Got data for:\n  Var:  {}\n  Lvl:  {}\n  Unit: {}'.format(
                        varName, varLvl, unit))
        data['lon'], data['lat'] = res.getLatLonCoords()
        # Get latitude and longitude values
        data['lon'] *= units('degree')
        # Add units of degree to longitude
        data['lat'] *= units('degree')
        # Add units of degree to latitude

        # Absolute vorticity
        dx, dy = lat_lon_grid_deltas(data['lon'], data['lat'])
        # Get grid spacing in x and y
        uTag = mdl2stnd[model_vars['wind']['parameters'][0]]
        # Get initial tag name for u-wind
        vTag = mdl2stnd[model_vars['wind']['parameters'][1]]
        # Get initial tag name for v-wind
        if (uTag in data) and (
                vTag in data):  # If both tags are in the data structure
            data['abs_vort'] = {}
            # Add absolute vorticity key
            for lvl in model_vars['wind'][
                    'levels']:  # Iterate over all levels in the wind data
                if (lvl in data[uTag]) and (
                        lvl in data[vTag]
                ):  # If given level in both u- and v-wind dictionaries
                    log.debug('Computing absolute vorticity at {}'.format(lvl))
                    data['abs_vort'][ lvl ] = \
                      absolute_vorticity( data[uTag][lvl], data[vTag][lvl],
                                          dx, dy, data['lat'] )
                    # Compute absolute vorticity

        # 1000 MB equivalent potential temperature
        if ('temperature' in data) and (
                'dewpoint'
                in data):  # If temperature AND dewpoint data were downloaded
            data['theta_e'] = {}
            T, Td = 'temperature', 'dewpoint'
            if ('1000.0MB' in data[T]) and (
                    '1000.0MB' in data[Td]
            ):  # If the 1000 hPa level is in both temperature and dewpoint data
                log.debug(
                    'Computing equivalent potential temperature at 1000 hPa')
                data['theta_e']['1000.0MB'] = equivalent_potential_temperature(
                    1000.0 * units('hPa'), data[T]['1000.0MB'],
                    data[Td]['1000.0MB'])

            return data  # NOTE: early return; the MLCAPE code below is never reached
            # MLCAPE
            log.debug('Computing mixed layer CAPE')
            T_lvl = list(data[T].keys())
            Td_lvl = list(data[Td].keys())
            levels = list(set(T_lvl).intersection(Td_lvl))
            levels = [float(lvl.replace('MB', '')) for lvl in levels]
            levels = sorted(levels, reverse=True)

            nLvl = len(levels)
            if nLvl > 0:
                log.debug(
                    'Found {} matching levels in temperature and dewpoint data'
                    .format(nLvl))
                nLat, nLon = data['lon'].shape

                data['MLCAPE'] = np.zeros((
                    nLat,
                    nLon,
                ), dtype=np.float32) * units('J/kg')
                TT = np.zeros((
                    nLvl,
                    nLat,
                    nLon,
                ), dtype=np.float32) * units('degC')
                TTd = np.zeros((
                    nLvl,
                    nLat,
                    nLon,
                ), dtype=np.float32) * units('degC')

                log.debug('Sorting temperature and dewpoint data by level')
                for i in range(nLvl):
                    key = '{:.1f}MB'.format(levels[i])
                    TT[i, :, :] = data[T][key].to('degC')
                    TTd[i, :, :] = data[Td][key].to('degC')

                levels = np.array(levels) * units.hPa
                depth = 100.0 * units.hPa

                log.debug('Iterating over grid boxes to compute MLCAPE')
                for j in range(nLat):
                    for i in range(nLon):
                        try:
                            _, T_parc, Td_parc = mixed_parcel(
                                levels,
                                TT[:, j, i],
                                TTd[:, j, i],
                                depth=depth,
                                interpolate=False,
                            )
                            profile = parcel_profile(levels, T_parc, Td_parc)
                            cape, cin = cape_cin(levels, TT[:, j, i],
                                                 TTd[:, j, i], profile)
                        except Exception:
                            log.warning(
                                'Failed to compute MLCAPE for lon/lat: {}; {}'.
                                format(data['lon'][j, i], data['lat'][j, i]))
                        else:
                            data['MLCAPE'][j, i] = cape
        return data
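
A hypothetical sketch of the two dictionaries getData() expects is shown below; the AWIPS parameter names, level strings, and the call at the end are illustrative assumptions, not part of the original code.

# Hypothetical structures for model_vars and mdl2stnd (illustrative only)
model_vars = {
    'wind': {'parameters': ['uW', 'vW'],           # model u-/v-wind names (assumed)
             'levels': ['500.0MB', '850.0MB']},
    'thermo': {'parameters': ['T', 'DpT'],         # temperature/dewpoint names (assumed)
               'levels': ['1000.0MB', '850.0MB']},
}

# Maps model parameter names to the standardized names used in the method above
mdl2stnd = {'uW': 'u_wind', 'vW': 'v_wind', 'T': 'temperature', 'DpT': 'dewpoint'}

# data = model.getData([valid_time], model_vars, mdl2stnd)   # hypothetical usage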
Example #5
def Crosssection_Wind_Theta_e_Qv(
        initial_time=None,
        fhour=24,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        day_back=0,
        model='ECMWF',
        output_dir=None,
        st_point=[20, 120.0],
        ed_point=[50, 130.0],
        map_extent=[70, 140, 15, 55],
        h_pos=[0.125, 0.665, 0.25, 0.2]):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    rh = get_model_3D_grid(directory=data_dir[0][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if rh is None:
        return
    rh = rh.metpy.parse_cf().squeeze()

    u = get_model_3D_grid(directory=data_dir[1][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if u is None:
        return
    u = u.metpy.parse_cf().squeeze()

    v = get_model_3D_grid(directory=data_dir[2][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if v is None:
        return
    v = v.metpy.parse_cf().squeeze()

    v2 = get_model_3D_grid(directory=data_dir[2][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if v2 is None:
        return
    v2 = v2.metpy.parse_cf().squeeze()

    t = get_model_3D_grid(directory=data_dir[3][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if t is None:
        return
    t = t.metpy.parse_cf().squeeze()

    gh = get_model_grid(data_dir[4], filename=filename)
    if gh is None:
        return

    resolution = u['lon'][1] - u['lon'][0]
    x, y = np.meshgrid(u['lon'], u['lat'])

    dx, dy = mpcalc.lat_lon_grid_deltas(u['lon'], u['lat'])
    for ilvl in levels:
        u2d = u.sel(level=ilvl)
        #u2d['data'].attrs['units']=units.meter/units.second
        v2d = v.sel(level=ilvl)
        #v2d['data'].attrs['units']=units.meter/units.second

        absv2d = mpcalc.absolute_vorticity(
            u2d['data'].values * units.meter / units.second,
            v2d['data'].values * units.meter / units.second, dx, dy,
            y * units.degree)

        if ilvl == levels[0]:
            # Reuse the v2 dataset as a container for the 3-D absolute vorticity field
            absv3d = v2
        absv3d['data'].loc[dict(level=ilvl)] = np.array(absv2d)
    absv3d['data'].attrs['units'] = absv2d.units

    #rh=rh.rename(dict(lat='latitude',lon='longitude'))
    cross = cross_section(rh, st_point, ed_point)
    cross_rh = cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u = cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v = cross.set_coords(('lat', 'lon'))

    cross_u['data'].attrs['units'] = units.meter / units.second
    cross_v['data'].attrs['units'] = units.meter / units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(
        cross_u['data'], cross_v['data'])

    cross = cross_section(t, st_point, ed_point)
    cross_t = cross.set_coords(('lat', 'lon'))
    cross = cross_section(absv3d, st_point, ed_point)

    cross_Td = mpcalc.dewpoint_rh(cross_t['data'].values * units.celsius,
                                  cross_rh['data'].values * units.percent)

    rh, pressure = xr.broadcast(cross_rh['data'], cross_t['level'])

    Qv = mpcalc.specific_humidity_from_dewpoint(cross_Td, pressure)

    cross_Qv = xr.DataArray(np.array(Qv) * 1000.,
                            coords=cross_rh['data'].coords,
                            dims=cross_rh['data'].dims,
                            attrs={'units': units('g/kg')})

    Theta_e = mpcalc.equivalent_potential_temperature(
        pressure, cross_t['data'].values * units.celsius, cross_Td)

    cross_Theta_e = xr.DataArray(np.array(Theta_e),
                                 coords=cross_rh['data'].coords,
                                 dims=cross_rh['data'].dims,
                                 attrs={'units': Theta_e.units})

    crossection_graphics.draw_Crosssection_Wind_Theta_e_Qv(
        cross_Qv=cross_Qv,
        cross_Theta_e=cross_Theta_e,
        cross_u=cross_u,
        cross_v=cross_v,
        gh=gh,
        h_pos=h_pos,
        st_point=st_point,
        ed_point=ed_point,
        levels=levels,
        map_extent=map_extent,
        output_dir=output_dir)
Example #6
# MetPy Absolute Vorticity Calculation
# ------------------------------------
#
# This code first uses MetPy to calculate the grid deltas (sign aware) to
# use for derivative calculations with the function
# ``lat_lon_grid_deltas()`` and then calculates ``absolute_vorticity()``
# using the wind components, grid deltas, and latitude values.
#

# Calculate grid spacing that is sign aware to use in absolute vorticity calculation
dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

# Calculate absolute vorticity from MetPy function
avor_500 = mpcalc.absolute_vorticity(uwnd_500,
                                     vwnd_500,
                                     dx,
                                     dy,
                                     lats * units.degrees,
                                     dim_order='yx')

######################################################################
# Map Creation
# ------------
#
# This next set of code creates the plot and draws contours on a Lambert
# Conformal map centered at 100°W longitude. The main view is over the
# CONUS with geopotential heights contoured every 60 m and absolute
# vorticity colorshaded (:math:`*10^5`).
#

# Set up the projection that will be used for plotting
mapcrs = ccrs.LambertConformal(central_longitude=-100,
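
######################################################################
# The listing above is cut off at this point. A minimal sketch of the map
# creation it describes is given below, assuming the ``lons``, ``lats`` and
# ``avor_500`` variables from the calculation step; the ``hght_500`` height
# array, the map extent, figure size, and the vorticity contour levels are
# illustrative assumptions (only the 60 m height interval and the 1e5
# scaling come from the description above).

import cartopy.crs as ccrs
import cartopy.feature as cfeature
import matplotlib.pyplot as plt
import numpy as np

mapcrs = ccrs.LambertConformal(central_longitude=-100, central_latitude=35,
                               standard_parallels=(30, 60))
datacrs = ccrs.PlateCarree()  # coordinate system of the lat/lon data

fig = plt.figure(figsize=(14, 12))
ax = fig.add_subplot(1, 1, 1, projection=mapcrs)
ax.set_extent([-125, -70, 20, 55], datacrs)  # CONUS view (assumed extent)
ax.add_feature(cfeature.COASTLINE.with_scale('50m'))
ax.add_feature(cfeature.STATES.with_scale('50m'))

# Absolute vorticity color-shaded, scaled by 1e5 as described above
cf = ax.contourf(lons, lats, avor_500.magnitude * 1e5, np.arange(-8, 25, 1),
                 cmap=plt.cm.PuOr_r, transform=datacrs, extend='both')
plt.colorbar(cf, ax=ax, orientation='horizontal', pad=0, aspect=50)

# Geopotential heights contoured every 60 m (hght_500 assumed available)
cs = ax.contour(lons, lats, hght_500, np.arange(0, 8000, 60),
                colors='black', transform=datacrs)
ax.clabel(cs, fmt='%d')

ax.set_title('500-hPa Geopotential Heights and Absolute Vorticity')
plt.show()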
Example #7
def Miller_Composite_Chart(initial_time=None,
                           fhour=24,
                           day_back=0,
                           model='GRAPES_GFS',
                           map_ratio=19 / 9,
                           zoom_ratio=20,
                           cntr_pnt=[102, 34],
                           Global=False,
                           south_China_sea=True,
                           area='全国',
                           city=False,
                           output_dir=None):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='BLI'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='Td2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PRMSL')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
        filename2 = utl.model_filename(initial_time, fhour - 12)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        filename2 = utl.filename_day_back_model(day_back=day_back,
                                                fhour=fhour - 12)

    # retrieve data from micaps server
    rh_700 = get_model_grid(directory=data_dir[0], filename=filename)
    if rh_700 is None:
        return

    u_300 = get_model_grid(directory=data_dir[1], filename=filename)
    if u_300 is None:
        return

    v_300 = get_model_grid(directory=data_dir[2], filename=filename)
    if v_300 is None:
        return

    u_500 = get_model_grid(directory=data_dir[3], filename=filename)
    if u_500 is None:
        return

    v_500 = get_model_grid(directory=data_dir[4], filename=filename)
    if v_500 is None:
        return

    u_850 = get_model_grid(directory=data_dir[5], filename=filename)
    if u_850 is None:
        return

    v_850 = get_model_grid(directory=data_dir[6], filename=filename)
    if v_850 is None:
        return

    t_700 = get_model_grid(directory=data_dir[7], filename=filename)
    if t_700 is None:
        return

    hgt_500 = get_model_grid(directory=data_dir[8], filename=filename)
    if hgt_500 is None:
        return

    hgt_500_2 = get_model_grid(directory=data_dir[8], filename=filename2)
    if hgt_500_2 is None:
        return

    BLI = get_model_grid(directory=data_dir[9], filename=filename)
    if BLI is None:
        return

    Td2m = get_model_grid(directory=data_dir[10], filename=filename)
    if Td2m is None:
        return

    PRMSL = get_model_grid(directory=data_dir[11], filename=filename)
    if PRMSL is None:
        return

    PRMSL2 = get_model_grid(directory=data_dir[11], filename=filename2)
    if PRMSL2 is None:
        return

    lats = np.squeeze(rh_700['lat'].values)
    lons = np.squeeze(rh_700['lon'].values)
    x, y = np.meshgrid(rh_700['lon'], rh_700['lat'])

    tmp_700 = t_700['data'].values.squeeze() * units('degC')
    u_300 = (u_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_300 = (v_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_500 = (u_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_500 = (v_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_850 = (u_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_850 = (v_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    hgt_500 = (hgt_500['data'].values.squeeze()) * 10 / 9.8 * units.meter
    rh_700 = rh_700['data'].values.squeeze()
    lifted_index = BLI['data'].values.squeeze() * units.kelvin
    Td_sfc = Td2m['data'].values.squeeze() * units('degC')
    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    avor_500 = mpcalc.absolute_vorticity(u_500, v_500, dx, dy,
                                         y * units.degree)
    pmsl = PRMSL['data'].values.squeeze() * units('hPa')

    hgt_500_2 = (hgt_500_2['data'].values.squeeze()) * 10 / 9.8 * units.meter
    pmsl2 = PRMSL2['data'].values.squeeze() * units('hPa')

    # 500 hPa CVA
    vort_adv_500 = mpcalc.advection(
        avor_500, [u_500.to('m/s'), v_500.to('m/s')],
        (dx, dy), dim_order='yx') * 1e9
    vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

    wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
    wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
    wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

    Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

    pmsl_change = pmsl - pmsl2
    hgt_500_change = hgt_500 - hgt_500_2

    mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
    u_500[mask_500] = np.nan
    v_500[mask_500] = np.nan

    # 300 hPa
    mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
    u_300[mask_300] = np.nan
    v_300[mask_300] = np.nan

    # 850 hPa
    mask_850 = ma.masked_less_equal(wspd_850, 0.66 * np.max(wspd_850)).mask
    u_850[mask_850] = np.nan
    v_850[mask_850] = np.nan

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((lons > map_extent[0] - delt_x)
                      & (lons < map_extent[1] + delt_x))
    idx_y1 = np.where((lats > map_extent[2] - delt_y)
                      & (lats < map_extent[3] + delt_y))

    fcst_info = {
        'lon': lons,
        'lat': lats,
        'fhour': fhour,
        'model': model,
        'init_time': t_700.coords['forecast_reference_time'].values
    }

    synthetical_graphics.draw_Miller_Composite_Chart(
        fcst_info=fcst_info,
        u_300=u_300,
        v_300=v_300,
        u_500=u_500,
        v_500=v_500,
        u_850=u_850,
        v_850=v_850,
        pmsl_change=pmsl_change,
        hgt_500_change=hgt_500_change,
        Td_dep_700=Td_dep_700,
        Td_sfc=Td_sfc,
        pmsl=pmsl,
        lifted_index=lifted_index,
        vort_adv_500_smooth=vort_adv_500_smooth,
        map_extent=map_extent,
        add_china=True,
        city=False,
        south_China_sea=True,
        output_dir=None,
        Global=False)
Example #8
def Crosssection_Wind_Theta_e_absv(
    initTime=None, fhour=24,lw_ratio=[16,9],
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,200],
    day_back=0,model='GRAPES_GFS',data_source='MICAPS',
    output_dir=None,
    st_point = [20, 120.0],
    ed_point = [50, 130.0],
    map_extent=[70,140,15,55],
    h_pos=[0.125, 0.665, 0.25, 0.2] ,**kwargs):

    # micaps data directory
    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl='500'),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour)
            
        # retrieve data from micaps server
        rh=MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1],filename=filename,levels=levels, allExists=False)
        rh = rh.metpy.parse_cf().squeeze()
        u=MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1],filename=filename,levels=levels, allExists=False)
        u = u.metpy.parse_cf().squeeze()
        v=MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
        v = v.metpy.parse_cf().squeeze()
        v2=MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
        v2 = v2.metpy.parse_cf().squeeze()
        t=MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1],filename=filename,levels=levels, allExists=False)
        t = t.metpy.parse_cf().squeeze()
        gh=MICAPS_IO.get_model_grid(data_dir[4], filename=filename)
        psfc=MICAPS_IO.get_model_grid(data_dir[5], filename=filename)

    if(data_source == 'CIMISS'):
        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour,UTC=True)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour,UTC=True)
        try:
            rh=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='RHU'),
                        fcst_levels=levels, fcst_ele="RHU", units='%')
            if rh is None:
                return

            u=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIU'),
                        fcst_levels=levels, fcst_ele="WIU", units='m/s')
            if u is None:
                return
                
            v=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        fcst_levels=levels, fcst_ele="WIV", units='m/s')
            if v is None:
                return

            v2=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        fcst_levels=levels, fcst_ele="WIV", units='m/s')
            if v2 is None:
                return            

            t=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='TEM'),
                        fcst_levels=levels, fcst_ele="TEM", units='K')
            if t is None:
                return
            t['data'].values=t['data'].values-273.15

            gh=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                            data_code=utl.CMISS_data_code(data_source=model,var_name='GPH'),
                            fcst_level=500, fcst_ele="GPH", units='gpm')
            if gh is None:
                return
            gh['data'].values=gh['data'].values/10.

            psfc=CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='PRS'),
                        fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc['data']=psfc['data']/100.

        except KeyError:
            raise ValueError('Can not find all data needed')   
    rh = rh.metpy.parse_cf().squeeze()
    u = u.metpy.parse_cf().squeeze()
    v = v.metpy.parse_cf().squeeze()
    v2 = v2.metpy.parse_cf().squeeze()
    t = t.metpy.parse_cf().squeeze()
    psfc=psfc.metpy.parse_cf().squeeze()
    resolution=u['lon'][1]-u['lon'][0]
    x,y=np.meshgrid(u['lon'], u['lat'])

    # +form 3D psfc
    mask1 = (
            (psfc['lon']>=t['lon'].values.min())&
            (psfc['lon']<=t['lon'].values.max())&
            (psfc['lat']>=t['lat'].values.min())&
            (psfc['lat']<=t['lat'].values.max())
            )

    t2,psfc_bdcst=xr.broadcast(t['data'],psfc['data'].where(mask1, drop=True))
    mask2=(psfc_bdcst > -10000)
    psfc_bdcst=psfc_bdcst.where(mask2, drop=True)
    # -form 3D psfc

    dx,dy=mpcalc.lat_lon_grid_deltas(u['lon'],u['lat'])
    for ilvl in levels:
        u2d=u.sel(level=ilvl)
        v2d=v.sel(level=ilvl)

        absv2d=mpcalc.absolute_vorticity(u2d['data'].values*units.meter/units.second,
                v2d['data'].values*units.meter/units.second,dx,dy,y*units.degree)
        
        if(ilvl == levels[0]):
            # Reuse a copy of the v2 dataset as a container for the 3-D absolute vorticity field
            absv3d = v2.copy()
        absv3d['data'].loc[dict(level=ilvl)]=np.array(absv2d)
    absv3d['data'].attrs['units']=absv2d.units

    #rh=rh.rename(dict(lat='latitude',lon='longitude'))
    cross = cross_section(rh, st_point, ed_point)
    cross_rh=cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u=cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v=cross.set_coords(('lat', 'lon'))
    cross_psfc = cross_section(psfc_bdcst, st_point, ed_point)

    cross_u['data'].attrs['units']=units.meter/units.second
    cross_v['data'].attrs['units']=units.meter/units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(cross_u['data'],cross_v['data'])
    
    cross = cross_section(t, st_point, ed_point)
    cross_t=cross.set_coords(('lat', 'lon'))
    cross = cross_section(absv3d, st_point, ed_point)
    cross_absv3d=cross.set_coords(('lat', 'lon'))

    cross_Td = mpcalc.dewpoint_rh(cross_t['data'].values*units.celsius,
                cross_rh['data'].values* units.percent)

    rh,pressure = xr.broadcast(cross_rh['data'],cross_t['level'])
    pressure.attrs['units']='hPa'
    Theta_e=mpcalc.equivalent_potential_temperature(pressure,
                                                cross_t['data'].values*units.celsius, 
                                                cross_Td)
    cross_terrain=pressure-cross_psfc

    cross_Theta_e = xr.DataArray(np.array(Theta_e),
                        coords=cross_rh['data'].coords,
                        dims=cross_rh['data'].dims,
                        attrs={'units': Theta_e.units})

    crossection_graphics.draw_Crosssection_Wind_Theta_e_absv(
                    cross_absv3d=cross_absv3d, cross_Theta_e=cross_Theta_e, cross_u=cross_u,
                    cross_v=cross_v,cross_terrain=cross_terrain,gh=gh,
                    h_pos=h_pos,st_point=st_point,ed_point=ed_point,
                    levels=levels,map_extent=map_extent,lw_ratio=lw_ratio,
                    output_dir=output_dir)
Example #9
def Miller_Composite_Chart(initTime=None,
                           fhour=24,
                           day_back=0,
                           model='GRAPES_GFS',
                           map_ratio=14 / 9,
                           zoom_ratio=20,
                           cntr_pnt=[104, 34],
                           data_source='MICAPS',
                           Global=False,
                           south_China_sea=True,
                           area=None,
                           city=False,
                           output_dir=None,
                           **kwargs):

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='RH',
                                  lvl='700'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl='300'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl='300'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl='500'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl='500'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl='850'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl='850'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='TMP',
                                  lvl='700'),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl='500'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='BLI'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='Td2m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PRMSL')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
            filename2 = utl.model_filename(initTime, fhour - 12)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
            filename2 = utl.filename_day_back_model(day_back=day_back,
                                                    fhour=fhour - 12)

        # retrieve data from micaps server
        rh_700 = MICAPS_IO.get_model_grid(directory=data_dir[0],
                                          filename=filename)
        if rh_700 is None:
            return

        u_300 = MICAPS_IO.get_model_grid(directory=data_dir[1],
                                         filename=filename)
        if u_300 is None:
            return

        v_300 = MICAPS_IO.get_model_grid(directory=data_dir[2],
                                         filename=filename)
        if v_300 is None:
            return

        u_500 = MICAPS_IO.get_model_grid(directory=data_dir[3],
                                         filename=filename)
        if u_500 is None:
            return

        v_500 = MICAPS_IO.get_model_grid(directory=data_dir[4],
                                         filename=filename)
        if v_500 is None:
            return

        u_850 = MICAPS_IO.get_model_grid(directory=data_dir[5],
                                         filename=filename)
        if u_850 is None:
            return

        v_850 = MICAPS_IO.get_model_grid(directory=data_dir[6],
                                         filename=filename)
        if v_850 is None:
            return

        t_700 = MICAPS_IO.get_model_grid(directory=data_dir[7],
                                         filename=filename)
        if t_700 is None:
            return

        hgt_500 = MICAPS_IO.get_model_grid(directory=data_dir[8],
                                           filename=filename)
        if hgt_500 is None:
            return

        hgt_500_2 = MICAPS_IO.get_model_grid(directory=data_dir[8],
                                             filename=filename2)
        if hgt_500_2 is None:
            return

        BLI = MICAPS_IO.get_model_grid(directory=data_dir[9],
                                       filename=filename)
        if BLI is None:
            return

        Td2m = MICAPS_IO.get_model_grid(directory=data_dir[10],
                                        filename=filename)
        if Td2m is None:
            return

        PRMSL = MICAPS_IO.get_model_grid(directory=data_dir[11],
                                         filename=filename)
        if PRMSL is None:
            return

        PRMSL2 = MICAPS_IO.get_model_grid(directory=data_dir[11],
                                          filename=filename2)
        if PRMSL2 is None:
            return

    if (data_source == 'CIMISS'):

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
            filename2 = utl.model_filename(initTime, fhour - 12, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
            filename2 = utl.filename_day_back_model(day_back=day_back,
                                                    fhour=fhour - 12,
                                                    UTC=True)
        try:
            # retrieve data from CIMISS server
            rh_700 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='RHU'),
                fcst_level=700,
                fcst_ele="RHU",
                units='%')
            if rh_700 is None:
                return

            hgt_500 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                fcst_level=500,
                fcst_ele="GPH",
                units='gpm')
            if hgt_500 is None:
                return
            hgt_500['data'].values = hgt_500['data'].values / 10.

            hgt_500_2 = CMISS_IO.cimiss_model_by_time(
                '20' + filename2[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                fcst_level=500,
                fcst_ele="GPH",
                units='gpm')
            if hgt_500_2 is None:
                return
            hgt_500_2['data'].values = hgt_500_2['data'].values / 10.

            u_300 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                fcst_level=300,
                fcst_ele="WIU",
                units='m/s')
            if u_300 is None:
                return

            v_300 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                fcst_level=300,
                fcst_ele="WIV",
                units='m/s')
            if v_300 is None:
                return

            u_500 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                fcst_level=500,
                fcst_ele="WIU",
                units='m/s')
            if u_500 is None:
                return

            v_500 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                fcst_level=500,
                fcst_ele="WIV",
                units='m/s')
            if v_500 is None:
                return

            u_850 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                fcst_level=850,
                fcst_ele="WIU",
                units='m/s')
            if u_850 is None:
                return

            v_850 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                fcst_level=850,
                fcst_ele="WIV",
                units='m/s')
            if v_850 is None:
                return

            BLI = CMISS_IO.cimiss_model_by_time('20' + filename2[0:8],
                                                valid_time=fhour,
                                                data_code=utl.CMISS_data_code(
                                                    data_source=model,
                                                    var_name='PLI'),
                                                fcst_level=0,
                                                fcst_ele="PLI",
                                                units='Pa')
            if BLI is None:
                return

            # Use the 1000 hPa dewpoint temperature in place of the 2 m dewpoint
            Td2m = CMISS_IO.cimiss_model_by_time('20' + filename2[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='DPT'),
                                                 fcst_level=1000,
                                                 fcst_ele="DPT",
                                                 units='Pa')
            if Td2m is None:
                return
            Td2m['data'].values = Td2m['data'].values - 273.15

            if (model == 'ECMWF'):
                PRMSL = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    fcst_level=0,
                    fcst_ele="GSSP",
                    units='Pa')
            else:
                PRMSL = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    fcst_level=0,
                    fcst_ele="SSP",
                    units='Pa')

            t_700 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TEM'),
                fcst_level=700,
                fcst_ele="TEM",
                units='K')
            if t_700 is None:
                return
            t_700['data'].values = t_700['data'].values - 273.15

            if PRMSL is None:
                return
            PRMSL['data'] = PRMSL['data'] / 100.

            if (model == 'ECMWF'):
                PRMSL2 = CMISS_IO.cimiss_model_by_time(
                    '20' + filename2[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    fcst_level=0,
                    fcst_ele="GSSP",
                    units='Pa')
            else:
                PRMSL2 = CMISS_IO.cimiss_model_by_time(
                    '20' + filename2[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    fcst_level=0,
                    fcst_ele="SSP",
                    units='Pa')
            if PRMSL2 is None:
                return
            PRMSL2['data'] = PRMSL2['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')

    lats = np.squeeze(rh_700['lat'].values)
    lons = np.squeeze(rh_700['lon'].values)
    x, y = np.meshgrid(rh_700['lon'], rh_700['lat'])

    tmp_700 = t_700['data'].values.squeeze() * units('degC')
    u_300 = (u_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_300 = (v_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_500 = (u_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_500 = (v_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_850 = (u_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_850 = (v_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    hgt_500 = (hgt_500['data'].values.squeeze()) * 10 / 9.8 * units.meter
    rh_700 = rh_700['data'].values.squeeze()
    lifted_index = BLI['data'].values.squeeze() * units.kelvin
    Td_sfc = Td2m['data'].values.squeeze() * units('degC')
    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    avor_500 = mpcalc.absolute_vorticity(u_500, v_500, dx, dy,
                                         y * units.degree)
    pmsl = PRMSL['data'].values.squeeze() * units('hPa')

    hgt_500_2 = (hgt_500_2['data'].values.squeeze()) * 10 / 9.8 * units.meter
    pmsl2 = PRMSL2['data'].values.squeeze() * units('hPa')

    # 500 hPa CVA
    vort_adv_500 = mpcalc.advection(
        avor_500, [u_500.to('m/s'), v_500.to('m/s')],
        (dx, dy), dim_order='yx') * 1e9
    vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

    wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
    wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
    wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

    Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

    pmsl_change = pmsl - pmsl2
    hgt_500_change = hgt_500 - hgt_500_2

    mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
    u_500[mask_500] = np.nan
    v_500[mask_500] = np.nan

    # 300 hPa
    mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
    u_300[mask_300] = np.nan
    v_300[mask_300] = np.nan

    # 850 hPa
    mask_850 = ma.masked_less_equal(wspd_850, 0.66 * np.max(wspd_850)).mask
    u_850[mask_850] = np.nan
    v_850[mask_850] = np.nan

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    fcst_info = {
        'lon': lons,
        'lat': lats,
        'forecast_period': fhour,
        'model': model,
        'forecast_reference_time':
        t_700.coords['forecast_reference_time'].values
    }

    synthetical_graphics.draw_Miller_Composite_Chart(
        fcst_info=fcst_info,
        u_300=u_300,
        v_300=v_300,
        u_500=u_500,
        v_500=v_500,
        u_850=u_850,
        v_850=v_850,
        pmsl_change=pmsl_change,
        hgt_500_change=hgt_500_change,
        Td_dep_700=Td_dep_700,
        Td_sfc=Td_sfc,
        pmsl=pmsl,
        lifted_index=lifted_index,
        vort_adv_500_smooth=vort_adv_500_smooth,
        map_extent=map_extent,
        add_china=True,
        city=city,
        south_China_sea=south_China_sea,
        output_dir=output_dir,
        Global=False)