Example #1
def sta_SkewT(model='ECMWF',points={'lon':[116.3833], 'lat':[39.9]},
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,250,200,150,100],
    fhour=3,output_dir=None):

    try:
        data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl='')]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # read data
    initTime = get_latest_initTime(data_dir[0][0:-1]+"850")
    filename = initTime+'.'+str(fhour).zfill(3)
    TMP_4D=get_model_3D_grid(directory=data_dir[0][0:-1],filename=filename,levels=levels, allExists=False)
    TMP_2D=TMP_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    u_4D=get_model_3D_grid(directory=data_dir[1][0:-1],filename=filename,levels=levels, allExists=False)
    u_2D=u_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    v_4D=get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
    v_2D=v_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    HGT_4D=get_model_3D_grid(directory=data_dir[3][0:-1],filename=filename,levels=levels, allExists=False)
    HGT_2D=HGT_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    HGT_2D.attrs['model']=model
    HGT_2D.attrs['points']=points

    RH_4D=get_model_3D_grid(directory=data_dir[4][0:-1],filename=filename,levels=levels, allExists=False)
    RH_2D=RH_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    wind_dir_2D=mpcalc.wind_direction(u_2D['data'].values* units.meter / units.second,
        v_2D['data'].values* units.meter / units.second)
    wsp10m_2D=(u_2D['data']**2+v_2D['data']**2)**0.5
    Td2m=mpcalc.dewpoint_rh(TMP_2D['data'].values*units('degC'),RH_2D['data'].values/100.)

    p = np.squeeze(levels) * units.hPa
    T = np.squeeze(TMP_2D['data'].values) * units.degC
    Td = np.squeeze(np.array(Td2m)) * units.degC
    wind_speed = np.squeeze(wsp10m_2D.values) * units('m/s')
    wind_dir = np.squeeze(np.array(wind_dir_2D)) * units.degrees
    u=np.squeeze(u_2D['data'].values)* units('m/s')
    v=np.squeeze(v_2D['data'].values)* units('m/s')

    fcst_info= xr.DataArray(np.array(u_2D['data'].values),
                        coords=u_2D['data'].coords,
                        dims=u_2D['data'].dims,
                        attrs={'points': points,
                                'model': model})

    sta_graphics.draw_sta_skewT(
        p=p,T=T,Td=Td,wind_speed=wind_speed,wind_dir=wind_dir,u=u,v=v,
        fcst_info=fcst_info)
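
The workflow above has two reusable pieces: pointwise interpolation of a (level, lat, lon) grid to scattered stations, and plotting the resulting profile on a Skew-T. Below is a minimal, self-contained sketch on synthetic data (the grid layout and values are assumptions, not the Cassandra/MICAPS structures used above); passing the point coordinates as DataArrays that share a 'points' dimension is the documented route to pointwise interpolation and plays the same role as the ('points', [...]) tuples above.

# Minimal sketch: pointwise interpolation plus a basic Skew-T on synthetic data.
import numpy as np
import xarray as xr
import matplotlib.pyplot as plt
from metpy.plots import SkewT
from metpy.units import units

levels = [1000, 925, 850, 700, 500, 300]
lon = np.arange(110., 120., 0.5)
lat = np.arange(35., 45., 0.5)
# synthetic temperature field decreasing with height
tmp = xr.DataArray(
    20. - 0.07 * (1000. - np.array(levels))[:, None, None]
    + np.zeros((len(levels), lat.size, lon.size)),
    coords={'level': levels, 'lat': lat, 'lon': lon},
    dims=('level', 'lat', 'lon'))

# pointwise interpolation to one station
pts_lon = xr.DataArray([116.3833], dims='points')
pts_lat = xr.DataArray([39.9], dims='points')
profile = tmp.interp(lon=pts_lon, lat=pts_lat)

p = np.array(levels) * units.hPa
T = np.squeeze(profile.values) * units.degC
skew = SkewT()
skew.plot(p, T, 'r')
plt.savefig('skewT_sketch.png')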
Example #2
def point_fcst_rn_according_to_3D_field_box_line(output_dir=None,
                                                 t_range=[6, 60],
                                                 t_gap=6,
                                                 points={
                                                     'lon': [116.3833],
                                                     'lat': [39.9]
                                                 },
                                                 initTime=None,
                                                 obs_ID=54511,
                                                 extra_info={
                                                     'output_head_name': ' ',
                                                     'output_tail_name': ' ',
                                                     'point_name': ' ',
                                                     'drw_thr': True
                                                 },
                                                 **kwargs):

    try:
        dir_rqd = utl.Cassandra_dir(data_type='surface',
                                    data_source='ECMWF_ENSEMBLE',
                                    var_name='RAIN' + str(t_gap).zfill(2) +
                                    '_RAW')
    except KeyError:
        raise ValueError('Can not find all required directories')

    #-get all the directories needed
    if (initTime == None):
        initTime = get_latest_initTime(dir_rqd)
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    if (t_range[1] > 72):
        fhours = np.append(np.arange(t_range[0], 72, t_gap),
                           np.arange(72, t_range[1], 6))
    else:
        fhours = np.arange(t_range[0], t_range[1], t_gap)

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]

    rn = utl.get_model_points_gy(dir_rqd, filenames, points, allExists=False)
    rn.attrs['model'] = 'ECMWF_ENSEMBLE'

    Ensemble_graphics.box_line_rn(rn=rn,
                                  points=points,
                                  extra_info=extra_info,
                                  output_dir=output_dir)
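
The forecast hours and MICAPS-style file names above follow a simple rule: step with t_gap out to 72 h, then 6-hourly beyond, and name each file initTime.fff. A minimal sketch (the init time string is hypothetical):

import numpy as np

t_range, t_gap = [6, 84], 6
initTime = '20071020'  # hypothetical YYMMDDHH init time

if t_range[1] > 72:
    fhours = np.append(np.arange(t_range[0], 72, t_gap),
                       np.arange(72, t_range[1], 6))
else:
    fhours = np.arange(t_range[0], t_range[1], t_gap)

filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
print(filenames[:3])  # ['20071020.006', '20071020.012', '20071020.018']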
Example #3
def point_fcst(
        model='ECMWF',
        output_dir=None,
        t_range=[0,60],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9], 'altitude':[1351]},
        initTime=None,day_back=0,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' '}
            ):

    #+get all the directories needed
    try:
        dir_rqd=[utl.Cassandra_dir(data_type='surface',data_source=model,var_name='T2m'),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='u10m'),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='v10m'),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='RAIN'+str(t_gap).zfill(2))]
    except KeyError:
        raise ValueError('Can not find all required directories')
    
    #-get all the directories needed
    if(initTime == None):
        initTime = get_latest_initTime(dir_rqd[0])
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    directory=dir_rqd[0][0:-1]
    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
    t2m=utl.get_model_points_gy(dir_rqd[0], filenames, points,allExists=False)
    u10m=utl.get_model_points_gy(dir_rqd[1], filenames, points,allExists=False)
    v10m=utl.get_model_points_gy(dir_rqd[2], filenames, points,allExists=False)
    rn=utl.get_model_points_gy(dir_rqd[3], filenames, points,allExists=False)
    sta_graphics.draw_point_fcst(t2m=t2m,u10m=u10m,v10m=v10m,rn=rn,
        model=model,
        output_dir=output_dir,
        points=points,
        extra_info=extra_info
            )                 
Example #4
def point_uv_gust_tmp_rh_rn_fcst(output_dir=None,
                                 t_range=[0, 60],
                                 t_gap=3,
                                 points={
                                     'lon': [116.3833],
                                     'lat': [39.9],
                                     'altitude': [1351]
                                 },
                                 initTime=None,
                                 day_back=0,
                                 extra_info={
                                     'output_head_name': ' ',
                                     'output_tail_name': ' ',
                                     'point_name': ' '
                                 },
                                 **kwargs):

    #+get all the directories needed
    try:
        dir_rqd = [
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='T2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='u10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='v10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='rh2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='RAIN' + str(t_gap).zfill(2)),
            utl.Cassandra_dir(data_type='surface',
                              data_source='OBS',
                              var_name='PLOT_GUST')
        ]
    except KeyError:
        raise ValueError('Can not find all required directories')

    #-get all the directories needed
    if (initTime == None):
        initTime = MICAPS_IO.get_latest_initTime(dir_rqd[0])
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    gust_sta = MICAPS_IO.get_station_data(directory=dir_rqd[5],
                                          dropna=True,
                                          cache=False)
    datetime_sta = pd.to_datetime(str(
        gust_sta.time[0])).replace(tzinfo=None).to_pydatetime()
    datetime_model_initTime = datetime.strptime('20' + initTime, '%Y%m%d%H')

    u10_his_md = []
    v10_his_md = []
    wsp_his_sta_point = []

    model_filenames_his = None
    for iinit in range(0, 240, 12):
        for ifhour in range(0, 87, 3):
            for iobs in range(0, 168, 1):
                initTime_his = datetime_model_initTime - timedelta(hours=iinit)
                validTime_his = initTime_his + timedelta(hours=ifhour)
                staTime_his = datetime_sta - timedelta(hours=iobs)
                if (staTime_his == validTime_his):
                    model_filename_his = initTime_his.strftime(
                        '%Y%m%d%H')[2:10] + '.' + str(ifhour).zfill(3)
                    sta_filename_his = validTime_his.strftime(
                        '%Y%m%d%H') + '0000.000'
                    data_md1 = MICAPS_IO.get_model_grid(
                        dir_rqd[1], filename=model_filename_his)
                    if (data_md1 is None):
                        continue
                    data_md2 = MICAPS_IO.get_model_grid(
                        dir_rqd[2], filename=model_filename_his)
                    if (data_md2 is None):
                        continue
                    data_sta = MICAPS_IO.get_station_data(
                        directory=dir_rqd[5],
                        filename=sta_filename_his,
                        dropna=True,
                        cache=True)
                    if (data_sta is None):
                        continue
                    u10_his_md.append(data_md1)
                    v10_his_md.append(data_md2)
                    wsp_his_sta_interp = utl.sta_to_point_interpolation(
                        points=points, sta=data_sta, var_name='Gust_speed')
                    wsp_his_sta_point.append(wsp_his_sta_interp[:])

    u10_his_md = xr.concat(u10_his_md, dim='time')
    v10_his_md = xr.concat(v10_his_md, dim='time')
    wsp_his_md = (u10_his_md**2 + v10_his_md**2)**0.5
    wsp_his_md_point = wsp_his_md.interp(lon=('points', points['lon']),
                                         lat=('points', points['lat']))

    model = LinearRegression(copy_X=True,
                             fit_intercept=True,
                             n_jobs=1,
                             normalize=False)
    x = np.squeeze(wsp_his_md_point['data'].values).reshape(-1, 1)
    y = np.squeeze(wsp_his_sta_point).reshape(-1, 1)
    model.fit(x, y)
    if (model.coef_ < 0.2):
        f2 = np.polyfit(np.squeeze(x), np.squeeze(y), 2)
        model2 = np.poly1d(f2)

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    t2m = utl.get_model_points_gy(dir_rqd[0],
                                  filenames,
                                  points,
                                  allExists=False)
    u10m = utl.get_model_points_gy(dir_rqd[1],
                                   filenames,
                                   points,
                                   allExists=False)
    v10m = utl.get_model_points_gy(dir_rqd[2],
                                   filenames,
                                   points,
                                   allExists=False)
    rh = utl.get_model_points_gy(dir_rqd[3],
                                 filenames,
                                 points,
                                 allExists=False)
    rn = utl.get_model_points_gy(dir_rqd[4],
                                 filenames,
                                 points,
                                 allExists=False)

    gust10m_predict = u10m.copy()
    if (model.coef_ >= 0.2):
        gust10m_predict['data'].values = np.squeeze(
            model.predict(
                np.squeeze((u10m['data'].values**2 +
                            v10m['data'].values**2)**0.5).reshape(-1,
                                                                  1))).reshape(
                                                                      -1, 1, 1)
    else:
        gust10m_predict['data'].values = np.squeeze(
            model2(
                np.squeeze((u10m['data'].values**2 +
                            v10m['data'].values**2)**0.5))).reshape(-1, 1, 1)

    sta_graphics.draw_point_uv_tmp_rh_rn_gust_fcst(t2m=t2m,
                                                   u10m=u10m,
                                                   v10m=v10m,
                                                   rh=rh,
                                                   rn=rn,
                                                   gust=gust10m_predict,
                                                   model='中央气象台中短期指导',
                                                   output_dir=output_dir,
                                                   points=points,
                                                   extra_info=extra_info)
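
The gust calibration above regresses interpolated station gusts on matched model 10-m wind speeds and falls back to a quadratic polynomial when the linear slope is small. A minimal, self-contained sketch on synthetic data (the 0.2 slope threshold is taken from the example; everything else is made up):

import numpy as np
from sklearn.linear_model import LinearRegression

rng = np.random.default_rng(0)
wsp_model = rng.uniform(2., 20., 200)                 # matched model 10-m wind speeds
gust_obs = 1.4 * wsp_model + rng.normal(0., 1., 200)  # synthetic observed gusts

x = wsp_model.reshape(-1, 1)
y = gust_obs.reshape(-1, 1)
model = LinearRegression(fit_intercept=True)
model.fit(x, y)

if model.coef_[0, 0] >= 0.2:
    predict = lambda w: model.predict(w.reshape(-1, 1)).ravel()
else:
    poly = np.poly1d(np.polyfit(wsp_model, gust_obs, 2))
    predict = lambda w: poly(w)

print(predict(np.array([5., 10., 15.])))  # calibrated gust estimates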
Example #5
def point_uv_ecgust_tmp_rh_rn_fcst(output_dir=None,
                                   t_range=[0, 60],
                                   t_gap=3,
                                   points={
                                       'lon': [116.3833],
                                       'lat': [39.9],
                                       'altitude': [1351]
                                   },
                                   initTime=None,
                                   day_back=0,
                                   extra_info={
                                       'output_head_name': ' ',
                                       'output_tail_name': ' ',
                                       'point_name': ' '
                                   },
                                   **kwargs):

    #+get all the directories needed
    try:
        dir_rqd = [
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='T2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='u10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='v10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='rh2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='RAIN' + str(t_gap).zfill(2)),
            utl.Cassandra_dir(data_type='surface',
                              data_source='ECMWF',
                              var_name='10M_GUST_3H')
        ]
    except KeyError:
        raise ValueError('Can not find all required directories')

    #-get all the directories needed
    if (initTime == None):
        initTime = MICAPS_IO.get_latest_initTime(dir_rqd[0])
        initTime2 = MICAPS_IO.get_latest_initTime(dir_rqd[-1])
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    fhours2 = np.arange(t_range[0], t_range[1] + 12, t_gap)
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    filenames2 = [initTime2 + '.' + str(fhour).zfill(3) for fhour in fhours2]
    t2m = utl.get_model_points_gy(dir_rqd[0],
                                  filenames,
                                  points,
                                  allExists=False)
    u10m = utl.get_model_points_gy(dir_rqd[1],
                                   filenames,
                                   points,
                                   allExists=False)
    v10m = utl.get_model_points_gy(dir_rqd[2],
                                   filenames,
                                   points,
                                   allExists=False)
    rh = utl.get_model_points_gy(dir_rqd[3],
                                 filenames,
                                 points,
                                 allExists=False)
    rn = utl.get_model_points_gy(dir_rqd[4],
                                 filenames,
                                 points,
                                 allExists=False)
    gust = utl.get_model_points_gy(dir_rqd[5],
                                   filenames2,
                                   points,
                                   allExists=False)

    sta_graphics.draw_point_uv_tmp_rh_rn_gust_fcst(t2m=t2m,
                                                   u10m=u10m,
                                                   v10m=v10m,
                                                   rh=rh,
                                                   rn=rn,
                                                   gust=gust,
                                                   model='中央气象台中短期指导',
                                                   output_dir=output_dir,
                                                   points=points,
                                                   extra_info=extra_info)
Example #6
def Time_Crossection_rh_uv_Temp(
        initTime=None,
        model='ECMWF',
        points={
            'lon': [116.3833],
            'lat': [39.9]
        },
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        t_gap=3,
        t_range=[0, 48],
        output_dir=None):

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    # read data

    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl=''),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PSFC')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    if (initTime == None):
        initTime = get_latest_initTime(data_dir[0][0:-1] + "850")
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    TMP_4D = get_model_3D_grids(directory=data_dir[0][0:-1],
                                filenames=filenames,
                                levels=levels,
                                allExists=False)
    TMP_2D = TMP_4D.interp(lon=('points', points['lon']),
                           lat=('points', points['lat']))

    u_4D = get_model_3D_grids(directory=data_dir[1][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    u_2D = u_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    v_4D = get_model_3D_grids(directory=data_dir[2][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    v_2D = v_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    rh_4D = get_model_3D_grids(directory=data_dir[3][0:-1],
                               filenames=filenames,
                               levels=levels,
                               allExists=False)
    rh_2D = rh_4D.interp(lon=('points', points['lon']),
                         lat=('points', points['lat']))
    rh_2D.attrs['model'] = model
    rh_2D.attrs['points'] = points

    Psfc_3D = get_model_grids(directory=data_dir[4][0:-1],
                              filenames=filenames,
                              allExists=False)
    Psfc_1D = Psfc_3D.interp(lon=('points', points['lon']),
                             lat=('points', points['lat']))
    v_2D2, pressure_2D = xr.broadcast(v_2D['data'], v_2D['level'])
    v_2D2, Psfc_2D = xr.broadcast(v_2D['data'], Psfc_1D['data'])
    terrain_2D = pressure_2D - Psfc_2D

    crossection_graphics.draw_Time_Crossection_rh_uv_Temp(
        rh_2D=rh_2D,
        u_2D=u_2D,
        v_2D=v_2D,
        TMP_2D=TMP_2D,
        terrain_2D=terrain_2D,
        t_range=t_range,
        model=model,
        output_dir=output_dir)
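
The terrain field above is built by broadcasting the pressure-level coordinate against the interpolated surface pressure and taking level minus psfc, which is positive wherever a pressure level lies below the model terrain. A minimal sketch on synthetic data:

import numpy as np
import xarray as xr

levels = [1000, 925, 850, 700, 500]
times = np.arange(0, 24, 3)
data = xr.DataArray(np.zeros((times.size, len(levels))),
                    coords={'time': times, 'level': levels},
                    dims=('time', 'level'))
psfc = xr.DataArray(900. + 30. * np.sin(times / 6.),
                    coords={'time': times}, dims=('time',))

_, pressure_2D = xr.broadcast(data, data['level'])
_, Psfc_2D = xr.broadcast(data, psfc)
terrain_2D = pressure_2D - Psfc_2D            # > 0 below ground
print(terrain_2D.sel(level=1000).values > 0)  # all True: 1000 hPa lies underground here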
Example #7
def Time_Crossection_rh_uv_theta_e(
        initTime=None,
        model='ECMWF',
        points={
            'lon': [116.3833],
            'lat': [39.9]
        },
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        t_gap=3,
        t_range=[0, 48],
        output_dir=None):

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    # read data

    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl='')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    if (initTime == None):
        initTime = get_latest_initTime(data_dir[0][0:-1] + "850")
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    TMP_4D = get_model_3D_grids(directory=data_dir[0][0:-1],
                                filenames=filenames,
                                levels=levels,
                                allExists=False)
    TMP_2D = TMP_4D.interp(lon=('points', points['lon']),
                           lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    u_4D = get_model_3D_grids(directory=data_dir[1][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    u_2D = u_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    v_4D = get_model_3D_grids(directory=data_dir[2][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    v_2D = v_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    rh_4D = get_model_3D_grids(directory=data_dir[3][0:-1],
                               filenames=filenames,
                               levels=levels,
                               allExists=False)
    rh_2D = rh_4D.interp(lon=('points', points['lon']),
                         lat=('points', points['lat']))
    rh_2D.attrs['model'] = model
    rh_2D.attrs['points'] = points
    Td_2D = mpcalc.dewpoint_rh(TMP_2D['data'].values * units.celsius,
                               rh_2D['data'].values * units.percent)

    rh, pressure = xr.broadcast(rh_2D['data'], rh_2D['level'])

    Theta_e = mpcalc.equivalent_potential_temperature(
        pressure, TMP_2D['data'].values * units.celsius, Td_2D)

    theta_e_2D = xr.DataArray(np.array(Theta_e),
                              coords=rh_2D['data'].coords,
                              dims=rh_2D['data'].dims,
                              attrs={'units': Theta_e.units})

    crossection_graphics.draw_Time_Crossection_rh_uv_theta_e(
        rh_2D=rh_2D,
        u_2D=u_2D,
        v_2D=v_2D,
        theta_e_2D=theta_e_2D,
        t_range=t_range,
        output_dir=output_dir)
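
The theta-e computation above derives dewpoint from temperature and relative humidity and then the equivalent potential temperature. A minimal sketch on a synthetic profile; note it uses MetPy's current name dewpoint_from_relative_humidity, whereas the example relies on the older dewpoint_rh:

import numpy as np
import metpy.calc as mpcalc
from metpy.units import units

pressure = np.array([1000., 925., 850., 700., 500.]) * units.hPa
temperature = np.array([25., 20., 15., 5., -15.]) * units.degC
rh = np.array([80., 75., 70., 50., 30.]) * units.percent

dewpoint = mpcalc.dewpoint_from_relative_humidity(temperature, rh)
theta_e = mpcalc.equivalent_potential_temperature(pressure, temperature, dewpoint)
print(theta_e)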
Example #8
def gh_uv_mslp(initTime=None, fhour=0, day_back=0,model='ECMWF',
    gh_lev=500,uv_lev=850,
    map_ratio=19/9,zoom_ratio=20,cntr_pnt=[102,34],
    south_China_sea=True,area = '全国',city=False,output_dir=None,data_source='MICAPS',
    Global=False):

    if(area != '全国'):
        south_China_sea=False

    # micaps data directory
    if(data_source =='MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=gh_lev),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=uv_lev),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='PRMSL')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename

        if(initTime == None):
            initTime = MICAPS_IO.get_latest_initTime(data_dir[-1])

        if(initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
            
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        mslp = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if mslp is None:
            return

    if(data_source =='CIMISS'):

        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour,UTC=True)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour,UTC=True)
        try:
            # retrieve data from CMISS server        
            gh=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='GPH'),
                        levattrs={'long_name':'pressure_level', 'units':'hPa', '_CoordinateAxisType':'-'},
                        fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            if gh is None:
                return
            gh['data'].values=gh['data'].values/10.

            u=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIU'),
                        levattrs={'long_name':'pressure_level', 'units':'hPa', '_CoordinateAxisType':'-'},
                        fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            if u is None:
                return
                
            v=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        levattrs={'long_name':'pressure_level', 'units':'hPa', '_CoordinateAxisType':'-'},
                        fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            if v is None:
                return

            if(model == 'ECMWF'):
                mslp=CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                            data_code=utl.CMISS_data_code(data_source=model,var_name='GSSP'),
                            levattrs={'long_name':'Mean_sea_level', 'units':'m', '_CoordinateAxisType':'-'},
                            fcst_level=0, fcst_ele="GSSP", units='Pa')
            else:
                mslp=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                            data_code=utl.CMISS_data_code(data_source=model,var_name='SSP'),
                            levattrs={'long_name':'Mean_sea_level', 'units':'m', '_CoordinateAxisType':'-'},
                            fcst_level=0, fcst_ele="SSP", units='Pa')
            if mslp is None:
                return
            mslp['data']=mslp['data']/100.
        except KeyError:
            raise ValueError('Can not find all data needed')                
    # prepare data

    if(area != None):
        cntr_pnt,zoom_ratio=utl.get_map_area(area_name=area)

    map_extent=[0,0,0,0]
    map_extent[0]=cntr_pnt[0]-zoom_ratio*1*map_ratio
    map_extent[1]=cntr_pnt[0]+zoom_ratio*1*map_ratio
    map_extent[2]=cntr_pnt[1]-zoom_ratio*1
    map_extent[3]=cntr_pnt[1]+zoom_ratio*1

    delt_x=(map_extent[1]-map_extent[0])*0.2
    delt_y=(map_extent[3]-map_extent[2])*0.1

#+ to solve the problem of labels on all the contours
    mask1 = (gh['lon'] > map_extent[0]-delt_x) & (gh['lon'] < map_extent[1]+delt_x) & (gh['lat'] > map_extent[2]-delt_y) & (gh['lat'] < map_extent[3]+delt_y)

    mask2 = (u['lon'] > map_extent[0]-delt_x) & (u['lon'] < map_extent[1]+delt_x) & (u['lat'] > map_extent[2]-delt_y) & (u['lat'] < map_extent[3]+delt_y)

    mask3 = (mslp['lon'] > map_extent[0]-delt_x) & (mslp['lon'] < map_extent[1]+delt_x) & (mslp['lat'] > map_extent[2]-delt_y) & (mslp['lat'] < map_extent[3]+delt_y)
#- to solve the problem of labels on all the contours
    gh=gh.where(mask1,drop=True)
    gh.attrs['model']=model

    u=u.where(mask2,drop=True)
    v=v.where(mask2,drop=True)
    mslp=mslp.where(mask3,drop=True)

    uv=xr.merge([u.rename({'data': 'u'}),v.rename({'data': 'v'})])

    synoptic_graphics.draw_gh_uv_mslp(
        mslp=mslp, gh=gh, uv=uv,
        map_extent=map_extent, regrid_shape=20,
        city=city,south_China_sea=south_China_sea,
        output_dir=output_dir,Global=Global)
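
The map handling above derives the plotting extent from a centre point, a zoom ratio and an aspect ratio, then masks each field to a slightly padded box so contour labels stay inside the frame. A minimal sketch on a synthetic field:

import numpy as np
import xarray as xr

cntr_pnt, zoom_ratio, map_ratio = [102, 34], 20, 19 / 9
map_extent = [cntr_pnt[0] - zoom_ratio * map_ratio,
              cntr_pnt[0] + zoom_ratio * map_ratio,
              cntr_pnt[1] - zoom_ratio,
              cntr_pnt[1] + zoom_ratio]
delt_x = (map_extent[1] - map_extent[0]) * 0.2
delt_y = (map_extent[3] - map_extent[2]) * 0.1

lon = np.arange(0., 180., 1.)
lat = np.arange(0., 90., 1.)
gh = xr.DataArray(np.random.rand(lat.size, lon.size),
                  coords={'lat': lat, 'lon': lon}, dims=('lat', 'lon'))

mask = ((gh['lon'] > map_extent[0] - delt_x) & (gh['lon'] < map_extent[1] + delt_x) &
        (gh['lat'] > map_extent[2] - delt_y) & (gh['lat'] < map_extent[3] + delt_y))
gh_cut = gh.where(mask, drop=True)
print(gh_cut.sizes)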
Example #9
def compare_gh_uv(anaTime=None,
                  anamodel='GRAPES_GFS',
                  fhour=24,
                  model='ECMWF',
                  data_source='MICAPS',
                  gh_lev=500,
                  uv_lev=850,
                  area=None,
                  map_ratio=14 / 9,
                  zoom_ratio=20,
                  cntr_pnt=[104, 34],
                  **products_kwargs):

    if (area != None):
        south_China_sea = False

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=gh_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename

        if (anaTime == None):
            anaTime = MICAPS_IO.get_latest_initTime(data_dir[-1])
            initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                        timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]

        if (anaTime != None):
            filename_ana = utl.model_filename(anaTime, 0)
            initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                        timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]
            filename_fcst = utl.model_filename(initTime, fhour)

        # retrieve data from micaps server
        gh_ana = MICAPS_IO.get_model_grid(data_dir[0], filename=filename_ana)
        u_ana = MICAPS_IO.get_model_grid(data_dir[1], filename=filename_ana)
        v_ana = MICAPS_IO.get_model_grid(data_dir[2], filename=filename_ana)
        psfc_ana = MICAPS_IO.get_model_grid(data_dir[3], filename=filename_ana)
        gh_fcst = MICAPS_IO.get_model_grid(data_dir[0], filename=filename_fcst)
        u_fcst = MICAPS_IO.get_model_grid(data_dir[1], filename=filename_fcst)
        v_fcst = MICAPS_IO.get_model_grid(data_dir[2], filename=filename_fcst)
        psfc_fcst = MICAPS_IO.get_model_grid(data_dir[3],
                                             filename=filename_fcst)

    if (data_source == 'CIMISS'):

        # get filename
        if (anaTime != None):
            anaTime = utl.model_filename(anaTime, fhour, UTC=True)[0:8]
        else:
            anaTime = utl.filename_day_back_model(fhour=fhour, UTC=True)[0:8]
        initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                    timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]
        try:
            # retrieve data from CIMISS server
            gh_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime,
                valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            gh_ana['data'].values = gh_ana['data'].values / 10.
            gh_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            gh_fcst['data'].values = gh_fcst['data'].values / 10.

            u_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime,
                valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIU",
                units='m/s')
            u_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIU",
                units='m/s')

            v_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime,
                valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIV",
                units='m/s')
            v_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIV",
                units='m/s')

            psfc_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime,
                valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                levattrs={
                    'long_name': 'sea_surface_pressure',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="PRS",
                units='Pa')
            psfc_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                levattrs={
                    'long_name': 'sea_surface_pressure',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="PRS",
                units='Pa')
            psfc_ana['data'] = psfc_ana['data'] / 100.
            psfc_fcst['data'] = psfc_fcst['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')
    # prepare data
    if any(data is None for data in [
            gh_ana, u_ana, v_ana, psfc_ana, gh_fcst, u_fcst, v_fcst, psfc_fcst
    ]):
        print('some data is not available')
        return

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    gh_ana = utl.cut_xrdata(map_extent, gh_ana)
    u_ana = utl.cut_xrdata(map_extent, u_ana)
    v_ana = utl.cut_xrdata(map_extent, v_ana)
    psfc_ana = utl.cut_xrdata(map_extent, psfc_ana)

    gh_fcst = utl.cut_xrdata(map_extent, gh_fcst)
    u_fcst = utl.cut_xrdata(map_extent, u_fcst)
    v_fcst = utl.cut_xrdata(map_extent, v_fcst)
    psfc_fcst = utl.cut_xrdata(map_extent, psfc_fcst)

    u_ana = utl.mask_terrian(uv_lev, psfc_ana, u_ana)
    v_ana = utl.mask_terrian(uv_lev, psfc_ana, v_ana)
    gh_ana = utl.mask_terrian(gh_lev, psfc_ana, gh_ana)
    u_fcst = utl.mask_terrian(uv_lev, psfc_fcst, u_fcst)
    v_fcst = utl.mask_terrian(uv_lev, psfc_fcst, v_fcst)
    gh_fcst = utl.mask_terrian(gh_lev, psfc_fcst, gh_fcst)

    uv_ana = xr.merge(
        [u_ana.rename({'data': 'u'}),
         v_ana.rename({'data': 'v'})])
    uv_fcst = xr.merge(
        [u_fcst.rename({'data': 'u'}),
         v_fcst.rename({'data': 'v'})])

    gh_ana.attrs = {'model_name': model}
    u_ana.attrs = {'model_name': model}
    v_ana.attrs = {'model_name': model}
    gh_fcst.attrs = {'model_name': model}
    u_fcst.attrs = {'model_name': model}
    v_fcst.attrs = {'model_name': model}

    vs_ana.draw_compare_gh_uv(gh_ana=gh_ana,
                              uv_ana=uv_ana,
                              gh_fcst=gh_fcst,
                              uv_fcst=uv_fcst,
                              map_extent=map_extent,
                              **products_kwargs)
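
The time bookkeeping above pairs an analysis with the forecast that verifies at the same moment: the forecast comes from the run started fhour hours before anaTime, with all times in the two-digit-year YYMMDDHH convention used by the file names. A minimal sketch with hypothetical times:

from datetime import datetime, timedelta

anaTime, fhour = '20071112', 24   # hypothetical analysis time and lead time
initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
            timedelta(hours=fhour)).strftime('%Y%m%d%H')[2:10]
print(initTime)  # '20071012': the run that verifies 24 h later, at anaTime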
Example #10
def point_fcst_uv_tmp_according_to_3D_field_vs_sounding(
        output_dir=None,
        obs_ID='55664',
        initTime=None,
        fhour=6,
        day_back=0,
        extra_info={
            'output_head_name':
            ' ',
            'output_tail_name':
            ' ',
            'point_name':
            ' ',
            'drw_thr':
            True,
            'levels_for_interp': [
                1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250,
                200, 150
            ]
        },
        **kwargs):

    model = 'GRAPES_GFS'
    try:
        dir_rqd = [
            utl.Cassandra_dir(data_type='high',
                              data_source='OBS',
                              var_name='TLOGP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl='')
        ]
    except KeyError:
        raise ValueError('Can not find all required directories')

    if (initTime == None):
        initTime = get_latest_initTime(dir_rqd[1][0:-1] + '/850')

    filename_obs = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                    timedelta(hours=fhour)).strftime('%Y%m%d%H%M%S') + '.000'
    obs_pfl_all = MICAPS_IO.get_tlogp(dir_rqd[0][0:-1],
                                      filename=filename_obs,
                                      cache=False)
    if (obs_pfl_all is None):
        return
    obs_pfl_raw = obs_pfl_all[obs_pfl_all.ID == obs_ID]
    obs_pfl = obs_pfl_raw.replace(9999.0, np.nan).dropna(how='any')
    obs_pfl = obs_pfl[obs_pfl.p >= 200.]

    directory = dir_rqd[1][0:-1]
    filename = initTime + '.' + str(fhour).zfill(3)
    HGT_4D = get_model_3D_grid(directory=directory,
                               filename=filename,
                               levels=extra_info['levels_for_interp'],
                               allExists=False)
    directory = dir_rqd[2][0:-1]
    U_4D = get_model_3D_grid(directory=directory,
                             filename=filename,
                             levels=extra_info['levels_for_interp'],
                             allExists=False)
    directory = dir_rqd[3][0:-1]
    V_4D = get_model_3D_grid(directory=directory,
                             filename=filename,
                             levels=extra_info['levels_for_interp'],
                             allExists=False)

    directory = dir_rqd[4][0:-1]
    TMP_4D = get_model_3D_grid(directory=directory,
                               filename=filename,
                               levels=extra_info['levels_for_interp'],
                               allExists=False)

    points = {
        'lon': obs_pfl.lon.to_numpy(),
        'lat': obs_pfl.lat.to_numpy(),
        'altitude': obs_pfl.h.to_numpy() * 10
    }

    directory = dir_rqd[4][0:-1]

    delt_xy = HGT_4D['lon'].values[1] - HGT_4D['lon'].values[0]
    mask = (HGT_4D['lon'] < (points['lon'][0] + 2 * delt_xy)) & (
        HGT_4D['lon'] > (points['lon'][0] - 2 * delt_xy)
    ) & (HGT_4D['lat'] <
         (points['lat'][0] + 2 * delt_xy)) & (HGT_4D['lat'] >
                                              (points['lat'][0] - 2 * delt_xy))

    HGT_4D_sm = HGT_4D['data'].where(mask, drop=True)
    U_4D_sm = U_4D['data'].where(mask, drop=True)
    V_4D_sm = V_4D['data'].where(mask, drop=True)
    TMP_4D_sm = TMP_4D['data'].where(mask, drop=True)

    lon_md = np.squeeze(HGT_4D_sm['lon'].values)
    lat_md = np.squeeze(HGT_4D_sm['lat'].values)
    alt_md = np.squeeze(HGT_4D_sm.values * 10).flatten()
    time_md = HGT_4D_sm['forecast_period'].values

    coords = np.zeros((HGT_4D_sm.level.size, len(lat_md), len(lon_md), 3))
    coords[..., 1] = lat_md.reshape((1, len(lat_md), 1))
    coords[..., 2] = lon_md.reshape((1, 1, len(lon_md)))
    coords = coords.reshape((alt_md.size, 3))
    coords[:, 0] = alt_md

    interpolator_U = LinearNDInterpolator(coords,
                                          U_4D_sm.values.reshape(
                                              (U_4D_sm.values.size)),
                                          rescale=True)
    interpolator_V = LinearNDInterpolator(coords,
                                          V_4D_sm.values.reshape(
                                              (V_4D_sm.values.size)),
                                          rescale=True)
    interpolator_TMP = LinearNDInterpolator(coords,
                                            TMP_4D_sm.values.reshape(
                                                (TMP_4D_sm.values.size)),
                                            rescale=True)

    coords2 = np.zeros((np.size(points['lon']), 3))
    coords2[:, 0] = points['altitude']
    coords2[:, 1] = points['lat']
    coords2[:, 2] = points['lon']

    U_interped = np.squeeze(interpolator_U(coords2))
    V_interped = np.squeeze(interpolator_V(coords2))
    windsp_interped = (U_interped**2 + V_interped**2)**0.5
    winddir10m_interped = mpcalc.wind_direction(U_interped * units('m/s'),
                                                V_interped * units('m/s'))
    TMP_interped = np.squeeze(interpolator_TMP(coords2))

    fcst_pfl = obs_pfl.copy()
    fcst_pfl.wind_angle = np.array(winddir10m_interped)
    fcst_pfl.wind_speed = np.array(windsp_interped)
    fcst_pfl.t = TMP_interped

    fcst_info = xr.DataArray(np.array(U_4D_sm.values),
                             coords=U_4D_sm.coords,
                             dims=U_4D_sm.dims,
                             attrs={
                                 'points': points,
                                 'model': model
                             })

    sta_graphics.draw_sta_skewT_model_VS_obs(fcst_pfl=fcst_pfl,
                                             obs_pfl=obs_pfl,
                                             fcst_info=fcst_info,
                                             output_dir=output_dir)
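
The vertical matching above scatters the model grid into (altitude, lat, lon) tuples, with altitude taken from the geopotential height field, and interpolates each variable to the station's position and elevation with SciPy's LinearNDInterpolator. A minimal sketch on synthetic data (one fixed height per level instead of the full 3-D height field):

import numpy as np
from scipy.interpolate import LinearNDInterpolator

lat = np.array([39.0, 39.5, 40.0, 40.5])
lon = np.array([115.5, 116.0, 116.5, 117.0])
alt = np.array([100., 1500., 3000., 5800.])                  # metres, one per level
tmp = 25. - 0.0065 * alt[:, None, None] + np.zeros((alt.size, lat.size, lon.size))

coords = np.zeros((alt.size, lat.size, lon.size, 3))
coords[..., 0] = alt.reshape((alt.size, 1, 1))
coords[..., 1] = lat.reshape((1, lat.size, 1))
coords[..., 2] = lon.reshape((1, 1, lon.size))
coords = coords.reshape((-1, 3))

interpolator_TMP = LinearNDInterpolator(coords, tmp.ravel(), rescale=True)
print(interpolator_TMP([[1351., 39.9, 116.3833]]))  # temperature at 1351 m over the point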
Example #11
def point_fcst_tmp_according_to_3D_field_box_line(
        output_dir=None,
        t_range=[0,60],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9], 'altitude':[1351]},
        initTime=None,obs_ID=54511,day_back=0,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' ',
            'drw_thr':True,
            'levels_for_interp':[1000, 925, 850, 700, 500,300,200]}
            ):

    try:
        dir_rqd=[utl.Cassandra_dir(data_type='high',data_source='ECMWF_ENSEMBLE',var_name='HGT_RAW',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source='ECMWF_ENSEMBLE',var_name='TMP_RAW',lvl='')]
                        #utl.Cassandra_dir(data_type='surface',data_source=model,var_name='RAIN'+str(t_gap).zfill(2)+'_RAW')]
    except KeyError:
        raise ValueError('Can not find all required directories')
    
    #-get all the directories needed
    if(initTime == None):
        initTime = get_latest_initTime(dir_rqd[0][0:-1]+'/850')
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    directory=dir_rqd[0][0:-1]

    if(t_range[1] > 72):
        fhours = np.append(np.arange(t_range[0], 72, t_gap),np.arange(72,t_range[1],6))
    else:
        fhours = np.arange(t_range[0], t_range[1], t_gap)

    filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
    HGT_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    directory=dir_rqd[0][0:-1]

    directory=dir_rqd[1][0:-1]
    TMP_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    
    #rn=utl.get_model_points_gy(dir_rqd[4], filenames, points,allExists=False)

    directory=dir_rqd[1][0:-1]
    coords_info_2D=utl.get_model_points_gy(directory+str(extra_info['levels_for_interp'][0])+'/',
                        points=points,filenames=filenames,allExists=False)

    delt_xy=HGT_4D['lon'].values[1]-HGT_4D['lon'].values[0]
    mask = (HGT_4D['lon']<(points['lon']+2*delt_xy))&(HGT_4D['lon']>(points['lon']-2*delt_xy))&(HGT_4D['lat']<(points['lat']+2*delt_xy))&(HGT_4D['lat']>(points['lat']-2*delt_xy))

    HGT_4D_sm=HGT_4D['data'].where(mask,drop=True)
    TMP_4D_sm=TMP_4D['data'].where(mask,drop=True)

    lon_md=np.squeeze(HGT_4D_sm['lon'].values)
    lat_md=np.squeeze(HGT_4D_sm['lat'].values)
    alt_md=np.squeeze(HGT_4D_sm.values*10).flatten()
    time_md=np.squeeze(HGT_4D_sm['forecast_period'].values)
    number_md=np.squeeze(HGT_4D_sm['number'].values)
    '''
    coords = np.zeros((len(time_md),len(number_md),len(extra_info['levels_for_interp']),len(lat_md),len(lon_md),5))
    coords[...,0]=time_md.reshape((len(time_md),1,1,1,1))
    coords[...,1]=number_md.reshape((1,len(number_md),1,1,1))
    coords[...,3] = lat_md.reshape((1,1,1,len(lat_md),1))
    coords[...,4] = lon_md.reshape((1,1,1,1,len(lon_md)))
    coords = coords.reshape((alt_md.size,5))
    coords[:,2]=alt_md

    interpolator_TMP = LinearNDInterpolator(coords,TMP_4D_sm.values.reshape((TMP_4D_sm.values.size)),rescale=True)
    
    coords2 = np.zeros((len(time_md),len(number_md),1,1,1,5))
    coords2[...,0]=time_md.reshape((len(time_md),1,1,1,1))
    coords2[...,1]=number_md.reshape(1,(len(number_md),1,1,1))
    coords2[...,2]=points['altitude'][0]
    coords2[...,3] = points['lat'][0]
    coords2[...,4] = points['lon'][0]
    coords2 = coords2.reshape((time_md.size,5))

    TMP_interped=np.squeeze(interpolator_TMP(coords2))
    '''
    TMP_interped=np.zeros((len(time_md),len(number_md)))

    for it in range(0,len(time_md)):
        for inum in range(0,len(number_md)):
            alt_md=np.squeeze(HGT_4D_sm.values[it,inum,:,:,:]*10).flatten()
            coords = np.zeros((len(extra_info['levels_for_interp']),len(lat_md),len(lon_md),3))
            coords[...,1] = lat_md.reshape((1,len(lat_md),1))
            coords[...,2] = lon_md.reshape((1,1,len(lon_md)))
            coords = coords.reshape((alt_md.size,3))
            coords[:,0]=alt_md
            interpolator_TMP = LinearNDInterpolator(coords,TMP_4D_sm.values[it,inum,:,:,:].reshape((TMP_4D_sm.values[it,inum,:,:,:].size)),rescale=True)

            coords2 = np.zeros((1,1,1,3))
            coords2[...,0]=points['altitude'][0]
            coords2[...,1] = points['lat'][0]
            coords2[...,2] = points['lon'][0]
            coords2 = coords2.reshape((1,3))

            TMP_interped[it,inum]=np.squeeze(interpolator_TMP(coords2))

    TMP_interped_xr=coords_info_2D.copy()
    TMP_interped_xr['data'].values[:,:,0,0]=TMP_interped
    TMP_interped_xr.attrs['model']='ECMWF_ENSEMBLE'

    Ensemble_graphics.box_line_temp(TMP=TMP_interped_xr,
        points=points,
        extra_info=extra_info,output_dir=output_dir)  
Example #12
def ivt(initTime=None,
        fhour=0,
        frange=None,
        model='ECMWF',
        region='中国陆地',
        show='list',
        width=500,
        getModels=False,
        getInitTime=False,
        noshow=False):
    """
    Analyze the vertically integrated water vapor transport (IVT) field.

    Args:
        initTime (string, optional): model initial time YYYYmmddHH, like 2020061320.
                                     Defaults to None, which uses the model's latest run time.
        fhour (int, optional): model forecast hour. Defaults to 0.
        frange (list, optional): model forecast hour range, [start, end, step] or [start, end],
                                 in which case step=6. If frange is set, fhour is ignored.
        model (str, optional): model name. Defaults to 'ECMWF'.
                               Use "getModels=True" to return all available model names.
        region (str or list, optional): predefined region name, like '中国', '中国陆地', '华北', '东北', '华东', '华中', '华南',
                                        '西南', '西北', '新疆', '青藏', or [lonmin, lonmax, latmin, latmax]. Defaults to '中国陆地'.
        show (str, optional): 'list', show all plots in one cell.
                              'tab', show one plot in each tab page.
                              'animation', show a gif animation.
        width (int, optional): width of the displayed image. Defaults to 500.
        getInitTime (bool, optional): if True, only return the initial time that would be used.
        noshow (bool, optional): if True, just return the plots without displaying them.
    """

    # get function arguments
    kwargs = locals().copy()

    # set and check model directory
    model_dirs = {
        'ECMWF': [
            "ECMWF_HR/SPFH/", "ECMWF_HR/UGRD/", "ECMWF_HR/VGRD/",
            "ECMWF_HR/PRES/SURFACE/", "ECMWF_HR/PRMSL/"
        ],
        'GRAPES': [
            'GRAPES_GFS/SPFH/', 'GRAPES_GFS/UGRD/', 'GRAPES_GFS/VGRD/',
            'GRAPES_GFS/PRES/SURFACE/', 'GRAPES_GFS/PRMSL/'
        ]
    }
    if getModels: return list(model_dirs.keys())
    model_dir = check_model(model, model_dirs)
    if model_dir is None: return None

    # check initTime
    if initTime is None:
        initTime = get_latest_initTime(model_dir[0] + '1000')
    initTime = check_initTime(initTime)
    if getInitTime: return initTime

    # check frange
    if frange is not None:
        return util.draw_multiple_plots(ivt, kwargs)

    # prepare data
    filename = initTime.strftime("%y%m%d%H") + '.' + str(int(fhour)).zfill(3)
    levels = [1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300]
    qData = get_model_3D_grid(model_dir[0], filename, levels=levels)
    if qData is None: return None
    uData = get_model_3D_grid(model_dir[1], filename, levels=levels)
    if uData is None: return None
    vData = get_model_3D_grid(model_dir[2], filename, levels=levels)
    if vData is None: return None
    sPres = get_model_grid(model_dir[3], filename)
    if sPres is None: return None
    mslp = get_model_grid(model_dir[4], filename)
    if mslp is None: return None

    # get the coordinates
    lon = qData.lon.values
    lat = qData.lat.values
    lev = qData.level.values
    time = qData.forecast_reference_time.values
    fhour = qData.forecast_period.values[0]

    # comform surface and high variables
    sPres = hinterp(sPres.data.values.squeeze(), sPres.lon.values,
                    sPres.lat.values, lon, lat)
    mslp = hinterp(mslp.data.values.squeeze(), mslp.lon.values,
                   mslp.lat.values, lon, lat)
    mslp = grid_smooth(mslp, radius=4, method='CRES')
    qData = qData.data.values.squeeze()
    uData = uData.data.values.squeeze()
    vData = vData.data.values.squeeze()

    # compute IVT
    iquData, iqvData = cal_ivt(qData,
                               uData,
                               vData,
                               lon,
                               lat,
                               lev,
                               surf_pres=sPres)

    # draw the figure
    plot = draw_ivt(iquData,
                    iqvData,
                    lon,
                    lat,
                    mslp=mslp,
                    map_region=get_map_region(region),
                    title_kwargs={
                        'name': model.upper(),
                        'time': time,
                        'fhour': fhour,
                        'tzone': 'BJT'
                    })
    if noshow:
        return plot, str(int(fhour)).zfill(3)
    else:
        return plot
Пример #13
0
def tmp_evo(initTime=None,
            tmp_lev=850,
            t_gap=6,
            t_range=[6, 36],
            model='ECMWF',
            data_source='MICAPS',
            map_ratio=14 / 9,
            zoom_ratio=20,
            cntr_pnt=[104, 34],
            south_China_sea=True,
            area=None,
            **kwargs):

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    #prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='TMP',
                                  lvl=tmp_lev),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime == None):
            initTime = MICAPS_IO.get_latest_initTime(data_dir[0])
        filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]

        # retrieve data from micaps server
        tmp = MICAPS_IO.get_model_grids(data_dir[0], filenames=filenames)
        psfc = MICAPS_IO.get_model_grids(data_dir[1], filenames=filenames)

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, 0, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=0,
                                                   fhour=0,
                                                   UTC=True)
        try:
            tmp = CMISS_IO.cimiss_model_by_times(
                '20' + filename[0:8],
                valid_times=fhours,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TEM'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=tmp_lev,
                fcst_ele="TEM",
                units='K')
            tmp['data'].values = tmp['data'].values - 273.15

            psfc = CMISS_IO.cimiss_model_by_times(
                '20' + filename[0:8],
                valid_times=fhours,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                fcst_level=0,
                fcst_ele="PRS",
                units='Pa')
            psfc['data'] = psfc['data'] / 100.

        except KeyError:
            raise ValueError('Can not find all data needed')
    # set map extent
    if (area != None):
        south_China_sea = False

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    map_extent, delt_x, delt_y = utl.get_map_extent(cntr_pnt, zoom_ratio,
                                                    map_ratio)
    tmp = utl.cut_xrdata(map_extent, tmp, delt_x=delt_x, delt_y=delt_y)
    tmp = tmp.rolling({'lon': 3, 'lat': 3}).mean()
    psfc = utl.cut_xrdata(map_extent, psfc, delt_x=delt_x, delt_y=delt_y)
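    # mask grid points where the requested pressure level lies below the model terrain
    # (i.e. where the surface pressure is lower than tmp_lev)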
    tmp = utl.mask_terrian(tmp_lev, psfc, tmp)
    tmp.attrs['model'] = model
    coldwave_graphics.draw_tmp_evo(tmp=tmp,
                                   map_extent=map_extent,
                                   south_China_sea=south_China_sea,
                                   **kwargs)
Пример #14
0
def Station_Snow_Synthetical_Forecast_From_Cassandra(
        model='ECMWF',
        output_dir=None,
        t_range=[0,84],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9]},
        initTime=None,
        draw_VIS=True,drw_thr=False,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' '}
            ):

    #+get all the directories needed
    try:
        dir_rqd=[ 
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_3_HOURS/",
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_6_HOURS/",
                "ECMWF_HR/SNOD/",
                "ECMWF_HR/SDEN/",
                "ECMWF_HR/UGRD_100M/",
                "ECMWF_HR/VGRD_100M/",
                "NWFD_SCMOC/VIS/",
                "NCEP_GFS_HR/SNOD/",
                "ECMWF_HR/SNOW06/",
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='T2m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='u10m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='v10m'),
                'ECMWF_ENSEMBLE/RAW/SNOW06/'
                ]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    try:
        dir_opt=[ 
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='Td2m')
                ]
        name_opt=['Td2m']
    except:
        dir_opt=[
                utl.Cassandra_dir(data_type='surface',data_source=model,var_name='rh2m')
                ]
        name_opt=['rh2m']
          
    #-get all the directories needed

    if(initTime == None):
        last_file={model:get_latest_initTime(dir_rqd[0]),
                    'SCMOC':get_latest_initTime(dir_rqd[6]),
                    }
    else:
        last_file={model:initTime[0],
                    'SCMOC':initTime[1],
                    }        
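    # the init-time strings follow the MICAPS 'YYMMDDHH' convention; year, month,
    # day and hour are sliced out below for both the model and the SCMOC guidance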

    y_s={model:int('20'+last_file[model][0:2]),
        'SCMOC':int('20'+last_file['SCMOC'][0:2])}
    m_s={model:int(last_file[model][2:4]),
        'SCMOC':int(last_file['SCMOC'][2:4])}
    d_s={model:int(last_file[model][4:6]),
        'SCMOC':int(last_file['SCMOC'][4:6])}
    h_s={model:int(last_file[model][6:8]),
        'SCMOC':int(last_file['SCMOC'][6:8])}

    fhours = np.arange(t_range[0], t_range[1], t_gap)
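    # build the array of valid times (SCMOC initial time plus each forecast hour) below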

    for ifhour in fhours:
        if (ifhour == fhours[0] ):
            time_all=datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour))
        else:
            time_all=np.append(time_all,datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour)))            

    filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
    t2m=utl.get_model_points_gy(dir_rqd[9], filenames, points,allExists=False)
    
    if(name_opt[0] == 'rh2m'):
        rh2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)
        Td2m=mpcalc.dewpoint_rh(t2m['data'].values*units('degC'),rh2m['data'].values/100.)
        p_vapor=(rh2m['data'].values/100.)*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))

    if(name_opt[0] == 'Td2m'):
        Td2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)        
        rh2m=mpcalc.relative_humidity_from_dewpoint(t2m['data'].values* units('degC'),
                Td2m['data'].values* units('degC'))
        p_vapor=(np.array(rh2m))*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))
        Td2m=np.array(Td2m['data'].values)* units('degC')

    #SN06_ensm=utl.get_model_points_gy(dir_rqd[12], filenames, points,allExists=False)
    '''
    SN06_01, SN06_10, SN06_25, SN06_50, SN06_75, SN06_90, SN06_99 = ([] for _ in range(7))
    for i in range(0, len(SN06_ensm['forecast_period'])):
        SN06_std = np.std(np.squeeze(SN06_ensm['data'].values[i, :]))
        SN06_mean = np.mean(np.squeeze(SN06_ensm['data'].values[i, :]))
        SN06_01.append(norm.pdf(0.01, SN06_mean, SN06_std))
        SN06_10.append(norm.pdf(0.1, SN06_mean, SN06_std))
        SN06_25.append(norm.pdf(0.25, SN06_mean, SN06_std))
        SN06_50.append(norm.pdf(0.5, SN06_mean, SN06_std))
        SN06_75.append(norm.pdf(0.75, SN06_mean, SN06_std))
        SN06_90.append(norm.pdf(0.9, SN06_mean, SN06_std))
        SN06_99.append(norm.pdf(0.99, SN06_mean, SN06_std))

    SN06_ensm_stc = {
        'SN06_01': SN06_01,
        'SN06_10': SN06_10,
        'SN06_25': SN06_25,
        'SN06_50': SN06_50,
        'SN06_75': SN06_75,
        'SN06_90': SN06_90,
        'SN06_99': SN06_99,
        }
    '''
    u10m=utl.get_model_points_gy(dir_rqd[10], filenames, points,allExists=False)
    v10m=utl.get_model_points_gy(dir_rqd[11], filenames, points,allExists=False)
    wsp10m=(u10m['data']**2+v10m['data']**2)**0.5
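    # p_vapor (computed above) is the water vapour pressure in hPa from a Magnus-type
    # formula; AT is an empirical apparent-temperature regression on 2 m temperature,
    # vapour pressure and 10 m wind speed (coefficients as used in this code base)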
    AT=1.07*t2m['data'].values+0.2*p_vapor-0.65*wsp10m-2.7
    # wind chill index (https://en.wikipedia.org/wiki/Wind_chill); the formula expects
    # wind speed in km/h, so the 10 m wind (m/s) is converted before use
    TWC=13.12+0.6215*t2m['data'].values-11.37*((wsp10m*3.6)**0.16)+0.3965*t2m['data'].values*((wsp10m*3.6)**0.16)

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [last_file['SCMOC']+'.'+str(fhour).zfill(3) for fhour in fhours]
    VIS=utl.get_model_points_gy(dir_rqd[6], filenames, points,allExists=False,fill_null=True,Null_value=-0.001)     
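    # The SCMOC guidance and the model may have different latest initial times. When
    # they differ, the model filenames are offset by +12 h (the model run is assumed to
    # be 12 hours older), and the model output interval changes from 3-hourly to
    # 6-hourly beyond 72 h (60 h for the older run), hence the four cases below.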

    if(last_file['SCMOC'] == last_file[model] and t_range[1] > 72):
        fhours = np.append(np.arange(3,72,3),np.arange(72, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]            

    if(last_file['SCMOC'] != last_file[model] and t_range[1] > 60):
        fhours = np.append(np.arange(3,60,3),np.arange(60, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] != last_file[model] and t_range[1] <= 60):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] == last_file[model] and t_range[1] <= 72):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    SNOD1=utl.get_model_points_gy(dir_rqd[2], filenames2, points,allExists=False)
    SNOD2=utl.get_model_points_gy(dir_rqd[7], filenames2, points,allExists=False)
    SDEN=utl.get_model_points_gy(dir_rqd[3], filenames2, points,allExists=False)
    SN06=utl.get_model_points_gy(dir_rqd[8], filenames2, points,allExists=False)
    u100m=utl.get_model_points_gy(dir_rqd[4], filenames2, points,allExists=False)
    v100m=utl.get_model_points_gy(dir_rqd[5], filenames2, points,allExists=False)
    wsp100m=(u100m['data']**2+v100m['data']**2)**0.5

    if(fhours[-1] < 120):
        gust10m=utl.get_model_points_gy(dir_rqd[0], filenames, points,allExists=False)
    if(fhours[-1] >= 120):
        if(last_file['SCMOC'] == last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        if(last_file['SCMOC'] != last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        gust10m=utl.get_model_points_gy(dir_rqd[1], filenames, points,allExists=False)        
        
    sta_graphics.draw_Station_Snow_Synthetical_Forecast_From_Cassandra(
            TWC=TWC,AT=AT,u10m=u10m,v10m=v10m,u100m=u100m,v100m=v100m,
            gust10m=gust10m,wsp10m=wsp10m,wsp100m=wsp100m,SNOD1=SNOD1,SNOD2=SNOD2,SDEN=SDEN,SN06=SN06,
            draw_VIS=draw_VIS,VIS=VIS,drw_thr=drw_thr,
            time_all=time_all,
            model=model,points=points,
            output_dir=output_dir,extra_info=extra_info)
Пример #15
0
def cumulated_precip_evo(initTime=None,
                         t_gap=6,
                         t_range=[6, 36],
                         day_back=0,
                         model='ECMWF',
                         data_source='MICAPS',
                         map_ratio=14 / 9,
                         zoom_ratio=20,
                         cntr_pnt=[104, 34],
                         south_China_sea=True,
                         area=None,
                         city=False,
                         output_dir=None,
                         Global=False,
                         **kwargs):
    fhours = np.arange(t_range[0], t_range[1] + 1, t_gap)
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='RAIN' + '%02d' % t_gap)
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        if (initTime == None):
            initTime = MICAPS_IO.get_latest_initTime(data_dir[0])
        filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
        # retrieve data from micaps server
        rain = MICAPS_IO.get_model_grids(data_dir[0], filenames=filenames)
        rain2 = rain.copy(deep=True)
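        # rain holds precipitation per t_gap interval; accumulate it so that rain2 at
        # step itime is the total precipitation from the first interval through itime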
        for itime in range(1, len(rain['forecast_period'].values)):
            rain2['data'].values[itime, :, :] = np.sum(
                rain['data'].values[0:itime + 1, :, :], axis=0)

    if (data_source == 'CIMISS'):
        if (initTime != None):
            filename = utl.model_filename(initTime, 0, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=0,
                                                   fhour=0,
                                                   UTC=True)
        try:
            TPE1 = CMISS_IO.cimiss_model_by_times(
                '20' + filename[0:8],
                valid_times=fhours,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TPE'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="TPE",
                units='kg*m^-2')
        except KeyError:
            raise ValueError('Can not find all data needed')
        rain = TPE1.copy(deep=True)
        rain['data'].values = TPE1['data'].values
        # TPE is precipitation accumulated from the initial time, so no further
        # summation is applied; define rain2 here as well so the CIMISS branch
        # reaches the masking and drawing code below
        rain2 = rain.copy(deep=True)


    # set map extent
    if (area != None):
        south_China_sea = False

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    else:
        map_extent = [0, 0, 0, 0]
        map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
        map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
        map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
        map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1
    mask1 = (rain['lon'] > map_extent[0] - delt_x) & (
        rain['lon'] < map_extent[1] + delt_x) & (
            rain['lat'] > map_extent[2] - delt_y) & (rain['lat'] <
                                                     map_extent[3] + delt_y)
    rain2 = rain2.where(mask1, drop=True)
    rain2.attrs['model'] = model
    rain2.attrs['t_gap'] = t_gap
    # draw
    QPF_graphics.draw_cumulated_precip_evo(rain=rain2,
                                           map_extent=map_extent,
                                           regrid_shape=20,
                                           city=city,
                                           south_China_sea=south_China_sea,
                                           output_dir=output_dir,
                                           Global=Global)
Пример #16
0
def Station_Synthetical_Forecast_From_Cassandra(
        model='ECMWF',
        output_dir=None,
        t_range=[0,84],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9]},
        initTime=None,
        draw_VIS=True,drw_thr=False,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' '}
            ):

    #+get all the directories needed
    try:
        dir_rqd=[ 
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_3_HOURS/",
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_6_HOURS/",
                "ECMWF_HR/TCDC/",
                "ECMWF_HR/LCDC/",
                "ECMWF_HR/UGRD_100M/",
                "ECMWF_HR/VGRD_100M/",
                "NWFD_SCMOC/VIS/",

                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='RAIN03'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='RAIN06'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='T2m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='u10m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='v10m'),
                ]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    try:
        dir_opt=[ 
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='Td2m')
                ]
        name_opt=['Td2m']
    except:
        dir_opt=[
                utl.Cassandra_dir(data_type='surface',data_source=model,var_name='rh2m')
                ]
        name_opt=['rh2m']
          
    #-get all the directories needed

    if(initTime == None):
        last_file={model:get_latest_initTime(dir_rqd[0]),
                    'SCMOC':get_latest_initTime(dir_rqd[6]),
                    }
    else:
        last_file={model:initTime[0],
                    'SCMOC':initTime[1],
                    }        

    y_s={model:int('20'+last_file[model][0:2]),
        'SCMOC':int('20'+last_file['SCMOC'][0:2])}
    m_s={model:int(last_file[model][2:4]),
        'SCMOC':int(last_file['SCMOC'][2:4])}
    d_s={model:int(last_file[model][4:6]),
        'SCMOC':int(last_file['SCMOC'][4:6])}
    h_s={model:int(last_file[model][6:8]),
        'SCMOC':int(last_file['SCMOC'][6:8])}

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    for ifhour in fhours:
        if (ifhour == fhours[0] ):
            time_all=datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour))
        else:
            time_all=np.append(time_all,datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour)))            

    filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
    t2m=utl.get_model_points_gy(dir_rqd[9], filenames, points,allExists=False)
    
    if(name_opt[0] == 'rh2m'):
        rh2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)
        Td2m=mpcalc.dewpoint_rh(t2m['data'].values*units('degC'),rh2m['data'].values/100.)
        p_vapor=(rh2m['data'].values/100.)*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))

    if(name_opt[0] == 'Td2m'):
        Td2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)        
        rh2m=mpcalc.relative_humidity_from_dewpoint(t2m['data'].values* units('degC'),
                Td2m['data'].values* units('degC'))
        p_vapor=(np.array(rh2m))*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))
        Td2m=np.array(Td2m['data'].values)* units('degC')

    u10m=utl.get_model_points_gy(dir_rqd[10], filenames, points,allExists=False)
    v10m=utl.get_model_points_gy(dir_rqd[11], filenames, points,allExists=False)
    wsp10m=(u10m['data']**2+v10m['data']**2)**0.5
    AT=1.07*t2m['data'].values+0.2*p_vapor-0.65*wsp10m-2.7      
    if((t_range[1]) > 72):
        fhours = np.arange(6, t_range[1], 6)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        r03=utl.get_model_points_gy(dir_rqd[8], filenames, points,allExists=False)
    else:
        r03=utl.get_model_points_gy(dir_rqd[7], filenames, points,allExists=False)

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [last_file['SCMOC']+'.'+str(fhour).zfill(3) for fhour in fhours]
    VIS=utl.get_model_points_gy(dir_rqd[6], filenames, points,allExists=False,fill_null=True,Null_value=-0.001)     

    if(last_file['SCMOC'] == last_file[model] and t_range[1] > 72):
        fhours = np.append(np.arange(3,72,3),np.arange(72, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]            

    if(last_file['SCMOC'] != last_file[model] and t_range[1] > 60):
        fhours = np.append(np.arange(3,60,3),np.arange(60, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] != last_file[model] and t_range[1] <= 60):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] == last_file[model] and t_range[1] <= 72):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    TCDC=utl.get_model_points_gy(dir_rqd[2], filenames2, points,allExists=False)
    LCDC=utl.get_model_points_gy(dir_rqd[3], filenames2, points,allExists=False)
    u100m=utl.get_model_points_gy(dir_rqd[4], filenames2, points,allExists=False)
    v100m=utl.get_model_points_gy(dir_rqd[5], filenames2, points,allExists=False)
    wsp100m=(u100m['data']**2+v100m['data']**2)**0.5

    if(fhours[-1] < 120):
        gust10m=utl.get_model_points_gy(dir_rqd[0], filenames, points,allExists=False)
    if(fhours[-1] >= 120):
        if(last_file['SCMOC'] == last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        if(last_file['SCMOC'] != last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        gust10m=utl.get_model_points_gy(dir_rqd[1], filenames, points,allExists=False)        
        
    sta_graphics.draw_Station_Synthetical_Forecast_From_Cassandra(
            t2m=t2m,Td2m=Td2m,AT=AT,u10m=u10m,v10m=v10m,u100m=u100m,v100m=v100m,
            gust10m=gust10m,wsp10m=wsp10m,wsp100m=wsp100m,r03=r03,TCDC=TCDC,LCDC=LCDC,
            draw_VIS=draw_VIS,VIS=VIS,drw_thr=drw_thr,
            time_all=time_all,
            model=model,points=points,
            output_dir=output_dir,extra_info=extra_info)
Пример #17
0
def cumulated_precip(initTime=None,
                     t_gap=6,
                     t_range=[6, 36],
                     day_back=0,
                     model='ECMWF',
                     data_source='MICAPS',
                     map_ratio=14 / 9,
                     zoom_ratio=20,
                     cntr_pnt=[104, 34],
                     south_China_sea=True,
                     area=None,
                     city=False,
                     output_dir=None,
                     Global=False,
                     **kwargs):
    fhours = np.arange(t_range[0], t_range[1] + 1, t_gap)
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='RAIN' + '%02d' % t_gap)
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        if (initTime == None):
            initTime = MICAPS_IO.get_latest_initTime(data_dir[0])
        filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
        # retrieve data from micaps server
        rain = MICAPS_IO.get_model_grids(data_dir[0], filenames=filenames)
        rain2 = rain.sum('time')

    if (data_source == 'CIMISS'):
        if (initTime != None):
            filename = utl.model_filename(initTime, 0, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=0,
                                                   fhour=0,
                                                   UTC=True)
        try:
            TPE1 = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhours[0],
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='TPE'),
                                                 fcst_level=0,
                                                 fcst_ele="TPE",
                                                 units='kg*m^-2')
            if TPE1 is None:
                return

            TPE2 = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhours[-1],
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='TPE'),
                                                 fcst_level=0,
                                                 fcst_ele="TPE",
                                                 units='kg*m^-2')
            if TPE2 is None:
                return

        except KeyError:
            raise ValueError('Can not find all data needed')
        rain = TPE1.copy(deep=True)
        rain['data'].values = (TPE2['data'].values - TPE1['data'].values)
        rain2 = rain.sum('time')
    # set map extent
    if (area != None):
        south_China_sea = False

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    else:
        map_extent = [0, 0, 0, 0]
        map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
        map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
        map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
        map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1
    rain = utl.cut_xrdata(map_extent=map_extent,
                          xr_input=rain,
                          delt_y=delt_y,
                          delt_x=delt_x)
    rain2.attrs['model'] = model
    rain2.attrs['t_gap'] = t_gap
    rain2.attrs['initTime'] = datetime.strptime(initTime, '%y%m%d%H')
    rain2.attrs['fhour1'] = fhours[0]
    rain2.attrs['fhour2'] = fhours[-1]
    # draw
    QPF_graphics.draw_cumulated_precip(rain=rain2,
                                       map_extent=map_extent,
                                       city=city,
                                       south_China_sea=south_China_sea,
                                       output_dir=output_dir,
                                       Global=Global)
Пример #18
0
def point_wind_time_fcst_according_to_3D_wind(
        model='ECMWF',
        output_dir=None,
        t_range=[0,60],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9], 'altitude':[1351]},
        initTime=None,draw_obs=True,obs_ID=54511,day_back=0,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' ',
            'drw_thr':True,
            'levels_for_interp':[1000, 950, 925, 900, 850, 800, 700, 600, 500]}
            ):

    #+get all the directories needed
    try:
        dir_rqd=[utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl='')]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    #-get all the directories needed
    if(initTime == None):
        initTime = get_latest_initTime(dir_rqd[0][0:-1]+'/850')
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    directory=dir_rqd[0][0:-1]
    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
    HGT_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    directory=dir_rqd[1][0:-1]
    U_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    directory=dir_rqd[2][0:-1]
    V_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    #obs
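    # collect surface PLOT observations for station obs_ID at each forecast valid time;
    # if a retrieval fails or no observation is found, fall back to forecast-only plotting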
    if(draw_obs == True):
        initial_time=pd.to_datetime(str(V_4D['forecast_reference_time'].values)).replace(tzinfo=None).to_pydatetime()
        sign=0
        for ifhour in V_4D['forecast_period'].values:
            temp=(initial_time+timedelta(hours=ifhour))
            filenames_obs=temp.strftime("%Y%m%d%H")+'0000.000'
            try:
                obs_data=get_station_data('SURFACE/PLOT/',filename=filenames_obs)
            except:
                break

            if(obs_data is not None):
                temp=obs_data.where(obs_data['ID']==obs_ID).dropna(how='all')
                if ((ifhour == V_4D['forecast_period'].values[0]) or ((ifhour > V_4D['forecast_period'].values[0]) and (sign==0))):
                    if(len(temp) > 0):
                        sta_obs_data=obs_data.where(obs_data['ID']==obs_ID).dropna(how='all').reset_index()
                        sign=1
                else:
                    if(len(temp) > 0):
                        sta_obs_data=sta_obs_data.append(temp).reset_index()
            if(obs_data is None):
                break
        try:
            sta_obs_data
        except:
            draw_obs=False

    delt_xy=HGT_4D['lon'].values[1]-HGT_4D['lon'].values[0]
    mask = (HGT_4D['lon']<(points['lon']+2*delt_xy))&(HGT_4D['lon']>(points['lon']-2*delt_xy))&(HGT_4D['lat']<(points['lat']+2*delt_xy))&(HGT_4D['lat']>(points['lat']-2*delt_xy))

    HGT_4D_sm=HGT_4D['data'].where(mask,drop=True)
    U_4D_sm=U_4D['data'].where(mask,drop=True)
    V_4D_sm=V_4D['data'].where(mask,drop=True)

    lon_md=np.squeeze(HGT_4D_sm['lon'].values)
    lat_md=np.squeeze(HGT_4D_sm['lat'].values)
    alt_md=np.squeeze(HGT_4D_sm.values*10).flatten()
    time_md=np.squeeze(HGT_4D_sm['forecast_period'].values)
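    # Build scattered sample coordinates (time, height, lat, lon) for every grid point
    # below: the model geopotential height (apparently stored in decametres, hence the
    # *10) supplies the vertical coordinate, so the winds can be interpolated to the
    # station's actual altitude rather than to a fixed pressure level.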

    coords = np.zeros((len(time_md),len(extra_info['levels_for_interp']),len(lat_md),len(lon_md),4))
    coords[...,0]=time_md.reshape((len(time_md),1,1,1))
    coords[...,2] = lat_md.reshape((1,1,len(lat_md),1))
    coords[...,3] = lon_md.reshape((1,1,1,len(lon_md)))
    coords = coords.reshape((alt_md.size,4))
    coords[:,1]=alt_md

    interpolator_U = LinearNDInterpolator(coords,U_4D_sm.values.reshape((U_4D_sm.values.size)),rescale=True)
    interpolator_V = LinearNDInterpolator(coords,V_4D_sm.values.reshape((V_4D_sm.values.size)),rescale=True)

    coords2 = np.zeros((len(time_md),1,1,1,4))
    coords2[...,0]=time_md.reshape((len(time_md),1,1,1))
    coords2[...,1]=points['altitude'][0]
    coords2[...,2] = points['lat'][0]
    coords2[...,3] = points['lon'][0]
    coords2 = coords2.reshape((time_md.size,4))

    U_interped=np.squeeze(interpolator_U(coords2))
    V_interped=np.squeeze(interpolator_V(coords2))
    time_info=HGT_4D_sm.coords

    sta_graphics.draw_point_wind(U=U_interped,V=V_interped,
        model=model,
        output_dir=output_dir,
        points=points,
        time_info=time_info,
        extra_info=extra_info
            )        
Пример #19
0
def gh_uv_mslp(initTime=None,
               fhour=0,
               day_back=0,
               model='ECMWF',
               gh_lev=500,
               uv_lev=850,
               map_ratio=14 / 9,
               zoom_ratio=20,
               cntr_pnt=[104, 34],
               south_China_sea=True,
               area=None,
               city=False,
               output_dir=None,
               data_source='MICAPS',
               Global=False,
               **kwargs):

    if (area != None):
        south_China_sea = False

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=gh_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PRMSL'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename

        if (initTime == None):
            initTime = MICAPS_IO.get_latest_initTime(data_dir[-1])

        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        mslp = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        psfc = MICAPS_IO.get_model_grid(data_dir[4], filename=filename)

    if (data_source == 'CIMISS'):

        # get filename
        if (initTime != None):
            initTime = utl.model_filename(initTime, fhour, UTC=True)[0:8]
        else:
            initTime = utl.filename_day_back_model(fhour=fhour, UTC=True)[0:8]
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            gh['data'].values = gh['data'].values / 10.

            u = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIU",
                units='m/s')

            v = CMISS_IO.cimiss_model_by_time(
                '20' + initTime,
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIV",
                units='m/s')

            if (model == 'ECMWF'):
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + initTime,
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="GSSP",
                    units='Pa')
            else:
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + initTime,
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="SSP",
                    units='Pa')
            mslp['data'] = mslp['data'] / 100.

            psfc = CMISS_IO.cimiss_model_by_time('20' + initTime,
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='PRS'),
                                                 fcst_level=0,
                                                 fcst_ele="PRS",
                                                 units='Pa')
            psfc['data'] = psfc['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')
    # prepare data

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
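    # [lonmin, lonmax, latmin, latmax] centred on cntr_pnt: zoom_ratio is the half-height
    # of the domain in degrees of latitude and map_ratio sets the width/height aspect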
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    mslp = utl.cut_xrdata(map_extent, mslp, delt_x=delt_x, delt_y=delt_y)
    psfc = utl.cut_xrdata(map_extent, psfc, delt_x=delt_x, delt_y=delt_y)

    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)
    gh = utl.mask_terrian(gh_lev, psfc, gh)

    gh.attrs['model'] = model

    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    synoptic_graphics.draw_gh_uv_mslp(mslp=mslp,
                                      gh=gh,
                                      uv=uv,
                                      map_extent=map_extent,
                                      regrid_shape=20,
                                      city=city,
                                      south_China_sea=south_China_sea,
                                      output_dir=output_dir,
                                      Global=Global)
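# A minimal usage sketch for gh_uv_mslp above (values are illustrative; initTime follows
# the 'YYMMDDHH' convention handled by utl.model_filename):
#   gh_uv_mslp(initTime='21051020', fhour=24, model='ECMWF', gh_lev=500, uv_lev=850)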
Пример #20
0
def point_fcst_according_to_3D_field(
        model='ECMWF',
        output_dir=None,
        t_range=[0,60],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9], 'altitude':[1351]},
        initTime=None,obs_ID=54511,day_back=0,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' ',
            'drw_thr':True,
            'levels_for_interp':[1000, 950, 925, 900, 850, 800, 700, 600, 500]}
            ):

    try:
        dir_rqd=[utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='RAIN'+str(t_gap).zfill(2))]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    #-get all the directories needed
    if(initTime == None):
        initTime = get_latest_initTime(dir_rqd[0][0:-1]+'/850')
        #initTime=utl.filename_day_back_model(day_back=day_back,fhour=0)[0:8]

    directory=dir_rqd[0][0:-1]
    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
    HGT_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    directory=dir_rqd[1][0:-1]
    U_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    directory=dir_rqd[2][0:-1]
    V_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)

    directory=dir_rqd[3][0:-1]
    TMP_4D=get_model_3D_grids(directory=directory,filenames=filenames,levels=extra_info['levels_for_interp'], allExists=False)
    
    rn=utl.get_model_points_gy(dir_rqd[4], filenames, points,allExists=False)

    directory=dir_rqd[3][0:-1]
    coords_info_2D=utl.get_model_points_gy(directory+str(extra_info['levels_for_interp'][0])+'/',
                        points=points,filenames=filenames,allExists=False)

    delt_xy=HGT_4D['lon'].values[1]-HGT_4D['lon'].values[0]
    mask = (HGT_4D['lon']<(points['lon']+2*delt_xy))&(HGT_4D['lon']>(points['lon']-2*delt_xy))&(HGT_4D['lat']<(points['lat']+2*delt_xy))&(HGT_4D['lat']>(points['lat']-2*delt_xy))

    HGT_4D_sm=HGT_4D['data'].where(mask,drop=True)
    U_4D_sm=U_4D['data'].where(mask,drop=True)
    V_4D_sm=V_4D['data'].where(mask,drop=True)
    TMP_4D_sm=TMP_4D['data'].where(mask,drop=True)

    lon_md=np.squeeze(HGT_4D_sm['lon'].values)
    lat_md=np.squeeze(HGT_4D_sm['lat'].values)
    alt_md=np.squeeze(HGT_4D_sm.values*10).flatten()
    time_md=np.squeeze(HGT_4D_sm['forecast_period'].values)
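    # same (time, height, lat, lon) scattered-point interpolation scheme as in the
    # wind-only example above; temperature is interpolated to the station altitude as well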

    coords = np.zeros((len(time_md),len(extra_info['levels_for_interp']),len(lat_md),len(lon_md),4))
    coords[...,0]=time_md.reshape((len(time_md),1,1,1))
    coords[...,2] = lat_md.reshape((1,1,len(lat_md),1))
    coords[...,3] = lon_md.reshape((1,1,1,len(lon_md)))
    coords = coords.reshape((alt_md.size,4))
    coords[:,1]=alt_md

    interpolator_U = LinearNDInterpolator(coords,U_4D_sm.values.reshape((U_4D_sm.values.size)),rescale=True)
    interpolator_V = LinearNDInterpolator(coords,V_4D_sm.values.reshape((V_4D_sm.values.size)),rescale=True)
    interpolator_TMP = LinearNDInterpolator(coords,TMP_4D_sm.values.reshape((TMP_4D_sm.values.size)),rescale=True)

    coords2 = np.zeros((len(time_md),1,1,1,4))
    coords2[...,0]=time_md.reshape((len(time_md),1,1,1))
    coords2[...,1]=points['altitude'][0]
    coords2[...,2] = points['lat'][0]
    coords2[...,3] = points['lon'][0]
    coords2 = coords2.reshape((time_md.size,4))

    U_interped=np.squeeze(interpolator_U(coords2))
    V_interped=np.squeeze(interpolator_V(coords2))
    TMP_interped=np.squeeze(interpolator_TMP(coords2))

    U_interped_xr=coords_info_2D.copy()
    U_interped_xr['data'].values=U_interped.reshape(U_interped.size,1,1)
    V_interped_xr=coords_info_2D.copy()
    V_interped_xr['data'].values=V_interped.reshape(V_interped.size,1,1)
    TMP_interped_xr=coords_info_2D.copy()
    TMP_interped_xr['data'].values=TMP_interped.reshape(TMP_interped.size,1,1)
    
    sta_graphics.draw_point_fcst(t2m=TMP_interped_xr,u10m=U_interped_xr,v10m=V_interped_xr,rn=rn,
        model=model,
        output_dir=output_dir,
        points=points,
        extra_info=extra_info
            )        
Пример #21
0
def Time_Crossection_rh_uv_Temp(initTime=None,model='ECMWF',points={'lon':[116.3833], 'lat':[39.9]},
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,200],data_source='MICAPS',
    t_gap=3,t_range=[0,48],lw_ratio=[16,9],output_dir=None,**kwargs):
  
    fhours = np.arange(t_range[0], t_range[1], t_gap)

    # read data
    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl=''),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        
        if(initTime==None):
            initTime = get_latest_initTime(data_dir[0][0:-1]+"850")
        filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
        TMP_4D=get_model_3D_grids(directory=data_dir[0][0:-1],filenames=filenames,levels=levels, allExists=False)
        u_4D=get_model_3D_grids(directory=data_dir[1][0:-1],filenames=filenames,levels=levels, allExists=False)
        v_4D=get_model_3D_grids(directory=data_dir[2][0:-1],filenames=filenames,levels=levels, allExists=False)
        rh_4D=get_model_3D_grids(directory=data_dir[3][0:-1],filenames=filenames,levels=levels, allExists=False)
        Psfc_3D=get_model_grids(directory=data_dir[4][0:-1],filenames=filenames,allExists=False)

    if(data_source == 'CIMISS'):
        if(initTime != None):
            filename = utl.model_filename(initTime, 0,UTC=True)
        else:
            filename=utl.filename_day_back_model(day_back=0,fhour=0,UTC=True)
        try:
            rh_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='RHU'),
                        fcst_levels=levels, fcst_ele="RHU", units='%',pbar=True)

            u_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIU'),
                        fcst_levels=levels, fcst_ele="WIU", units='m/s',pbar=True)
                
            v_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        fcst_levels=levels, fcst_ele="WIV", units='m/s',pbar=True)

            TMP_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='TEM'),
                        fcst_levels=levels, fcst_ele="TEM", units='K',pbar=True)
            TMP_4D['data'].values=TMP_4D['data'].values-273.15

            Psfc_3D=CMISS_IO.cimiss_model_grids(init_time_str='20'+filename[0:8], valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='PRS'),
                        fcst_level=0, fcst_ele="PRS", units='Pa',pbar=True)
            Psfc_3D['data']=Psfc_3D['data']/100.

        except KeyError:
            raise ValueError('Can not find all data needed')

    TMP_2D=TMP_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    u_2D=u_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    v_2D=v_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    rh_2D=rh_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    rh_2D.attrs['model']=model
    rh_2D.attrs['points']=points
    Psfc_1D=Psfc_3D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
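    # broadcast the level coordinate and the surface pressure to 2-D (time x level);
    # terrain_2D = level pressure - surface pressure marks where a level is underground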
    v_2D2,pressure_2D = xr.broadcast(v_2D['data'],v_2D['level'])
    v_2D2,Psfc_2D = xr.broadcast(v_2D['data'],Psfc_1D['data'])
    terrain_2D=pressure_2D-Psfc_2D

    crossection_graphics.draw_Time_Crossection_rh_uv_Temp(
                    rh_2D=rh_2D, u_2D=u_2D, v_2D=v_2D,TMP_2D=TMP_2D,terrain_2D=terrain_2D,
                    t_range=t_range,model=model,lw_ratio=lw_ratio,output_dir=output_dir)