def minput_grid(directory, **kwargs):
    """
    Retrieve model grid data from micaps cassandra server and put
    xarray into magics minput data.
    refer to:
    https://github.com/ecmwf/magics-python/blob/master/Magics/macro.py

    Args:
        directory (string): micaps cassandra server directory.

    Examples:
        data = minput_grid("ECMWF_HR/TMP/850",
                           varattrs={'units':'C', 'long_name':'temperature'})
    """

    # retrieve data
    grid = get_model_grid(directory, **kwargs)
    if grid is None:
        return None

    # collapse every non-horizontal dimension onto its first coordinate,
    # leaving a 2D (lat, lon) field
    extra_dims = [d for d in grid.dims if d not in ('lon', 'lat')]
    for d in extra_dims:
        grid = grid.loc[{d: grid[d].values[0]}]

    # extract coordinates and field values as double precision
    lats = grid['lat'].values.astype(np.float64)
    lons = grid['lon'].values.astype(np.float64)
    field = np.squeeze(grid['data'].values.astype(np.float64))

    # hand everything over to magics
    return magics.minput(input_field=field,
                         input_latitudes_list=lats,
                         input_longitudes_list=lons,
                         input_metadata=dict(grid['data'].attrs))
def ivt(initTime=None, fhour=0, frange=None, model='ECMWF', region='中国陆地',
        show='list', width=500, getModels=False, getInitTime=False,
        noshow=False):
    """
    Analyze the vertically integrated water-vapor transport (IVT) field.

    Args:
        initTime (string, optional): model initial time YYYYmmddHH, like
            2020061320. Defaults to None, = the model latest run time.
        fhour (int, optional): model forecast hour. Defaults to 0.
        frange (list, optional): model forecast hour range,
            [start, end, step] or [start, end] which step=6.
            if frange is set, fhour is ignored.
        model (str, optional): model name. Defaults to 'ECMWF'.
            You can use "getModels=True" return all model names.
        region (str or list, optional): Predifined region name, like '中国',
            '中国陆地', '华北', '东北', '华东', '华中', '华南', '西南', '西北',
            '新疆', '青藏'. Defaults to '中国陆地'.
            Or [lonmin, lonmax, latmin, latmax]
        show (str, optional): 'list', show all plots in one cell.
            'tab', show one plot in each tab page.
            'animation', show gif animation.
        width (int, optional): Width of the displayed image. Defaults to 500.
        noshow (bool, optional): just return the plots.
    """

    # capture the call arguments so draw_multiple_plots can replay this
    # function once per forecast hour when frange is given
    kwargs = locals().copy()

    # set and check model directory: [specific humidity, u, v,
    # surface pressure, mean sea-level pressure]
    model_dirs = {
        'ECMWF': [
            "ECMWF_HR/SPFH/", "ECMWF_HR/UGRD/", "ECMWF_HR/VGRD/",
            "ECMWF_HR/PRES/SURFACE/", "ECMWF_HR/PRMSL/"
        ],
        'GRAPES': [
            'GRAPES_GFS/SPFH/', 'GRAPES_GFS/UGRD/', 'GRAPES_GFS/VGRD/',
            'GRAPES_GFS/PRES/SURFACE/', 'GRAPES_GFS/PRMSL/'
        ]
    }
    if getModels:
        return list(model_dirs.keys())
    model_dir = check_model(model, model_dirs)
    if model_dir is None:
        return None

    # check initTime; '1000' appends a level sub-directory used only to
    # probe the latest run time
    if initTime is None:
        initTime = get_latest_initTime(model_dir[0] + '1000')
    initTime = check_initTime(initTime)
    if getInitTime:
        return initTime

    # check frange: delegate to the multi-plot driver (fhour is ignored)
    if frange is not None:
        return util.draw_multiple_plots(ivt, kwargs)

    # prepare data
    filename = initTime.strftime("%y%m%d%H") + '.' + str(int(fhour)).zfill(3)
    levels = [1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300]
    qData = get_model_3D_grid(model_dir[0], filename, levels=levels)
    if qData is None:
        return None
    uData = get_model_3D_grid(model_dir[1], filename, levels=levels)
    if uData is None:
        return None
    vData = get_model_3D_grid(model_dir[2], filename, levels=levels)
    if vData is None:
        return None
    sPres = get_model_grid(model_dir[3], filename)
    if sPres is None:
        return None
    mslp = get_model_grid(model_dir[4], filename)
    if mslp is None:
        return None

    # get the coordinates (fhour is replaced by the value actually read
    # from the data file)
    lon = qData.lon.values
    lat = qData.lat.values
    lev = qData.level.values
    time = qData.forecast_reference_time.values
    fhour = qData.forecast_period.values[0]

    # comform surface and high variables: regrid the surface fields onto
    # the upper-air grid, then smooth mslp
    sPres = hinterp(sPres.data.values.squeeze(), sPres.lon.values,
                    sPres.lat.values, lon, lat)
    mslp = hinterp(mslp.data.values.squeeze(), mslp.lon.values,
                   mslp.lat.values, lon, lat)
    mslp = grid_smooth(mslp, radius=4, method='CRES')
    qData = qData.data.values.squeeze()
    uData = uData.data.values.squeeze()
    vData = vData.data.values.squeeze()

    # compute IVT (zonal and meridional components)
    iquData, iqvData = cal_ivt(qData, uData, vData, lon, lat, lev,
                               surf_pres=sPres)

    # draw the figure
    plot = draw_ivt(iquData, iqvData, lon, lat, mslp=mslp,
                    map_region=get_map_region(region),
                    title_kwargs={
                        'name': model.upper(),
                        'time': time,
                        'fhour': fhour,
                        'tzone': 'BJT'
                    })
    if noshow:
        # return the plot together with a zero-padded forecast-hour tag
        return plot, str(int(fhour)).zfill(3)
    else:
        return plot
def point_uv_gust_tmp_rh_rn_fcst(output_dir=None,
                                 t_range=[0, 60],
                                 t_gap=3,
                                 points={
                                     'lon': [116.3833],
                                     'lat': [39.9],
                                     'altitude': [1351]
                                 },
                                 initTime=None,
                                 day_back=0,
                                 extra_info={
                                     'output_head_name': ' ',
                                     'output_tail_name': ' ',
                                     'point_name': ' '
                                 },
                                 **kwargs):
    """
    Forecast 2 m temperature, 10 m wind, relative humidity, rain and a
    statistically corrected 10 m gust at the given point(s).

    The gust forecast is obtained by regressing historical observed gusts
    (PLOT_GUST station data) against the model 10 m wind speed over the
    last ~10 days of runs, then applying that regression to the forecast
    wind speed.

    Args:
        output_dir (str, optional): directory for the output graphics.
        t_range (list, optional): forecast hour range [start, end).
        t_gap (int, optional): forecast hour step; also selects the rain
            accumulation directory (RAIN03, RAIN06, ...).
        points (dict, optional): target point(s) with 'lon', 'lat',
            'altitude' lists.
        initTime (str, optional): model initial time 'YYmmddHH'; the latest
            run is used when None.
        day_back (int, optional): kept for interface compatibility;
            currently unused in the body.
        extra_info (dict, optional): labels forwarded to the plot routine.

    Raises:
        ValueError: when the required data directories cannot be found.
    """
    #+get all the directories needed
    try:
        dir_rqd = [
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='T2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='u10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='v10m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='rh2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source='中央气象台中短期指导',
                              var_name='RAIN' + str(t_gap).zfill(2)),
            utl.Cassandra_dir(data_type='surface', data_source='OBS',
                              var_name='PLOT_GUST')
        ]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    #-get all the directories needed

    if initTime is None:
        initTime = MICAPS_IO.get_latest_initTime(dir_rqd[0])

    # the latest gust observation anchors the training window
    gust_sta = MICAPS_IO.get_station_data(directory=dir_rqd[5], dropna=True,
                                          cache=False)
    datetime_sta = pd.to_datetime(
        str(gust_sta.time[0])).replace(tzinfo=None).to_pydatetime()
    datetime_model_initTime = datetime.strptime('20' + initTime, '%Y%m%d%H')

    # collect matched (model wind, observed gust) pairs: runs every 12 h
    # over 10 days x forecast hours every 3 h x hourly observations
    u10_his_md = []
    v10_his_md = []
    wsp_his_sta_point = []
    for iinit in range(0, 240, 12):
        for ifhour in range(0, 87, 3):
            for iobs in range(0, 168, 1):
                initTime_his = datetime_model_initTime - timedelta(hours=iinit)
                validTime_his = initTime_his + timedelta(hours=ifhour)
                staTime_his = datetime_sta - timedelta(hours=iobs)
                if staTime_his == validTime_his:
                    model_filename_his = (
                        initTime_his.strftime('%Y%m%d%H')[2:10] + '.' +
                        str(ifhour).zfill(3))
                    sta_filename_his = (validTime_his.strftime('%Y%m%d%H') +
                                        '0000.000')
                    data_md1 = MICAPS_IO.get_model_grid(
                        dir_rqd[1], filename=model_filename_his)
                    if data_md1 is None:
                        continue
                    # BUGFIX: v10m must come from dir_rqd[2]; the original
                    # fetched u10m (dir_rqd[1]) twice.
                    data_md2 = MICAPS_IO.get_model_grid(
                        dir_rqd[2], filename=model_filename_his)
                    if data_md2 is None:
                        continue
                    data_sta = MICAPS_IO.get_station_data(
                        directory=dir_rqd[5], filename=sta_filename_his,
                        dropna=True, cache=True)
                    if data_sta is None:
                        continue
                    u10_his_md.append(data_md1)
                    v10_his_md.append(data_md2)
                    wsp_his_sta_interp = utl.sta_to_point_interpolation(
                        points=points, sta=data_sta, var_name='Gust_speed')
                    wsp_his_sta_point.append(wsp_his_sta_interp[:])

    u10_his_md = xr.concat(u10_his_md, dim='time')
    v10_his_md = xr.concat(v10_his_md, dim='time')
    wsp_his_md = (u10_his_md**2 + v10_his_md**2)**0.5
    wsp_his_md_point = wsp_his_md.interp(lon=('points', points['lon']),
                                         lat=('points', points['lat']))

    # linear regression: observed gust ~ model wind speed.
    # `normalize=False` was dropped: it equals the historical default and
    # the keyword was removed in scikit-learn 1.2.
    model = LinearRegression(copy_X=True, fit_intercept=True, n_jobs=1)
    x = np.squeeze(wsp_his_md_point['data'].values).reshape(-1, 1)
    y = np.squeeze(wsp_his_sta_point).reshape(-1, 1)
    model.fit(x, y)
    # a weak linear relation falls back to a quadratic polynomial fit
    if (model.coef_ < 0.2):
        f2 = np.polyfit(np.squeeze(x), np.squeeze(y), 2)
        model2 = np.poly1d(f2)

    # retrieve the forecast series at the point(s)
    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    t2m = utl.get_model_points_gy(dir_rqd[0], filenames, points,
                                  allExists=False)
    u10m = utl.get_model_points_gy(dir_rqd[1], filenames, points,
                                   allExists=False)
    v10m = utl.get_model_points_gy(dir_rqd[2], filenames, points,
                                   allExists=False)
    rh = utl.get_model_points_gy(dir_rqd[3], filenames, points,
                                 allExists=False)
    rn = utl.get_model_points_gy(dir_rqd[4], filenames, points,
                                 allExists=False)

    gust10m_predict = u10m.copy()
    # BUGFIX: use >= so coef_ == 0.2 takes the linear branch — model2 only
    # exists when coef_ < 0.2 (the original raised NameError at exactly 0.2).
    if (model.coef_ >= 0.2):
        gust10m_predict['data'].values = np.squeeze(
            model.predict(
                np.squeeze((u10m['data'].values**2 +
                            v10m['data'].values**2)**0.5).reshape(
                                -1, 1))).reshape(-1, 1, 1)
    else:
        gust10m_predict['data'].values = np.squeeze(
            model2(
                np.squeeze((u10m['data'].values**2 +
                            v10m['data'].values**2)**0.5))).reshape(-1, 1, 1)

    sta_graphics.draw_point_uv_tmp_rh_rn_gust_fcst(
        t2m=t2m, u10m=u10m, v10m=v10m, rh=rh, rn=rn, gust=gust10m_predict,
        model='中央气象台中短期指导', output_dir=output_dir, points=points,
        extra_info=extra_info)
def mslp_gust10m(initTime=None, fhour=6, day_back=0, model='ECMWF',
                 map_ratio=19 / 9, zoom_ratio=20, cntr_pnt=[102, 34],
                 south_China_sea=True, area='全国', city=False,
                 output_dir=None, Global=False):
    """
    Plot mean sea-level pressure together with the 6 h maximum 10 m gust.

    Args:
        initTime (str, optional): model initial time; when None, the run
            `day_back` days back is used.
        fhour (int, optional): forecast hour.
        day_back (int, optional): days to step back when initTime is None.
        model (str, optional): model name for the Cassandra directories.
        map_ratio, zoom_ratio, cntr_pnt: map window controls; overridden
            when `area` names a predefined region.
        south_China_sea (bool, optional): draw the South China Sea inset
            (disabled automatically for sub-national areas).
        area (str, optional): named region, default '全国' (whole country).
        city (bool, optional): draw city labels.
        output_dir (str, optional): where to save the figure.
        Global (bool, optional): draw on a global map.

    Raises:
        ValueError: when the data directories cannot be found.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='PRMSL'),
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='10M_GUST_6H')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initTime != None):
        filename = utl.model_filename(initTime, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back,
                                               fhour=fhour)

    # retrieve data from micaps server
    mslp = get_model_grid(data_dir[0], filename=filename)
    if mslp is None:
        return
    gust = get_model_grid(data_dir[1], filename=filename)
    if gust is None:
        return
    init_time = mslp.coords['forecast_reference_time'].values

    # prepare data: a named area overrides the centre point / zoom
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    # padding beyond the extent so contour labels near the edge survive
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    mask1 = ((mslp['lon'] > (map_extent[0] - delt_x)) &
             (mslp['lon'] < (map_extent[1] + delt_x)) &
             (mslp['lat'] > (map_extent[2] - delt_y)) &
             (mslp['lat'] < (map_extent[3] + delt_y)))
    mask2 = ((mslp['lon'] > (map_extent[0] - delt_x)) &
             (mslp['lon'] < (map_extent[1] + delt_x)))
    mask3 = ((mslp['lat'] > (map_extent[2] - delt_y)) &
             (mslp['lat'] < (map_extent[3] + delt_y)))
    #- to solve the problem of labels on all the contours

    # pack plain dicts for the plotting routine.
    # NOTE(review): gust reuses the masks built from mslp's coordinates —
    # this assumes both fields share the same grid; confirm upstream.
    mslp = {
        'lon': mslp.coords['lon'].where(mask2, drop=True).values,
        'lat': mslp.coords['lat'].where(mask3, drop=True).values,
        'data': np.squeeze(mslp['data'].where(mask1, drop=True).values),
        'model': model,
        'fhour': fhour,
        'init_time': init_time
    }
    gust = {
        'lon': gust.coords['lon'].where(mask2, drop=True).values,
        'lat': gust.coords['lat'].where(mask3, drop=True).values,
        'data': np.squeeze(gust['data'].where(mask1, drop=True).values),
    }

    elements_graphics.draw_mslp_gust10m(gust=gust, mslp=mslp,
                                        map_extent=map_extent,
                                        regrid_shape=20, city=city,
                                        south_China_sea=south_China_sea,
                                        output_dir=output_dir,
                                        Global=Global)
def Crosssection_Wind_Temp_RH(
        initial_time=None,
        fhour=24,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        day_back=0,
        model='ECMWF',
        output_dir=None,
        st_point=[43.5, 111.5],
        ed_point=[33, 125.0],
        map_extent=[70, 140, 15, 55],
        h_pos=[0.125, 0.665, 0.25, 0.2]):
    """
    Draw a vertical cross section of wind, temperature and relative
    humidity between st_point and ed_point.

    Args:
        initial_time (str, optional): model initial time; when None, the
            run `day_back` days back is used.
        fhour (int): forecast hour.
        levels (list): pressure levels (hPa) retrieved for the section.
        day_back (int): days to step back when initial_time is None.
        model (str): model name for the Cassandra directories.
        output_dir (str, optional): where to save the figure.
        st_point, ed_point (list): [lat, lon] endpoints of the section.
        map_extent (list): [lonmin, lonmax, latmin, latmax] of the
            location inset map.
        h_pos (list): axes position of the inset map.

    Raises:
        ValueError: when the data directories cannot be found.

    Changes vs. the previous revision: removed a duplicate VGRD fetch
    (`v2`), a duplicate `t is None` check and several unused locals
    (`resolution`, meshgrid, grid deltas, an unused dewpoint section) —
    none of which affected the drawn output.
    """
    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='RH', lvl=''),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='UGRD', lvl=''),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='VGRD', lvl=''),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='TMP', lvl=''),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='HGT', lvl='500'),
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='PSFC')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if initial_time is not None:
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back,
                                               fhour=fhour)

    # retrieve data from micaps server; [0:-1] strips the trailing '/'
    # of the level-less 3D directories
    rh = get_model_3D_grid(directory=data_dir[0][0:-1], filename=filename,
                           levels=levels, allExists=False)
    if rh is None:
        return
    rh = rh.metpy.parse_cf().squeeze()

    u = get_model_3D_grid(directory=data_dir[1][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if u is None:
        return
    u = u.metpy.parse_cf().squeeze()

    v = get_model_3D_grid(directory=data_dir[2][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if v is None:
        return
    v = v.metpy.parse_cf().squeeze()

    t = get_model_3D_grid(directory=data_dir[3][0:-1], filename=filename,
                          levels=levels, allExists=False)
    if t is None:
        return
    t = t.metpy.parse_cf().squeeze()

    gh = get_model_grid(data_dir[4], filename=filename)
    psfc = get_model_grid(data_dir[5], filename=filename)
    psfc = psfc.metpy.parse_cf().squeeze()

    # restrict surface pressure to the upper-air domain, broadcast it onto
    # the 3D grid and drop missing (<= -10000) values
    mask1 = ((psfc['lon'] >= t['lon'].values.min()) &
             (psfc['lon'] <= t['lon'].values.max()) &
             (psfc['lat'] >= t['lat'].values.min()) &
             (psfc['lat'] <= t['lat'].values.max()))
    _, psfc_bdcst = xr.broadcast(t['data'],
                                 psfc['data'].where(mask1, drop=True))
    mask2 = (psfc_bdcst > -10000)
    psfc_bdcst = psfc_bdcst.where(mask2, drop=True)

    # interpolate every field along the section
    cross_rh = cross_section(rh, st_point,
                             ed_point).set_coords(('lat', 'lon'))
    cross_u = cross_section(u, st_point,
                            ed_point).set_coords(('lat', 'lon'))
    cross_v = cross_section(v, st_point,
                            ed_point).set_coords(('lat', 'lon'))
    cross_psfc = cross_section(psfc_bdcst, st_point, ed_point)

    # tag units so metpy can rotate the winds into the section frame
    cross_u['data'].attrs['units'] = units.meter / units.second
    cross_v['data'].attrs['units'] = units.meter / units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(
        cross_u['data'], cross_v['data'])

    cross_Temp = cross_section(t, st_point,
                               ed_point).set_coords(('lat', 'lon'))

    # terrain proxy: pressure minus surface pressure along the section
    _, pressure = xr.broadcast(cross_rh['data'], cross_Temp['level'])
    cross_terrain = pressure - cross_psfc

    crossection_graphics.draw_Crosssection_Wind_Temp_RH(
        cross_rh=cross_rh, cross_Temp=cross_Temp, cross_u=cross_u,
        cross_v=cross_v, cross_terrain=cross_terrain, gh=gh, h_pos=h_pos,
        st_point=st_point, ed_point=ed_point, levels=levels,
        map_extent=map_extent, model=model, output_dir=output_dir)
def dT2m_mx24(initTime=None, fhour=48, map_ratio=19 / 9, zoom_ratio=20,
              cntr_pnt=[102, 34], area=None, south_China_sea=True,
              day_back=0, **kargws):
    """
    Plot the 24 h change of the 24 h maximum 2 m temperature.

    The change is the difference between the forecast valid at `fhour` and
    the forecast valid 24 h earlier; below fhour=48 the earlier field is
    taken from a previous model run.

    Args:
        initTime (str, optional): model initial time 'YYmmddHH'; the run
            `day_back` days back is used when None.
        fhour (int): forecast hour, must be >= 24.
        map_ratio, zoom_ratio, cntr_pnt: map window controls; overridden
            when `area` names a predefined region.
        area (str, optional): named region.
        south_China_sea (bool): draw the South China Sea inset.
        day_back (int, optional): days to step back when initTime is None.
            New parameter — the previous revision referenced an undefined
            `day_back` here, raising NameError whenever initTime was None.
        **kargws: forwarded to draw_SCMOC.draw_dT2m.
    """
    data_dir = [
        utl.Cassandra_dir(data_type='surface',
                          data_source='中央气象台中短期指导',
                          var_name='Tmx_2m')
    ]

    # BUGFIX: `day_back` is now a real parameter (was an undefined name)
    if initTime is None:
        initTime = utl.filename_day_back_model(day_back=day_back,
                                               fhour=fhour)[0:8]

    # choose the pair of files 24 h apart in valid time
    filename1 = initTime + '.' + str(fhour).zfill(3)
    if fhour >= 48:
        # both fields from the same run
        fhour2 = fhour - 24
        filename2 = initTime + '.' + str(fhour2).zfill(3)
    if 36 <= fhour < 48:
        # earlier field from the run 12 h before
        fhour2 = fhour - 12
        initTime2 = (datetime.strptime('20' + initTime, '%Y%m%d%H') -
                     timedelta(hours=12)).strftime('%Y%m%d%H')[2:10]
        filename2 = initTime2 + '.' + str(fhour2).zfill(3)
    if 24 <= fhour < 36:
        # earlier field from the run 24 h before, same forecast hour
        fhour2 = fhour
        initTime2 = (datetime.strptime('20' + initTime, '%Y%m%d%H') -
                     timedelta(hours=24)).strftime('%Y%m%d%H')[2:10]
        filename2 = initTime2 + '.' + str(fhour2).zfill(3)
    if fhour < 24:
        print('fhour should > 24')
        return

    # prepare data
    T_2m1 = MICAPS_IO.get_model_grid(data_dir[0], filename=filename1)
    T_2m2 = MICAPS_IO.get_model_grid(data_dir[0], filename=filename2)
    dTmx_2m = T_2m1.copy()
    dTmx_2m['data'].values = T_2m1['data'].values - T_2m2['data'].values

    # set map extent
    if area != '全国':
        south_China_sea = False
    if area is not None:
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    mask1 = ((dTmx_2m['lon'] > map_extent[0] - delt_x) &
             (dTmx_2m['lon'] < map_extent[1] + delt_x) &
             (dTmx_2m['lat'] > map_extent[2] - delt_y) &
             (dTmx_2m['lat'] < map_extent[3] + delt_y))
    dTmx_2m = dTmx_2m.where(mask1, drop=True)
    #- to solve the problem of labels on all the contours

    dTmx2 = xr.DataArray(
        np.squeeze(dTmx_2m['data'].values, axis=0),
        name='data',
        coords={
            'time': ('time', [dTmx_2m['time'].values[0]]),
            'fhour': ('time', [fhour]),
            'lat': ('lat', dTmx_2m['lat'].values),
            'lon': ('lon', dTmx_2m['lon'].values)
        },
        dims=('time', 'lat', 'lon'),
        attrs={
            'model_name': '中央气象台中短期指导',
            'var_name': '2米最高温度24小时变温',
            'vhours': 24
        })
    draw_SCMOC.draw_dT2m(dTmx2, map_extent=map_extent,
                         south_China_sea=south_China_sea, **kargws)
def gh_uv_wsp(initTime=None, fhour=6, day_back=0, model='ECMWF', gh_lev=500,
              uv_lev=850, map_ratio=14 / 9, zoom_ratio=20,
              cntr_pnt=[104, 34], south_China_sea=True, area=None,
              city=False, output_dir=None, data_source='MICAPS',
              Global=False, **kwargs):
    """
    Plot geopotential height at gh_lev together with wind and wind speed
    at uv_lev.

    Args:
        initTime (str, optional): model initial time; when None, the run
            `day_back` days back is used.
        fhour (int): forecast hour.
        day_back (int): days to step back when initTime is None.
        model (str): model name for the chosen data source.
        gh_lev (int): pressure level (hPa) of the height field.
        uv_lev (int): pressure level (hPa) of the wind field.
        map_ratio, zoom_ratio, cntr_pnt: map window controls; overridden
            when `area` names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str, optional): named region; disables the inset.
        city (bool): draw city labels.
        output_dir (str, optional): where to save the figure.
        data_source (str): 'MICAPS' or 'CIMISS'.
        Global (bool): draw on a global map.

    Raises:
        ValueError: when directories/data cannot be found, or when
            data_source is not 'MICAPS' or 'CIMISS'.
    """
    if area is not None:
        south_China_sea = False

    if data_source == 'MICAPS':
        # micaps data directory
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='HGT', lvl=gh_lev),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='UGRD', lvl=uv_lev),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='VGRD', lvl=uv_lev),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename (a leftover debug print(initTime) was removed here)
        if initTime is not None:
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        psfc = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        # consistency with the other fields: bail out on missing data
        if psfc is None:
            return
    elif data_source == 'CIMISS':
        # get filename (CIMISS times are in UTC)
        if initTime is not None:
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour, UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            if gh is None:
                return
            # scale by 1/10 — presumably gpm -> dagpm to match the MICAPS
            # height convention; TODO confirm
            gh['data'].values = gh['data'].values / 10.

            u = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            if u is None:
                return
            v = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            if v is None:
                return
            psfc = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                fcst_level=0, fcst_ele="PRS", units='Pa')
            # Pa -> hPa
            psfc['data'] = psfc['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')
    else:
        # previously an unknown data_source fell through to a NameError
        raise ValueError('data_source must be "MICAPS" or "CIMISS"')

    # prepare data
    if area is not None:
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    map_extent, delt_x, delt_y = utl.get_map_extent(cntr_pnt, zoom_ratio,
                                                    map_ratio)

    # mask grid points below the model terrain
    gh = utl.mask_terrian(gh_lev, psfc, gh)
    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)

    #+ to solve the problem of labels on all the contours
    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    #- to solve the problem of labels on all the contours

    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])
    wsp = (u['data']**2 + v['data']**2)**0.5
    gh.attrs['model'] = model

    synoptic_graphics.draw_gh_uv_wsp(wsp=wsp, gh=gh, uv=uv,
                                     map_extent=map_extent,
                                     regrid_shape=20, city=city,
                                     south_China_sea=south_China_sea,
                                     output_dir=output_dir, Global=Global)
def mslp_rain_snow(initTime=None, fhour=24, day_back=0, model='ECMWF',
                   atime=6, data_source='MICAPS', map_ratio=14 / 9,
                   zoom_ratio=20, cntr_pnt=[104, 34], south_China_sea=True,
                   area=None, city=False, output_dir=None, Global=False,
                   **kwargs):
    '''
    Plot mean sea-level pressure with classified rain / snow / sleet
    areas accumulated over the last `atime` hours.

    Known issues:
    1. CIMISS does not provide GRAPES-GFS snowfall, so no figure can be
       produced when data_source='CIMISS' and model='GRAPES_GFS'.
    '''
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='PRMSL'),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='RAIN' + '%02d' % atime),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='SNOW' + '%02d' % atime),
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        # NOTE(review): filename_mslp (mid-accumulation valid time) is
        # computed but never used below — mslp is read at `filename`
        # instead. Possibly a latent bug; verify intent.
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
            if (atime > 3):
                filename_mslp = utl.model_filename(
                    initTime, int(fhour - atime / 2))
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
            if (atime > 3):
                filename_mslp = utl.filename_day_back_model(
                    day_back=day_back, fhour=int(fhour - atime / 2))

        # retrieve data from micaps server
        mslp = get_model_grid(data_dir[0], filename=filename)
        if mslp is None:
            return
        rain = get_model_grid(data_dir[1], filename=filename)
        if rain is None:
            return
        snow = get_model_grid(data_dir[2], filename=filename)
        if snow is None:
            return

    if (data_source == 'CIMISS'):
        # get filename (CIMISS times are in UTC)
        # NOTE(review): filename_gh is likewise unused below; verify.
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
            if (atime > 3):
                filename_gh = utl.filename_day_back_model(
                    initTime, fhour=int(fhour - atime / 2), UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour, UTC=True)
            if (atime > 3):
                filename_gh = utl.filename_day_back_model(
                    day_back=day_back, fhour=int(fhour - atime / 2),
                    UTC=True)
        try:
            # retrieve data from CIMISS server; ECMWF uses a different
            # element code for sea-level pressure
            if (model == 'ECMWF'):
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8], valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0, fcst_ele="GSSP", units='Pa')
            else:
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8], valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0, fcst_ele="SSP", units='Pa')
            if mslp is None:
                return
            # Pa -> hPa
            mslp['data'] = mslp['data'] / 100.

            # accumulated total precipitation at fhour and fhour-atime
            TPE1 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TPE'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0, fcst_ele="TPE", units='kg*m^-2')
            if TPE1 is None:
                return
            TPE2 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour - atime,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TPE'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0, fcst_ele="TPE", units='kg*m^-2')
            if TPE2 is None:
                return

            # accumulated total snowfall at fhour and fhour-atime
            TTSP1 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TTSP'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0, fcst_ele="TTSP", units='kg*m^-2')
            if TTSP1 is None:
                return
            TTSP2 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8], valid_time=fhour - atime,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TTSP'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0, fcst_ele="TTSP", units='kg*m^-2')
            if TTSP2 is None:
                return
        except KeyError:
            raise ValueError('Can not find all data needed')

        # de-accumulate over the atime window.
        # NOTE(review): the *1000 scaling is not obviously consistent with
        # the requested kg*m^-2 units — confirm against the CIMISS docs.
        rain = TPE1.copy(deep=True)
        rain['data'].values = (TPE1['data'].values -
                               TPE2['data'].values) * 1000
        snow = TTSP1.copy(deep=True)
        snow['data'].values = (TTSP1['data'].values -
                               TTSP2['data'].values) * 1000

    # set map extent
    if (area != None):
        south_China_sea = False
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    # padding beyond the extent so contour labels near the edge survive
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    mask1 = (mslp['lon'] > map_extent[0] - delt_x) & (
        mslp['lon'] < map_extent[1] + delt_x) & (
            mslp['lat'] > map_extent[2] - delt_y) & (
                mslp['lat'] < map_extent[3] + delt_y)
    mask2 = (rain['lon'] > map_extent[0] - delt_x) & (
        rain['lon'] < map_extent[1] + delt_x) & (
            rain['lat'] > map_extent[2] - delt_y) & (
                rain['lat'] < map_extent[3] + delt_y)

    mslp = mslp.where(mask1, drop=True)
    mslp.attrs['model'] = model
    rain = rain.where(mask2, drop=True)
    snow = snow.where(mask2, drop=True)
    snow.attrs['atime'] = atime

    # classify precipitation type by comparing rain and snow amounts:
    # sleet where both contribute, snow where snow dominates, rain where
    # snow is negligible (0.1 mm thresholds)
    rain_snow = xr.merge(
        [rain.rename({'data': 'rain'}), snow.rename({'data': 'snow'})])
    mask1 = ((rain_snow['rain'] - rain_snow['snow']) > 0.1) & (
        rain_snow['snow'] > 0.1)
    sleet = rain_snow['rain'].where(mask1)
    mask2 = ((rain_snow['rain'] - rain_snow['snow']) < 0.1) & (
        rain_snow['snow'] > 0.1)
    snw = rain_snow['snow'].where(mask2)
    mask3 = (rain_snow['rain'] > 0.1) & (rain_snow['snow'] < 0.1)
    rn = rain_snow['rain'].where(mask3)
    rn.attrs['atime'] = atime

    # draw
    QPF_graphics.draw_mslp_rain_snow(rain=rn, snow=snw, sleet=sleet,
                                     mslp=mslp, map_extent=map_extent,
                                     regrid_shape=20, city=city,
                                     south_China_sea=south_China_sea,
                                     output_dir=output_dir, Global=Global)
def Miller_Composite_Chart(initial_time=None, fhour=24, day_back=0,
                           model='GRAPES_GFS', map_ratio=19 / 9,
                           zoom_ratio=20, cntr_pnt=[102, 34], Global=False,
                           south_China_sea=True, area='全国', city=False,
                           output_dir=None):
    """
    Draw a Miller composite severe-weather analysis chart: jet-core winds
    at 850/500/300 hPa, 500 hPa cyclonic vorticity advection, 700 hPa
    dewpoint depression, surface dewpoint, lifted index, sea-level
    pressure and 12 h pressure / 500 hPa height changes.

    Args:
        initial_time (str, optional): model initial time; when None, the
            run `day_back` days back is used.
        fhour (int): forecast hour (fields 12 h earlier are also read
            for the change terms).
        day_back (int): days to step back when initial_time is None.
        model (str): model name for the Cassandra directories.
        map_ratio, zoom_ratio, cntr_pnt: map window controls; overridden
            when `area` names a predefined region.
        Global (bool): draw on a global map.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): named region, default '全国'.
        city (bool): draw city labels.
        output_dir (str, optional): where to save the figure.

    Raises:
        ValueError: when the data directories cannot be found.
    """
    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='RH', lvl='700'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='UGRD', lvl='300'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='VGRD', lvl='300'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='UGRD', lvl='500'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='VGRD', lvl='500'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='UGRD', lvl='850'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='VGRD', lvl='850'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='TMP', lvl='700'),
            utl.Cassandra_dir(data_type='high', data_source=model,
                              var_name='HGT', lvl='500'),
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='BLI'),
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='Td2m'),
            utl.Cassandra_dir(data_type='surface', data_source=model,
                              var_name='PRMSL')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename: current hour plus 12 h earlier for the change fields
    if initial_time is not None:
        filename = utl.model_filename(initial_time, fhour)
        filename2 = utl.model_filename(initial_time, fhour - 12)
    else:
        filename = utl.filename_day_back_model(day_back=day_back,
                                               fhour=fhour)
        filename2 = utl.filename_day_back_model(day_back=day_back,
                                                fhour=fhour - 12)

    # retrieve data from micaps server
    rh_700 = get_model_grid(directory=data_dir[0], filename=filename)
    if rh_700 is None:
        return
    u_300 = get_model_grid(directory=data_dir[1], filename=filename)
    if u_300 is None:
        return
    v_300 = get_model_grid(directory=data_dir[2], filename=filename)
    if v_300 is None:
        return
    u_500 = get_model_grid(directory=data_dir[3], filename=filename)
    if u_500 is None:
        return
    v_500 = get_model_grid(directory=data_dir[4], filename=filename)
    if v_500 is None:
        return
    u_850 = get_model_grid(directory=data_dir[5], filename=filename)
    if u_850 is None:
        return
    v_850 = get_model_grid(directory=data_dir[6], filename=filename)
    if v_850 is None:
        return
    t_700 = get_model_grid(directory=data_dir[7], filename=filename)
    if t_700 is None:
        return
    hgt_500 = get_model_grid(directory=data_dir[8], filename=filename)
    if hgt_500 is None:
        return
    hgt_500_2 = get_model_grid(directory=data_dir[8], filename=filename2)
    if hgt_500_2 is None:
        return
    BLI = get_model_grid(directory=data_dir[9], filename=filename)
    if BLI is None:
        return
    Td2m = get_model_grid(directory=data_dir[10], filename=filename)
    if Td2m is None:
        return
    PRMSL = get_model_grid(directory=data_dir[11], filename=filename)
    if PRMSL is None:
        return
    PRMSL2 = get_model_grid(directory=data_dir[11], filename=filename2)
    if PRMSL2 is None:
        return

    lats = np.squeeze(rh_700['lat'].values)
    lons = np.squeeze(rh_700['lon'].values)
    x, y = np.meshgrid(rh_700['lon'], rh_700['lat'])

    # attach units so metpy calculations work; winds converted to knots
    tmp_700 = t_700['data'].values.squeeze() * units('degC')
    u_300 = (u_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_300 = (v_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_500 = (u_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_500 = (v_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_850 = (u_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_850 = (v_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    # height: * 10 / 9.8 — presumably dagpm -> geometric meters via g;
    # TODO confirm the source units
    hgt_500 = (hgt_500['data'].values.squeeze()) * 10 / 9.8 * units.meter
    rh_700 = rh_700['data'].values.squeeze()
    lifted_index = BLI['data'].values.squeeze() * units.kelvin
    Td_sfc = Td2m['data'].values.squeeze() * units('degC')
    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)
    avor_500 = mpcalc.absolute_vorticity(u_500, v_500, dx, dy,
                                         y * units.degree)
    pmsl = PRMSL['data'].values.squeeze() * units('hPa')
    hgt_500_2 = (hgt_500_2['data'].values.squeeze()) * 10 / 9.8 * units.meter
    pmsl2 = PRMSL2['data'].values.squeeze() * units('hPa')

    # 500 hPa CVA (cyclonic vorticity advection), scaled and smoothed
    vort_adv_500 = mpcalc.advection(
        avor_500, [u_500.to('m/s'), v_500.to('m/s')], (dx, dy),
        dim_order='yx') * 1e9
    vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

    # smoothed wind speeds used to isolate the jet cores at each level
    wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
    wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
    wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

    Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)
    pmsl_change = pmsl - pmsl2
    hgt_500_change = hgt_500 - hgt_500_2

    # blank winds at or below 66% of each level's max speed so only the
    # jet cores are drawn
    mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
    u_500[mask_500] = np.nan
    v_500[mask_500] = np.nan
    # 300 hPa
    mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
    u_300[mask_300] = np.nan
    v_300[mask_300] = np.nan
    # 850 hPa
    mask_850 = ma.masked_less_equal(wspd_850, 0.66 * np.max(wspd_850)).mask
    u_850[mask_850] = np.nan
    v_850[mask_850] = np.nan

    # prepare map extent
    if area is not None:
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    fcst_info = {
        'lon': lons,
        'lat': lats,
        'fhour': fhour,
        'model': model,
        'init_time': t_700.coords['forecast_reference_time'].values
    }

    # BUGFIX: forward the user-supplied map options. The original
    # hard-coded city=False, south_China_sea=True, output_dir=None and
    # Global=False, silently ignoring the function's own arguments.
    synthetical_graphics.draw_Miller_Composite_Chart(
        fcst_info=fcst_info, u_300=u_300, v_300=v_300, u_500=u_500,
        v_500=v_500, u_850=u_850, v_850=v_850, pmsl_change=pmsl_change,
        hgt_500_change=hgt_500_change, Td_dep_700=Td_dep_700,
        Td_sfc=Td_sfc, pmsl=pmsl, lifted_index=lifted_index,
        vort_adv_500_smooth=vort_adv_500_smooth, map_extent=map_extent,
        add_china=True, city=city, south_China_sea=south_China_sea,
        output_dir=output_dir, Global=Global)
def compare_gh_uv(anaTime=None,
                  anamodel='GRAPES_GFS',
                  fhour=24,
                  model='ECMWF',
                  data_source='MICAPS',
                  gh_lev=500,
                  uv_lev=850,
                  area=None,
                  map_ratio=14 / 9,
                  zoom_ratio=20,
                  cntr_pnt=[104, 34],
                  **products_kwargs):
    """
    Compare a model analysis against the forecast valid at the same time.

    Retrieves geopotential height (gh_lev), u/v wind (uv_lev) and surface
    pressure both for the analysis at `anaTime` and for the forecast
    initialized `fhour` hours earlier, crops and terrain-masks the fields,
    then delegates drawing to vs_ana.draw_compare_gh_uv.

    Args:
        anaTime (str, optional): analysis time 'yymmddhh'. Defaults to the
            latest time available on the data server.
        anamodel (str, optional): currently unused; kept for interface
            compatibility.
        fhour (int): lead time (hours) of the forecast being compared.
        model (str): model name, e.g. 'ECMWF'.
        data_source (str): 'MICAPS' or 'CIMISS'.
        gh_lev (int): geopotential height pressure level (hPa).
        uv_lev (int): wind pressure level (hPa).
        area (str, optional): predefined region name; when given it overrides
            cntr_pnt/zoom_ratio.
        map_ratio, zoom_ratio, cntr_pnt: map-extent controls
            (center [lon, lat], half-height in degrees, width/height ratio).
        **products_kwargs: forwarded to the drawing routine.
    """
    if (area != None):
        south_China_sea = False  # NOTE(review): assigned but never read in this function

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='HGT', lvl=gh_lev),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='UGRD', lvl=uv_lev),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='VGRD', lvl=uv_lev),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename: the forecast initial time is the analysis time
        # shifted back by fhour
        if (anaTime == None):
            anaTime = MICAPS_IO.get_latest_initTime(data_dir[-1])
            initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                        timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]
        if (anaTime != None):
            filename_ana = utl.model_filename(anaTime, 0)
            initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                        timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]
        filename_fcst = utl.model_filename(initTime, fhour)

        # retrieve data from micaps server
        gh_ana = MICAPS_IO.get_model_grid(data_dir[0], filename=filename_ana)
        u_ana = MICAPS_IO.get_model_grid(data_dir[1], filename=filename_ana)
        v_ana = MICAPS_IO.get_model_grid(data_dir[2], filename=filename_ana)
        psfc_ana = MICAPS_IO.get_model_grid(data_dir[3], filename=filename_ana)
        gh_fcst = MICAPS_IO.get_model_grid(data_dir[0], filename=filename_fcst)
        u_fcst = MICAPS_IO.get_model_grid(data_dir[1], filename=filename_fcst)
        v_fcst = MICAPS_IO.get_model_grid(data_dir[2], filename=filename_fcst)
        psfc_fcst = MICAPS_IO.get_model_grid(data_dir[3],
                                             filename=filename_fcst)

    if (data_source == 'CIMISS'):
        # get filename
        if (anaTime != None):
            anaTime = utl.model_filename(anaTime, fhour, UTC=True)[0:8]
        else:
            anaTime = utl.filename_day_back_model(fhour=fhour, UTC=True)[0:8]
        initTime = (datetime.strptime('20' + anaTime, '%Y%m%d%H') -
                    timedelta(hours=fhour)).strftime("%Y%m%d%H")[2:10]
        try:
            # retrieve data from CIMISS server
            gh_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime, valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            gh_ana['data'].values = gh_ana['data'].values / 10.  # gpm -> dagpm
            gh_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime, valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            gh_fcst['data'].values = gh_fcst['data'].values / 10.
            u_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime, valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            u_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime, valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            v_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime, valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            v_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime, valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            psfc_ana = CMISS_IO.cimiss_model_by_time(
                '20' + anaTime, valid_time=0,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                levattrs={'long_name': 'sea_surface_pressure', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc_fcst = CMISS_IO.cimiss_model_by_time(
                '20' + initTime, valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                levattrs={'long_name': 'sea_surface_pressure', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc_ana['data'] = psfc_ana['data'] / 100.   # Pa -> hPa
            psfc_fcst['data'] = psfc_fcst['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')

    # prepare data
    # FIX: the original used `all([gh_ana, ...]) is False`, which calls
    # bool() on xarray objects and raises "truth value ... is ambiguous"
    # instead of detecting missing data; test explicitly for None.
    if any(x is None for x in (gh_ana, u_ana, v_ana, psfc_ana,
                               gh_fcst, u_fcst, v_fcst, psfc_fcst)):
        print('some data is not avaliable')
        return

    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    # crop all fields to the map extent
    gh_ana = utl.cut_xrdata(map_extent, gh_ana)
    u_ana = utl.cut_xrdata(map_extent, u_ana)
    v_ana = utl.cut_xrdata(map_extent, v_ana)
    psfc_ana = utl.cut_xrdata(map_extent, psfc_ana)
    gh_fcst = utl.cut_xrdata(map_extent, gh_fcst)
    u_fcst = utl.cut_xrdata(map_extent, u_fcst)
    v_fcst = utl.cut_xrdata(map_extent, v_fcst)
    psfc_fcst = utl.cut_xrdata(map_extent, psfc_fcst)

    # mask grid points lying below the model terrain
    u_ana = utl.mask_terrian(uv_lev, psfc_ana, u_ana)
    v_ana = utl.mask_terrian(uv_lev, psfc_ana, v_ana)
    gh_ana = utl.mask_terrian(gh_lev, psfc_ana, gh_ana)
    u_fcst = utl.mask_terrian(uv_lev, psfc_fcst, u_fcst)
    v_fcst = utl.mask_terrian(uv_lev, psfc_fcst, v_fcst)
    gh_fcst = utl.mask_terrian(gh_lev, psfc_fcst, gh_fcst)

    uv_ana = xr.merge(
        [u_ana.rename({'data': 'u'}), v_ana.rename({'data': 'v'})])
    uv_fcst = xr.merge(
        [u_fcst.rename({'data': 'u'}), v_fcst.rename({'data': 'v'})])
    gh_ana.attrs = {'model_name': model}
    u_ana.attrs = {'model_name': model}
    v_ana.attrs = {'model_name': model}
    gh_fcst.attrs = {'model_name': model}
    u_fcst.attrs = {'model_name': model}
    v_fcst.attrs = {'model_name': model}

    vs_ana.draw_compare_gh_uv(gh_ana=gh_ana, uv_ana=uv_ana,
                              gh_fcst=gh_fcst, uv_fcst=uv_fcst,
                              map_extent=map_extent,
                              **products_kwargs)
def gh500_uv850_mslp(initial_time, fhour=0, model='ECMWF'):
    """
    Analysis 500hPa geopotential height, 850hPa wind barbs, and mean sea
    level pressure.

    Retrieves the four fields from the micaps server and renders a single
    Albers-projected figure with a vertical MSLP colorbar. Returns silently
    if any field is unavailable.

    :param initial_time: initial time, string or datetime object,
        like '18042008' or datetime(2018, 4, 20, 8).
    :param fhour: forecast hour.
    :param model: model name, one of 'ECMWF', 'GRAPES' or 'NCEP'.
    :return: None.
    :raises ValueError: if `model` is not a known model name.
    """
    # micaps data directory: [height, u-wind, v-wind, mslp] per model
    data_dirs = {
        'ECMWF': [
            'ECMWF_LR/HGT/500', 'ECMWF_LR/UGRD/850', 'ECMWF_LR/VGRD/850',
            'ECMWF_LR/PRMSL'
        ],
        'GRAPES': [
            'GRAPES_GFS/HGT/500', 'GRAPES_GFS/UGRD/850',
            'GRAPES_GFS/VGRD/850', 'GRAPES_GFS/PRMSL'
        ],
        'NCEP': [
            'NCEP_GFS/HGT/500', 'NCEP_GFS/UGRD/850', 'NCEP_GFS/VGRD/850',
            'NCEP_GFS/PRMSL'
        ]
    }
    try:
        data_dir = data_dirs[model.strip().upper()]
    except KeyError:
        raise ValueError('Unknown model, choose ECMWF, GRAPES or NCEP.')

    # get filename
    filename = model_filename(initial_time, fhour)

    # retrieve data from micaps server; bail out on any missing field
    gh500 = get_model_grid(data_dir[0], filename=filename)
    if gh500 is None:
        return
    init_time = gh500.coords['init_time'].values[0]
    u850 = get_model_grid(data_dir[1], filename=filename)
    if u850 is None:
        return
    v850 = get_model_grid(data_dir[2], filename=filename)
    if v850 is None:
        return
    mslp = get_model_grid(data_dir[3], filename=filename)
    if mslp is None:
        return

    # prepare data: repack each field as a plain dict of numpy arrays
    # for the drawing helper
    gh500 = {
        'lon': gh500.coords['lon'].values,
        'lat': gh500.coords['lat'].values,
        'data': gh500.values
    }
    uv850 = {
        'lon': u850.coords['lon'].values,
        'lat': u850.coords['lat'].values,
        'udata': u850.values,
        'vdata': v850.values
    }
    mslp = {
        'lon': mslp.coords['lon'].values,
        'lat': mslp.coords['lat'].values,
        'data': mslp.values
    }

    # draw figure
    fig = plt.figure(figsize=(10.5, 6))
    plotcrs = ccrs.AlbersEqualArea(central_latitude=45.,
                                   central_longitude=100.,
                                   standard_parallels=[30., 60.])
    # two columns: main map + narrow colorbar axis
    gs = mpl.gridspec.GridSpec(1, 2, width_ratios=[1, .02],
                               bottom=.07, top=.99,
                               hspace=0.01, wspace=0.01)

    # draw main figure
    ax = plt.subplot(gs[0], projection=plotcrs)
    add_model_title('500-hPa Heights (m), 850-hPa Winds, MSLP (hPa)',
                    init_time, model=model, fhour=fhour, fontsize=14)
    plots = metp.draw_gh500_uv850_mslp(ax, mslp=mslp, gh500=gh500,
                                       uv850=uv850,
                                       map_extent=[50, 150, 0, 65],
                                       regrid_shape=20)

    # add color bar
    # NOTE(review): extendrect='True' passes a string where matplotlib
    # expects a bool; any non-empty string is truthy so it acts as True —
    # confirm intent.
    cax = plt.subplot(gs[1])
    cb = plt.colorbar(plots['mslp'], cax=cax, orientation='vertical',
                      extendrect='True', ticks=plots['mslp'].levels)
    cb.set_label('Mean sea level pressure', size=12)

    # add logo
    add_logo(fig, alpha=0.7)

    # show figure
    gs.tight_layout(fig)
    plt.show()
def wind_temp_rn_according_to_4D_data(
        initTime=None,
        fhour=6,
        day_back=0,
        model='ECMWF',
        sta_fcs={
            'lon': [101.82, 101.32, 101.84, 102.23, 102.2681],
            'lat': [28.35, 27.91, 28.32, 27.82, 27.8492],
            'altitude': [3600, 3034.62, 3240, 1669, 1941.5],
            'name': ['健美乡', '项脚乡', '\n锦屏镇', '\n马道镇', 'S9005 ']
        },
        draw_zd=True,
        levels=[
            1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 250, 200,
            150
        ],
        map_ratio=19 / 9,
        zoom_ratio=1,
        south_China_sea=False,
        area=None,
        city=False,
        output_dir=None,
        bkgd_type='satellite',
        data_source='MICAPS',
        **kwargs):
    """
    Interpolate model wind and temperature to individual stations using the
    full 4D (level, lat, lon + geopotential height) model volume, optionally
    compare against automatic-station (ZD) observations, and hand the result
    to local_scale_graphics for drawing.

    The 3D model fields are turned into scattered-point interpolators over
    (height, lat, lon); each target station is then sampled at its actual
    altitude. Where that interpolation yields NaN (station below the lowest
    model level), values fall back to the model 10 m wind / 2 m temperature.

    Args:
        initTime (str, optional): model initial time 'yymmddhh';
            defaults to the latest run found via `day_back`.
        fhour (int): forecast hour.
        day_back (int): how many days to step back when searching for a run.
        model (str): model name for the Cassandra directory lookup.
        sta_fcs (dict): target stations ('lon', 'lat', 'altitude', 'name').
        draw_zd (bool): also process/draw ZD automatic-station winds.
        levels (list): pressure levels to retrieve.
        map_ratio, zoom_ratio: map extent controls around the station mean.
        south_China_sea, area, city, output_dir, bkgd_type: drawing options.
        data_source (str): only 'MICAPS' is handled in this function.
    """
    # micaps data directory
    if (area != None):
        south_China_sea = False

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    # NOTE(review): cntr_pnt is unconditionally overwritten with the mean
    # station position here, so the value from `area` above is discarded —
    # confirm whether that is intended.
    cntr_pnt = np.append(np.mean(sta_fcs['lon']), np.mean(sta_fcs['lat']))
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    bkgd_level = utl.cal_background_zoom_ratio(zoom_ratio)

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='HGT', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='TMP', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='UGRD', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model,
                                  var_name='VGRD', lvl=''),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='u10m'),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='v10m'),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='Td2m'),
                utl.Cassandra_dir(data_type='surface', data_source=model,
                                  var_name='T2m')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
        initTime = filename[0:8]

        # retrieve data from micaps server; abort on any missing field
        gh = MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1],
                                         filename=filename,
                                         levels=levels)
        if (gh is None):
            return
        gh['data'].values = gh['data'].values * 10  # dagpm -> gpm
        TMP = MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1],
                                          filename=filename,
                                          levels=levels,
                                          allExists=False)
        if TMP is None:
            return
        u = MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if u is None:
            return
        v = MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1],
                                        filename=filename,
                                        levels=levels,
                                        allExists=False)
        if v is None:
            return
        u10m = MICAPS_IO.get_model_grid(directory=data_dir[4],
                                        filename=filename)
        if u10m is None:
            return
        v10m = MICAPS_IO.get_model_grid(directory=data_dir[5],
                                        filename=filename)
        if v10m is None:
            return
        td2m = MICAPS_IO.get_model_grid(directory=data_dir[6],
                                        filename=filename)
        if td2m is None:
            return
        t2m = MICAPS_IO.get_model_grid(directory=data_dir[7],
                                       filename=filename)
        if t2m is None:
            return

        if (draw_zd == True):
            # fetch ZD automatic-station observations valid at initTime+fhour;
            # fall back to the latest available file when that time is missing
            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            directory_obs = utl.Cassandra_dir(data_type='surface',
                                              data_source='OBS',
                                              var_name='PLOT_ALL')
            try:
                zd_sta = MICAPS_IO.get_station_data(
                    filename=validtime + '0000.000',
                    directory=directory_obs,
                    dropna=True,
                    cache=False)
                obs_valid = True
            except:
                zd_sta = MICAPS_IO.get_station_data(directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = False
            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alt'].values
            # observed 2 m wind speed/direction -> u/v components
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['Wind_speed_2m_avg'].values * units('m/s'),
                zd_sta['Wind_angle_2m_avg'].values * units.deg)
            # keep only stations inside the map extent
            idx_zd = np.where((zd_lon > map_extent[0])
                              & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2])
                              & (zd_lat < map_extent[3]))
            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    #maskout area
    delt_xy = TMP['lon'].values[1] - TMP['lon'].values[0]
    #+ to solve the problem of labels on all the contours
    mask1 = (TMP['lon'] > map_extent[0] - delt_xy) & (
        TMP['lon'] < map_extent[1] + delt_xy) & (
            TMP['lat'] > map_extent[2] - delt_xy) & (TMP['lat'] <
                                                     map_extent[3] + delt_xy)
    mask2 = (u10m['lon'] > map_extent[0] - delt_xy) & (
        u10m['lon'] < map_extent[1] + delt_xy) & (
            u10m['lat'] > map_extent[2] - delt_xy) & (u10m['lat'] <
                                                      map_extent[3] + delt_xy)
    #- to solve the problem of labels on all the contours
    TMP = TMP.where(mask1, drop=True)
    u = u.where(mask1, drop=True)
    v = v.where(mask1, drop=True)
    gh = gh.where(mask1, drop=True)
    u10m = u10m.where(mask2, drop=True)
    v10m = v10m.where(mask2, drop=True)

    #prepare interpolator
    # build scattered-point interpolators over (height, lat, lon) so that a
    # station can be sampled at its true altitude rather than on a pressure
    # surface
    Ex1 = np.squeeze(u['data'].values).flatten()
    Ey1 = np.squeeze(v['data'].values).flatten()
    Ez1 = np.squeeze(TMP['data'].values).flatten()
    z = (np.squeeze(gh['data'].values)).flatten()
    coords = np.zeros((np.size(levels), u['lat'].size, u['lon'].size, 3))
    coords[..., 1] = u['lat'].values.reshape((1, u['lat'].size, 1))
    coords[..., 2] = u['lon'].values.reshape((1, 1, u['lon'].size))
    coords = coords.reshape((Ex1.size, 3))
    coords[:, 0] = z  # geopotential height as the vertical coordinate
    interpolator_U = LinearNDInterpolator(coords, Ex1, rescale=True)
    interpolator_V = LinearNDInterpolator(coords, Ey1, rescale=True)
    interpolator_TMP = LinearNDInterpolator(coords, Ez1, rescale=True)

    #process sta_fcs 10m wind
    coords2 = np.zeros((np.size(sta_fcs['lon']), 3))
    coords2[:, 0] = sta_fcs['altitude']
    coords2[:, 1] = sta_fcs['lat']
    coords2[:, 2] = sta_fcs['lon']
    u_sta = interpolator_U(coords2)
    v_sta = interpolator_V(coords2)
    TMP_sta = interpolator_TMP(coords2)
    wsp_sta = (u_sta**2 + v_sta**2)**0.5
    # model 10 m wind / 2 m temperature at the stations, used as fallback
    u10m_2D = u10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    v10m_2D = v10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    td2m_2D = td2m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    t2m_2D = t2m.interp(lon=('points', sta_fcs['lon']),
                        lat=('points', sta_fcs['lat']))
    wsp10m_2D = (u10m_2D['data'].values**2 + v10m_2D['data'].values**2)**0.5
    winddir10m = mpcalc.wind_direction(u10m_2D['data'].values * units('m/s'),
                                       v10m_2D['data'].values * units('m/s'))
    # stations below the lowest model level give NaN — backfill from the
    # surface (10 m / 2 m) fields
    if (np.isnan(wsp_sta).any()):
        if (wsp_sta.size == 1):
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(
                wsp10m_2D[np.isnan(wsp_sta)])
            TMP_sta[np.isnan(TMP_sta)] = np.squeeze(
                np.array(t2m_2D)[np.isnan(TMP_sta)])
        else:
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(wsp10m_2D)[np.isnan(
                wsp_sta)]
            TMP_sta[np.isnan(TMP_sta)] = np.squeeze(
                np.array(t2m_2D))[np.isnan(TMP_sta)]
    # recombine: interpolated speed, but 10 m wind direction
    u_sta, v_sta = mpcalc.wind_components(wsp_sta * units('m/s'), winddir10m)

    #process zd_sta 10m wind
    zd_fcst_obs = None
    if (draw_zd is True):
        coords3 = np.zeros((np.size(zd_sm_alt), 3))
        coords3[:, 0] = zd_sm_alt
        coords3[:, 1] = zd_sm_lat
        coords3[:, 2] = zd_sm_lon
        u_sm_sta = interpolator_U(coords3)
        v_sm_sta = interpolator_V(coords3)
        wsp_sm_sta = (u_sm_sta**2 + v_sm_sta**2)**0.5
        u10m_sm = u10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        v10m_sm = v10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        wsp10m_sta = np.squeeze(
            (u10m_sm['data'].values**2 + v10m_sm['data'].values**2)**0.5)
        winddir10m_sm = mpcalc.wind_direction(
            u10m_sm['data'].values * units('m/s'),
            v10m_sm['data'].values * units('m/s'))
        # backfill NaNs, then never let the altitude-interpolated speed fall
        # below the model 10 m speed
        if (np.isnan(wsp_sm_sta).any()):
            wsp_sm_sta[np.isnan(wsp_sm_sta)] = wsp10m_sta[np.isnan(wsp_sm_sta)]
        for ista in range(0, len(wsp10m_sta)):
            if (wsp10m_sta[ista] > wsp_sm_sta[ista]):
                wsp_sm_sta[ista] = wsp10m_sta[ista]
        u_sm_sta, v_sm_sta = mpcalc.wind_components(wsp_sm_sta * units('m/s'),
                                                    winddir10m_sm)
        zd_fcst_obs = {
            'lon': zd_sm_lon,
            'lat': zd_sm_lat,
            'altitude': zd_sm_alt,
            'U': np.squeeze(np.array(u_sm_sta)),
            'V': np.squeeze(np.array(v_sm_sta)),
            'obs_valid': obs_valid,
            'U_obs': np.squeeze(np.array(zd_sm_u)),
            'V_obs': np.squeeze(np.array(zd_sm_v))
        }

    #prepare for graphics
    sta_fcs_fcst = {
        'lon': sta_fcs['lon'],
        'lat': sta_fcs['lat'],
        'altitude': sta_fcs['altitude'],
        'name': sta_fcs['name'],
        'TMP': np.array(TMP_sta),
        'U': np.squeeze(np.array(u_sta)),
        'V': np.squeeze(np.array(v_sta))
    }
    fcst_info = gh.coords
    local_scale_graphics.draw_wind_temp_according_to_4D_data(
        sta_fcs_fcst=sta_fcs_fcst,
        zd_fcst_obs=zd_fcst_obs,
        fcst_info=fcst_info,
        map_extent=map_extent,
        draw_zd=draw_zd,
        bkgd_type=bkgd_type,
        bkgd_level=bkgd_level,
        output_dir=output_dir)
def TMP850_extreme_uv(initTime=None, fhour=6, day_back=0, model='ECMWF',
                      uv_lev=850, tmp_lev=850,
                      map_ratio=13/9, zoom_ratio=20, cntr_pnt=[102, 34],
                      south_China_sea=True, area=None, city=False,
                      output_dir=None, data_source='MICAPS',
                      Global=False, **kwargs):
    """
    Plot temperature at `tmp_lev` with its climatological-extreme marker
    field and wind at `uv_lev`.

    Data come from either the MICAPS Cassandra server or the CIMISS server;
    fields are terrain-masked, cropped to the map extent, and passed to
    thermal_graphics.draw_gh_uv_tmp_extr.

    Args:
        initTime (str, optional): model initial time 'yymmddhh'; when None
            the latest run (stepping back `day_back` days) is used.
        fhour (int): forecast hour.
        model (str): model name for the directory/data-code lookup.
        uv_lev (int): wind pressure level (hPa).
        tmp_lev (int): temperature pressure level (hPa).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: drawing options.
        data_source (str): 'MICAPS' or 'CIMISS'.
    """
    #prepare data
    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='TMP', lvl=tmp_lev),
                        utl.Cassandra_dir(data_type='surface', data_source=model, var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        # retrieve data from micaps server
        u = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if v is None:
            return
        tmp = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if tmp is None:
            return
        # NOTE(review): psfc has no `is None` guard here, unlike u/v/tmp —
        # a missing surface-pressure file would fail later in mask_terrian.
        psfc = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)

    if(data_source == 'CIMISS'):
        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour, UTC=True)
        try:
            u = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                                              data_code=utl.CMISS_data_code(data_source=model, var_name='WIU'),
                                              levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                                                        '_CoordinateAxisType': '-'},
                                              fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            if u is None:
                return
            v = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                                              data_code=utl.CMISS_data_code(data_source=model, var_name='WIV'),
                                              levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                                                        '_CoordinateAxisType': '-'},
                                              fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            if v is None:
                return
            tmp = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                                                data_code=utl.CMISS_data_code(data_source=model, var_name='TEM'),
                                                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                                                          '_CoordinateAxisType': '-'},
                                                fcst_level=tmp_lev, fcst_ele="TEM", units='K')
            if tmp is None:
                return
            tmp['data'].values = tmp['data'].values-273.15  # K -> degC
            psfc = CMISS_IO.cimiss_model_by_time('20'+filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(data_source=model, var_name='PRS'),
                                                 fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc['data'] = psfc['data']/100.  # Pa -> hPa
        except KeyError:
            raise ValueError('Can not find all data needed')

    # set map extent
    # NOTE(review): the extreme lookup key 't850' is hard-coded and does not
    # follow tmp_lev — confirm behaviour for tmp_lev != 850.
    tmp_extr = utl.get_var_extr(tmp+273.15, Var_name='t850')
    if(area != None):
        south_China_sea = False
    if(area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent, delt_x, delt_y = utl.get_map_extent(cntr_pnt, zoom_ratio, map_ratio)

    # smooth the temperature field for cleaner contours
    tmp['data'].values = gaussian_filter(tmp['data'].values, 5)

    # mask grid points lying below the model terrain
    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)
    tmp = utl.mask_terrian(tmp_lev, psfc, tmp)
    tmp_extr = utl.mask_terrian(tmp_lev, psfc, tmp_extr)

    #to solve the problem of labels on all the contours
    tmp = utl.cut_xrdata(map_extent, tmp, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    tmp_extr = utl.cut_xrdata(map_extent, tmp_extr, delt_x=delt_x, delt_y=delt_y)

    tmp.attrs['model'] = model
    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    #draw
    thermal_graphics.draw_gh_uv_tmp_extr(
        tmp=tmp, tmp_extr=tmp_extr, uv=uv,
        map_extent=map_extent,
        regrid_shape=20,
        city=city, south_China_sea=south_China_sea,
        output_dir=output_dir, Global=Global)
def gh_uv_tadv(initTime=None, fhour=6, day_back=0, model='ECMWF',
               gh_lev=500, uv_lev=850,
               map_ratio=14/9, zoom_ratio=20, cntr_pnt=[104, 34],
               south_China_sea=True, area=None, city=False, output_dir=None,
               data_source='MICAPS', Global=False, **kwargs):
    """
    Plot geopotential height (gh_lev), wind (uv_lev) and horizontal
    temperature advection at uv_lev.

    Data come from the MICAPS Cassandra server or the CIMISS server; the
    advection is computed on the cropped grid, fields are terrain-masked,
    and drawing is delegated to thermal_graphics.draw_gh_uv_tadv.

    Args:
        initTime (str, optional): model initial time 'yymmddhh'; when None
            the latest run (stepping back `day_back` days) is used.
        fhour (int): forecast hour.
        model (str): model name for the directory/data-code lookup.
        gh_lev (int): geopotential height pressure level (hPa).
        uv_lev (int): wind/temperature pressure level (hPa).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: drawing options.
        data_source (str): 'MICAPS' or 'CIMISS'.
    """
    #prepare data
    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high', data_source=model, var_name='HGT', lvl=gh_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='TMP', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='surface', data_source=model, var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        tmp = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if tmp is None:
            return
        psfc = MICAPS_IO.get_model_grid(data_dir[4], filename=filename)
        # FIX: guard psfc like the other fields; the original left it
        # unchecked and would crash later in mask_terrian on a missing file.
        if psfc is None:
            return

    if(data_source == 'CIMISS'):
        # get filename
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour, UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20'+filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model, var_name='GPH'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            if gh is None:
                return
            gh['data'].values = gh['data'].values/10.  # gpm -> dagpm
            u = CMISS_IO.cimiss_model_by_time(
                '20'+filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIU'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            if u is None:
                return
            v = CMISS_IO.cimiss_model_by_time(
                '20'+filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIV'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            if v is None:
                return
            tmp = CMISS_IO.cimiss_model_by_time(
                '20'+filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model, var_name='TEM'),
                levattrs={'long_name': 'pressure_level', 'units': 'hPa',
                          '_CoordinateAxisType': '-'},
                fcst_level=uv_lev, fcst_ele="TEM", units='K')
            if tmp is None:
                return
            tmp['data'].values = tmp['data'].values-273.15  # K -> degC
            psfc = CMISS_IO.cimiss_model_by_time(
                '20'+filename[0:8], valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model, var_name='PRS'),
                fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc['data'] = psfc['data']/100.  # Pa -> hPa
        except KeyError:
            raise ValueError('Can not find all data needed')

    # set map extent
    if(area != None):
        south_China_sea = False
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent, delt_x, delt_y = utl.get_map_extent(cntr_pnt=cntr_pnt, zoom_ratio=zoom_ratio, map_ratio=map_ratio)

    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    tmp = utl.cut_xrdata(map_extent, tmp, delt_x=delt_x, delt_y=delt_y)

    # temperature advection on the cropped grid; sign flipped as in the
    # original so the drawn field is -V.grad(T)
    tadv_xr = tmp.copy(deep=True)
    tadv = -1*mbd.advection(tmp['data'].values.squeeze(),
                            u['data'].values.squeeze(),
                            v['data'].values.squeeze(),
                            tmp['lat'].values, tmp['lon'].values)
    tadv_xr['data'].values = tadv[np.newaxis, np.newaxis, :, :]

    # mask grid points lying below the model terrain
    gh = utl.mask_terrian(gh_lev, psfc, gh)
    u = utl.mask_terrian(uv_lev, psfc, u)
    # FIX: v was never terrain-masked while u was, so plotted barbs mixed a
    # masked u-component with an unmasked v-component below terrain.
    v = utl.mask_terrian(uv_lev, psfc, v)
    tadv_xr = utl.mask_terrian(uv_lev, psfc, tadv_xr)

    gh.attrs['model'] = model
    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    #draw
    thermal_graphics.draw_gh_uv_tadv(
        tadv=tadv_xr, gh=gh, uv=uv,
        map_extent=map_extent,
        regrid_shape=20,
        city=city, south_China_sea=south_China_sea,
        output_dir=output_dir, Global=Global)
def qpf_24h(initial_time, fhour=0, model='ECMWF',
            map_center=(117, 39), map_width=12):
    """
    Draw 24h accumulated QPF.

    Retrieves the 24-hour accumulated precipitation grid from the micaps
    server and renders it on an Albers-projected map with a fixed-class
    color legend. Returns silently (with a message) if the file is missing.

    Arguments:
        initial_time {string or datetime object} -- model initital time,
            like '18042008' or datetime(2018, 4, 20, 8).

    Keyword Arguments:
        fhour {int} -- model forecast hour (default: {0})
        model {str} -- model name (default: {'ECMWF'})
        map_center {tuple} -- (lon, lat) map center in degrees.
        map_width {int} -- map width/height in degrees.
    """
    # micaps data directory
    # NOTE(review): only 'ECMWF' is present although the error message
    # mentions GRAPES and NCEP — confirm whether more entries are planned.
    data_dirs = {'ECMWF': ['ECMWF_HR/RAIN24']}
    try:
        data_dir = data_dirs[model.strip().upper()]
    except KeyError:
        raise ValueError('Unknown model, choose ECMWF, GRAPES or NCEP.')

    # get file name
    filename = model_filename(initial_time, fhour)

    # retrieve data from micaps server
    rain24 = get_model_grid(data_dir[0], filename=filename)
    if rain24 is None:
        print('Can not retrieve {} from Micaps server.'.format(filename))
        return
    init_time = rain24.coords['init_time'].values[0]
    # repack as a plain dict of numpy arrays for plotting
    rain24 = {
        'lon': rain24.coords['lon'].values,
        'lat': rain24.coords['lat'].values,
        'data': np.squeeze(rain24.values)
    }

    # set up map projection
    datacrs = ccrs.PlateCarree()
    plotcrs = ccrs.AlbersEqualArea(central_latitude=map_center[1],
                                   central_longitude=map_center[0],
                                   standard_parallels=[30., 60.])

    # set up figure
    fig = plt.figure(figsize=(10, 8))
    ax = fig.add_axes([0, 0, 1, 1], projection=plotcrs)

    # add model title
    add_model_title('24h accumulated QPF', init_time, model=model,
                    fhour=fhour, fontsize=18, multilines=True, atime=24)

    # add map background
    map_extent = (map_center[0] - map_width / 2.0,
                  map_center[0] + map_width / 2.0,
                  map_center[1] - map_width / 2.0,
                  map_center[1] + map_width / 2.0)
    ax.set_extent(map_extent, crs=datacrs)
    land_50m = cfeature.NaturalEarthFeature('physical', 'land', '50m',
                                            edgecolor='face',
                                            facecolor=cfeature.COLORS['land'])
    ax.add_feature(land_50m)
    add_china_map_2cartopy(ax, name='province', edgecolor='darkcyan',
                           lw=1, zorder=100)

    # draw QPF with fixed precipitation classes
    clevs = [0.1, 10, 25, 50, 100, 250]
    colors = ["#88F492", "#00A929", "#2AB8FF", "#1202FC", "#FF04F4",
              "#850C3E"]
    cmap, norm = mpl.colors.from_levels_and_colors(clevs, colors,
                                                   extend='max')
    ax.pcolormesh(rain24['lon'], rain24['lat'], rain24['data'],
                  norm=norm, cmap=cmap, transform=datacrs, zorder=2)

    # add custom legend (one patch per precipitation class)
    legend_elements = [
        Patch(facecolor=colors[0], label='0.1~10mm'),
        Patch(facecolor=colors[1], label='10~25mm'),
        Patch(facecolor=colors[2], label='25~50mm'),
        Patch(facecolor=colors[3], label='50~100mm'),
        Patch(facecolor=colors[4], label='100~250mm'),
        Patch(facecolor=colors[5], label='>250mm')
    ]
    ax.legend(handles=legend_elements, loc='lower right', fontsize=16)

    # add logo
    add_logo(fig, alpha=0.7)

    # show figure
    ax.set_adjustable('datalim')
    plt.show()
def wind_rh_according_to_4D_data(
        initTime=None,
        fhour=6,
        day_back=0,
        model='ECMWF',
        sta_fcs={
            'lon': [101.82, 101.32, 101.84, 102.23, 102.2681],
            'lat': [28.35, 27.91, 28.32, 27.82, 27.8492],
            'altitude': [3600, 3034.62, 3240, 1669, 1941.5],
            'name': ['健美乡', '项脚乡', '\n锦屏镇', '\n马道镇', 'S9005 ']
        },
        draw_zd=True,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500],
        map_ratio=19 / 9,
        zoom_ratio=1,
        south_China_sea=False,
        area='全国',
        city=False,
        output_dir=None,
        bkgd_type='satellite',
        data_source='MICAPS'):
    """Draw station wind and relative humidity from 4D (level/lat/lon/time) model data.

    Retrieves 3D model fields (geopotential height, RH, U, V) and surface
    fields from either the MICAPS Cassandra server or the CIMISS server,
    builds (altitude, lat, lon) interpolators, samples them at the stations
    in ``sta_fcs`` (and, when ``draw_zd`` is True, at nearby observation
    stations), then delegates the plotting to
    ``local_scale_graphics.draw_wind_rh_according_to_4D_data``.

    NOTE(review): ``sta_fcs`` and ``levels`` are mutable default arguments;
    they are only read here, but callers should not mutate them in place.

    Args:
        initTime (str, optional): model initial time 'YYmmddHH'; when None it
            is derived from the latest available file.
        fhour (int, optional): forecast lead time in hours.
        day_back (int, optional): days to step back when initTime is None.
        model (str): model name — 'ECMWF' or 'GRAPES_GFS'.
        sta_fcs (dict): forecast-station lon/lat/altitude/name lists.
        draw_zd (bool): also interpolate to and plot observation stations.
        levels (list): pressure levels (hPa) for the 3D retrievals.
        map_ratio, zoom_ratio: control the lon/lat extent of the map.
        south_China_sea, area, city, output_dir, bkgd_type: plot options.
        data_source (str): 'MICAPS' or 'CIMISS'.
    """
    # micaps data directory
    if (area != '全国'):
        south_China_sea = False

    # prepare data
    if (area != '全国'):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    # NOTE(review): the cntr_pnt obtained from get_map_area above is
    # immediately overwritten by the station mean below — confirm intended.
    cntr_pnt = np.append(np.mean(sta_fcs['lon']), np.mean(sta_fcs['lat']))
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    bkgd_level = utl.cal_background_zoom_ratio(zoom_ratio)

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high', data_source=model, var_name='HGT', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model, var_name='RH', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=''),
                utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=''),
                utl.Cassandra_dir(data_type='surface', data_source=model, var_name='u10m'),
                utl.Cassandra_dir(data_type='surface', data_source=model, var_name='v10m'),
                utl.Cassandra_dir(data_type='surface', data_source=model, var_name='Td2m'),
                utl.Cassandra_dir(data_type='surface', data_source=model, var_name='T2m')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
            initTime = filename[0:8]

        # retrieve data from micaps server; give up silently on any miss
        gh = MICAPS_IO.get_model_3D_grid(directory=data_dir[0][0:-1],
                                         filename=filename, levels=levels)
        if (gh is None):
            return
        # MICAPS height is stored in dagpm; x10 converts to gpm — TODO confirm
        gh['data'].values = gh['data'].values * 10
        rh = MICAPS_IO.get_model_3D_grid(directory=data_dir[1][0:-1],
                                         filename=filename, levels=levels,
                                         allExists=False)
        if rh is None:
            return
        u = MICAPS_IO.get_model_3D_grid(directory=data_dir[2][0:-1],
                                        filename=filename, levels=levels,
                                        allExists=False)
        if u is None:
            return
        v = MICAPS_IO.get_model_3D_grid(directory=data_dir[3][0:-1],
                                        filename=filename, levels=levels,
                                        allExists=False)
        if v is None:
            return
        u10m = MICAPS_IO.get_model_grid(directory=data_dir[4], filename=filename)
        if u10m is None:
            return
        v10m = MICAPS_IO.get_model_grid(directory=data_dir[5], filename=filename)
        if v10m is None:
            return
        td2m = MICAPS_IO.get_model_grid(directory=data_dir[6], filename=filename)
        if td2m is None:
            return
        t2m = MICAPS_IO.get_model_grid(directory=data_dir[7], filename=filename)
        if t2m is None:
            return

        if (draw_zd == True):
            # observation valid time = model initial time + forecast hour
            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            directory_obs = utl.Cassandra_dir(data_type='surface',
                                              data_source='OBS',
                                              var_name='PLOT_ALL')
            try:
                zd_sta = MICAPS_IO.get_station_data(filename=validtime + '0000.000',
                                                    directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = True
            except:
                # NOTE(review): bare except; falls back to the latest obs file
                # and flags the observations as not time-matched
                zd_sta = MICAPS_IO.get_station_data(directory=directory_obs,
                                                    dropna=True,
                                                    cache=False)
                obs_valid = False
            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alt'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['Wind_speed_2m_avg'].values * units('m/s'),
                zd_sta['Wind_angle_2m_avg'].values * units.deg)
            # keep only observation stations inside the map extent
            idx_zd = np.where((zd_lon > map_extent[0]) & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2]) & (zd_lat < map_extent[3]))
            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour, UTC=True)
        try:
            # retrieve data from CMISS server
            gh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='GPH'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return
            rh = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='RHU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="RHU",
                units='%')
            if rh is None:
                return
            u = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIU'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIU",
                units='m/s')
            if u is None:
                return
            v = CIMISS_IO.cimiss_model_3D_grid(
                data_code=utl.CMISS_data_code(data_source=model, var_name='WIV'),
                init_time_str='20' + filename[0:8],
                valid_time=fhour,
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_levels=levels,
                fcst_ele="WIV",
                units='m/s')
            if v is None:
                return
            # surface fields differ by model: ECMWF provides 2m dewpoint and
            # temperature; GRAPES_GFS provides 2m RH directly
            if (model == 'ECMWF'):
                td2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='DPT'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="DPT",
                    units='K')
                if td2m is None:
                    return
                t2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='TEF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="TEF2",
                    units='K')
                if t2m is None:
                    return
                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return
                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return
            if (model == 'GRAPES_GFS'):
                rh2m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='RHF2'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=2,
                    fcst_ele="RHF2",
                    units='%')
                if rh2m is None:
                    return
                v10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='WIV10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIV10",
                    units='m/s')
                if v10m is None:
                    return
                u10m = CIMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model, var_name='WIU10'),
                    levattrs={
                        'long_name': 'height_above_ground',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=10,
                    fcst_ele="WIU10",
                    units='m/s')
                if u10m is None:
                    return
        except KeyError:
            raise ValueError('Can not find all data needed')

        if (draw_zd == True):
            if (initTime == None):
                # derive the initial time from the latest available obs
                initTime1 = CIMISS_IO.cimiss_get_obs_latest_time(
                    data_code="SURF_CHN_MUL_HOR")
                initTime = (datetime.strptime('20' + initTime1, '%Y%m%d%H') -
                            timedelta(days=day_back)).strftime("%Y%m%d%H")[2:]
            validtime = (datetime.strptime('20' + initTime, '%Y%m%d%H') +
                         timedelta(hours=fhour)).strftime("%Y%m%d%H")
            data_code = utl.CMISS_data_code(data_source='OBS',
                                            var_name='PLOT_sfc')
            zd_sta = CIMISS_IO.cimiss_obs_by_time(
                times=validtime + '0000',
                data_code=data_code,
                sta_levels="011,012,013,014",
                elements=
                "Station_Id_C,Station_Id_d,lat,lon,Alti,TEM,WIN_D_Avg_2mi,WIN_S_Avg_2mi,RHU"
            )
            obs_valid = True
            if (zd_sta is None):
                CIMISS_IO.cimiss_get_obs_latest_time(data_code=data_code,
                                                     latestTime=6)
                # NOTE(review): this fallback passes MICAPS-style arguments and
                # references directory_obs, which is undefined in the CIMISS
                # branch — it would raise NameError/TypeError if ever reached.
                zd_sta = CIMISS_IO.cimiss_obs_by_time(directory=directory_obs,
                                                      dropna=True,
                                                      cache=False)
                obs_valid = False
            zd_lon = zd_sta['lon'].values
            zd_lat = zd_sta['lat'].values
            zd_alt = zd_sta['Alti'].values
            zd_u, zd_v = mpcalc.wind_components(
                zd_sta['WIN_S_Avg_2mi'].values * units('m/s'),
                zd_sta['WIN_D_Avg_2mi'].values * units.deg)
            # keep stations inside the extent; < 1000 filters missing-value
            # wind speeds — TODO confirm the CIMISS missing-value convention
            idx_zd = np.where((zd_lon > map_extent[0]) & (zd_lon < map_extent[1])
                              & (zd_lat > map_extent[2]) & (zd_lat < map_extent[3])
                              & (zd_sta['WIN_S_Avg_2mi'].values < 1000))
            zd_sm_lon = zd_lon[idx_zd[0]]
            zd_sm_lat = zd_lat[idx_zd[0]]
            zd_sm_alt = zd_alt[idx_zd[0]]
            zd_sm_u = zd_u[idx_zd[0]]
            zd_sm_v = zd_v[idx_zd[0]]

    #maskout area
    delt_xy = rh['lon'].values[1] - rh['lon'].values[0]
    #+ to solve the problem of labels on all the contours
    mask1 = (rh['lon'] > map_extent[0] - delt_xy) & (rh['lon'] < map_extent[1] + delt_xy) & (rh['lat'] > map_extent[2] - delt_xy) & (rh['lat'] < map_extent[3] + delt_xy)
    mask2 = (u10m['lon'] > map_extent[0] - delt_xy) & (u10m['lon'] < map_extent[1] + delt_xy) & (u10m['lat'] > map_extent[2] - delt_xy) & (u10m['lat'] < map_extent[3] + delt_xy)
    #- to solve the problem of labels on all the contours
    rh = rh.where(mask1, drop=True)
    u = u.where(mask1, drop=True)
    v = v.where(mask1, drop=True)
    gh = gh.where(mask1, drop=True)
    u10m = u10m.where(mask2, drop=True)
    v10m = v10m.where(mask2, drop=True)

    #prepare interpolator
    # flatten the 3D fields and build (altitude, lat, lon) sample points;
    # the vertical coordinate is the model geopotential height, so the
    # interpolators work in physical altitude rather than pressure
    Ex1 = np.squeeze(u['data'].values).flatten()
    Ey1 = np.squeeze(v['data'].values).flatten()
    Ez1 = np.squeeze(rh['data'].values).flatten()
    z = (np.squeeze(gh['data'].values)).flatten()
    coords = np.zeros((np.size(levels), u['lat'].size, u['lon'].size, 3))
    coords[..., 1] = u['lat'].values.reshape((1, u['lat'].size, 1))
    coords[..., 2] = u['lon'].values.reshape((1, 1, u['lon'].size))
    coords = coords.reshape((Ex1.size, 3))
    coords[:, 0] = z
    interpolator_U = LinearNDInterpolator(coords, Ex1, rescale=True)
    interpolator_V = LinearNDInterpolator(coords, Ey1, rescale=True)
    interpolator_RH = LinearNDInterpolator(coords, Ez1, rescale=True)

    #process sta_fcs 10m wind
    coords2 = np.zeros((np.size(sta_fcs['lon']), 3))
    coords2[:, 0] = sta_fcs['altitude']
    coords2[:, 1] = sta_fcs['lat']
    coords2[:, 2] = sta_fcs['lon']
    u_sta = interpolator_U(coords2)
    v_sta = interpolator_V(coords2)
    RH_sta = interpolator_RH(coords2)
    wsp_sta = (u_sta**2 + v_sta**2)**0.5
    u10m_2D = u10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    v10m_2D = v10m.interp(lon=('points', sta_fcs['lon']),
                          lat=('points', sta_fcs['lat']))
    # 2m RH: direct for GRAPES_GFS/CIMISS, derived from T/Td otherwise
    if (model == 'GRAPES_GFS' and data_source == 'CIMISS'):
        rh2m_2D = rh2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))['data'].values
    else:
        td2m_2D = td2m.interp(lon=('points', sta_fcs['lon']),
                              lat=('points', sta_fcs['lat']))
        t2m_2D = t2m.interp(lon=('points', sta_fcs['lon']),
                            lat=('points', sta_fcs['lat']))
        if (data_source == 'MICAPS'):
            # MICAPS temperatures are in Celsius, CIMISS in Kelvin
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('degC'),
                td2m_2D['data'].values * units('degC')) * 100
        else:
            rh2m_2D = mpcalc.relative_humidity_from_dewpoint(
                t2m_2D['data'].values * units('kelvin'),
                td2m_2D['data'].values * units('kelvin')) * 100
    wsp10m_2D = (u10m_2D['data'].values**2 + v10m_2D['data'].values**2)**0.5
    winddir10m = mpcalc.wind_direction(u10m_2D['data'].values * units('m/s'),
                                       v10m_2D['data'].values * units('m/s'))
    # where the 3D interpolation produced NaN (station outside the grid
    # hull), fall back to the 10m wind speed and 2m RH fields
    if (np.isnan(wsp_sta).any()):
        if (wsp_sta.size == 1):
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(
                wsp10m_2D[np.isnan(wsp_sta)])
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D)[np.isnan(RH_sta)])
        else:
            wsp_sta[np.isnan(wsp_sta)] = np.squeeze(wsp10m_2D)[np.isnan(
                wsp_sta)]
            RH_sta[np.isnan(RH_sta)] = np.squeeze(
                np.array(rh2m_2D))[np.isnan(RH_sta)]
    # recombine speed (possibly substituted) with the 10m wind direction
    u_sta, v_sta = mpcalc.wind_components(wsp_sta * units('m/s'), winddir10m)

    #process zd_sta 10m wind
    zd_fcst_obs = None
    if (draw_zd is True):
        # same interpolation pipeline, applied to the observation stations
        coords3 = np.zeros((np.size(zd_sm_alt), 3))
        coords3[:, 0] = zd_sm_alt
        coords3[:, 1] = zd_sm_lat
        coords3[:, 2] = zd_sm_lon
        u_sm_sta = interpolator_U(coords3)
        v_sm_sta = interpolator_V(coords3)
        wsp_sm_sta = (u_sm_sta**2 + v_sm_sta**2)**0.5
        u10m_sm = u10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        v10m_sm = v10m.interp(lon=('points', zd_sm_lon),
                              lat=('points', zd_sm_lat))
        wsp10m_sta = np.squeeze(
            (u10m_sm['data'].values**2 + v10m_sm['data'].values**2)**0.5)
        winddir10m_sm = mpcalc.wind_direction(
            u10m_sm['data'].values * units('m/s'),
            v10m_sm['data'].values * units('m/s'))
        if (np.isnan(wsp_sm_sta).any()):
            wsp_sm_sta[np.isnan(wsp_sm_sta)] = wsp10m_sta[np.isnan(wsp_sm_sta)]
        u_sm_sta, v_sm_sta = mpcalc.wind_components(wsp_sm_sta * units('m/s'),
                                                    winddir10m_sm)
        zd_fcst_obs = {
            'lon': zd_sm_lon,
            'lat': zd_sm_lat,
            'altitude': zd_sm_alt,
            'U': np.squeeze(np.array(u_sm_sta)),
            'V': np.squeeze(np.array(v_sm_sta)),
            'obs_valid': obs_valid,
            'U_obs': np.squeeze(np.array(zd_sm_u)),
            'V_obs': np.squeeze(np.array(zd_sm_v))
        }

    #prepare for graphics
    sta_fcs_fcst = {
        'lon': sta_fcs['lon'],
        'lat': sta_fcs['lat'],
        'altitude': sta_fcs['altitude'],
        'name': sta_fcs['name'],
        'RH': np.array(RH_sta),
        'U': np.squeeze(np.array(u_sta)),
        'V': np.squeeze(np.array(v_sta))
    }
    fcst_info = gh.coords
    # NOTE(review): output_dir is passed as None here, so the function's
    # output_dir parameter is ignored — confirm whether that is intended.
    local_scale_graphics.draw_wind_rh_according_to_4D_data(
        sta_fcs_fcst=sta_fcs_fcst,
        zd_fcst_obs=zd_fcst_obs,
        fcst_info=fcst_info,
        map_extent=map_extent,
        draw_zd=draw_zd,
        bkgd_type=bkgd_type,
        bkgd_level=bkgd_level,
        output_dir=None)
def gh_rain(initTime=None,
            fhour=24,
            day_back=0,
            model='ECMWF',
            gh_lev=500,
            atime=6,
            data_source='MICAPS',
            map_ratio=14 / 9,
            zoom_ratio=20,
            cntr_pnt=[104, 34],
            south_China_sea=True,
            area=None,
            city=False,
            output_dir=None,
            Global=False,
            **kwargs):
    """Draw geopotential height together with accumulated precipitation.

    Retrieves geopotential height at ``gh_lev``, ``atime``-hour accumulated
    rain and surface pressure from MICAPS or CIMISS, masks terrain, and
    delegates plotting to ``QPF_graphics.draw_gh_rain``.

    Bug fix vs. the previous revision: ``filename_gh`` was only assigned when
    ``atime > 3``, so any call with ``atime <= 3`` raised NameError at the
    first retrieval.  It now defaults to ``filename`` (height valid at the
    same time as the rain field) and is moved to mid-window only when
    ``atime > 3``.  ``psfc`` is now also None-checked like every other
    retrieval.

    Args:
        initTime (str, optional): model initial time 'YYmmddHH'; None means
            derive from the latest available file (day_back days ago).
        fhour (int): forecast lead time in hours.
        day_back (int): days to step back when initTime is None.
        model (str): model name.
        gh_lev (int): pressure level (hPa) for the height field.
        atime (int): precipitation accumulation window in hours.
        data_source (str): 'MICAPS' or 'CIMISS'.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: plot options.
    """
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=str(gh_lev)),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='RAIN' + '%02d' % atime),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename; the height field is taken at the middle of the
        # accumulation window when the window is longer than 3 hours
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
            filename_gh = filename
            if (atime > 3):
                filename_gh = utl.model_filename(initTime,
                                                 int(fhour - atime / 2))
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
            filename_gh = filename
            if (atime > 3):
                filename_gh = utl.filename_day_back_model(
                    day_back=day_back, fhour=int(fhour - atime / 2))

        # retrieve data from micaps server; bail out silently on any miss
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename_gh)
        if gh is None:
            return
        rain = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if rain is None:
            return
        psfc = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if psfc is None:
            return

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
            filename_gh = filename
            if (atime > 3):
                filename_gh = utl.model_filename(initTime,
                                                 fhour=int(fhour - atime / 2),
                                                 UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
            filename_gh = filename
            if (atime > 3):
                filename_gh = utl.filename_day_back_model(
                    day_back=day_back, fhour=int(fhour - atime / 2), UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20' + filename_gh[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return
            # CIMISS GPH is delivered in gpm*10; convert to dagpm for plotting
            gh['data'].values = gh['data'].values / 10.
            # accumulated rain = total precip at fhour minus at fhour - atime
            TPE1 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TPE'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="TPE",
                units='kg*m^-2')
            if TPE1 is None:
                return
            TPE2 = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour - atime,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TPE'),
                levattrs={
                    'long_name': 'Height above Ground',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="TPE",
                units='kg*m^-2')
            if TPE2 is None:
                return
            psfc = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='PRS'),
                fcst_level=0,
                fcst_ele="PRS",
                units='Pa')
            if psfc is None:
                return
            psfc['data'] = psfc['data'] / 100.  # Pa -> hPa
        except KeyError:
            raise ValueError('Can not find all data needed')
        rain = TPE1.copy(deep=True)
        rain['data'].values = TPE1['data'].values - TPE2['data'].values

    # set map extent
    if (area != None):
        south_China_sea = False
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    # cut fields to the (padded) map extent and mask below-terrain heights
    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    rain = utl.cut_xrdata(map_extent, rain, delt_x=delt_x, delt_y=delt_y)
    gh = utl.mask_terrian(gh_lev, psfc, gh)

    gh.attrs['model'] = model
    gh.attrs['lev'] = gh_lev
    rain.attrs['atime'] = atime

    # draw
    QPF_graphics.draw_gh_rain(rain=rain,
                              gh=gh,
                              map_extent=map_extent,
                              regrid_shape=20,
                              city=city,
                              south_China_sea=south_China_sea,
                              output_dir=output_dir,
                              Global=Global)
def gh_uv_spfh(initTime=None, fhour=6, day_back=0, model='ECMWF',
               gh_lev=500, uv_lev=850, spfh_lev=850,
               map_ratio=14/9, zoom_ratio=20, cntr_pnt=[104, 34],
               south_China_sea=True, area=None, city=False, output_dir=None,
               data_source='MICAPS',
               Global=False, **kwargs):
    """Draw geopotential height, wind and specific humidity together.

    Retrieves height at ``gh_lev``, U/V wind at ``uv_lev`` and specific
    humidity at ``spfh_lev`` (plus surface pressure for terrain masking)
    from MICAPS or CIMISS and delegates plotting to
    ``moisture_graphics.draw_gh_uv_spfh``.

    Args:
        initTime (str, optional): model initial time 'YYmmddHH'; None means
            derive from the latest available file (day_back days ago).
        fhour (int): forecast lead time in hours.
        day_back (int): days to step back when initTime is None.
        model (str): model name.
        gh_lev, uv_lev, spfh_lev (int): pressure levels (hPa).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: plot options.
        data_source (str): 'MICAPS' or 'CIMISS'.
    """
    if (area != None):
        south_China_sea = False

    if (data_source == 'MICAPS'):
        # micaps data directory
        try:
            data_dir = [utl.Cassandra_dir(data_type='high', data_source=model, var_name='HGT', lvl=gh_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='UGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='VGRD', lvl=uv_lev),
                        utl.Cassandra_dir(data_type='high', data_source=model, var_name='SPFH', lvl=spfh_lev),
                        utl.Cassandra_dir(data_type='surface', data_source=model, var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

        # retrieve data from micaps server; bail out silently on a miss
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        spfh = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if spfh is None:
            return
        # NOTE(review): psfc is not None-checked here (unlike the other
        # retrievals) although mask_terrian below depends on it — confirm.
        psfc = MICAPS_IO.get_model_grid(data_dir[4], filename=filename)

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour, UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model, var_name='GPH'),
                        fcst_level=gh_lev, fcst_ele="GPH", units='gpm')
            if gh is None:
                return
            # CIMISS GPH is gpm*10; convert to dagpm — TODO confirm convention
            gh['data'].values = gh['data'].values/10.

            u = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model, var_name='WIU'),
                        fcst_level=uv_lev, fcst_ele="WIU", units='m/s')
            if u is None:
                return

            v = CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model, var_name='WIV'),
                        fcst_level=uv_lev, fcst_ele="WIV", units='m/s')
            if v is None:
                return

            spfh = CMISS_IO.cimiss_model_by_time('20'+filename[0:8],
                        valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model, var_name='SHU'),
                        fcst_level=spfh_lev, fcst_ele="SHU", units='kg.kg-1')
            if spfh is None:
                return
            # kg/kg -> g/kg for plotting
            spfh['data'].values = spfh['data'].values*1000

            psfc = CMISS_IO.cimiss_model_by_time('20'+filename[0:8],
                        valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model, var_name='PRS'),
                        fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc['data'] = psfc['data']/100.  # Pa -> hPa
        except KeyError:
            raise ValueError('Can not find all data needed')

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0]-zoom_ratio*1*map_ratio
    map_extent[1] = cntr_pnt[0]+zoom_ratio*1*map_ratio
    map_extent[2] = cntr_pnt[1]-zoom_ratio*1
    map_extent[3] = cntr_pnt[1]+zoom_ratio*1
    delt_x = (map_extent[1]-map_extent[0])*0.2
    delt_y = (map_extent[3]-map_extent[2])*0.1

    #+ to solve the problem of labels on all the contours
    # masks are computed on the un-cut fields and applied after terrain
    # masking; the padding keeps contour labels off the map border
    mask1 = (gh['lon'] > map_extent[0]-delt_x) & (gh['lon'] < map_extent[1]+delt_x) & (gh['lat'] > map_extent[2]-delt_y) & (gh['lat'] < map_extent[3]+delt_y)

    mask2 = (u['lon'] > map_extent[0]-delt_x) & (u['lon'] < map_extent[1]+delt_x) & (u['lat'] > map_extent[2]-delt_y) & (u['lat'] < map_extent[3]+delt_y)

    mask3 = (spfh['lon'] > map_extent[0]-delt_x) & (spfh['lon'] < map_extent[1]+delt_x) & (spfh['lat'] > map_extent[2]-delt_y) & (spfh['lat'] < map_extent[3]+delt_y)

    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    spfh = utl.cut_xrdata(map_extent, spfh, delt_x=delt_x, delt_y=delt_y)

    # remove grid points below the model terrain at each field's level
    gh = utl.mask_terrian(gh_lev, psfc, gh)
    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)
    spfh = utl.mask_terrian(spfh_lev, psfc, spfh)
    #- to solve the problem of labels on all the contours
    gh = gh.where(mask1, drop=True)
    gh.attrs['model'] = model
    u = u.where(mask2, drop=True)
    v = v.where(mask2, drop=True)
    spfh = spfh.where(mask3, drop=True)

    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    moisture_graphics.draw_gh_uv_spfh(
        spfh=spfh, gh=gh, uv=uv,
        map_extent=map_extent,
        regrid_shape=20,
        city=city, south_China_sea=south_China_sea,
        output_dir=output_dir, Global=Global)
def gh_uv_VVEL(initial_time=None,
               fhour=6,
               day_back=0,
               model='ECMWF',
               gh_lev='500',
               uvw_lev='850',
               map_ratio=19 / 9,
               zoom_ratio=20,
               cntr_pnt=[102, 34],
               south_China_sea=True,
               area='全国',
               city=False,
               output_dir=None,
               Global=False):
    """Draw geopotential height, wind and vertical velocity (VVEL).

    Retrieves height at ``gh_lev`` and U/V/VVEL at ``uvw_lev`` from the
    MICAPS Cassandra server, windows the fields to the map extent, and
    delegates plotting to ``dynamic_graphics.draw_gh_uv_VVEL``.

    Bug fix vs. the previous revision: the VVEL retrieval (``w``) was the
    only one without a None check, so a missing file crashed later when the
    VVEL dict indexed ``w``; it is now checked like ``gh``/``u``/``v``.

    Args:
        initial_time (str, optional): model initial time 'YYmmddHH'; None
            means derive from the latest available file (day_back days ago).
        fhour (int): forecast lead time in hours.
        day_back (int): days to step back when initial_time is None.
        model (str): model name.
        gh_lev, uvw_lev (str): pressure levels (hPa) as strings.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: plot options.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl=gh_lev),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=uvw_lev),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=uvw_lev),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VVEL',
                              lvl=uvw_lev)
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server; bail out silently on any miss
    gh = get_model_grid(data_dir[0], filename=filename)
    if gh is None:
        return
    u = get_model_grid(data_dir[1], filename=filename)
    if u is None:
        return
    v = get_model_grid(data_dir[2], filename=filename)
    if v is None:
        return
    w = get_model_grid(data_dir[3], filename=filename)
    if w is None:
        return

    init_time = gh.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # index windows with padding so contour labels stay off the border;
    # gh/u/v share one grid (idx_*1), VVEL may be on its own (idx_*2)
    idx_x1 = np.where((gh.coords['lon'].values > map_extent[0] - delt_x)
                      & (gh.coords['lon'].values < map_extent[1] + delt_x))
    idx_y1 = np.where((gh.coords['lat'].values > map_extent[2] - delt_y)
                      & (gh.coords['lat'].values < map_extent[3] + delt_y))
    idx_x2 = np.where((w.coords['lon'].values > map_extent[0] - delt_x)
                      & (w.coords['lon'].values < map_extent[1] + delt_x))
    idx_y2 = np.where((w.coords['lat'].values > map_extent[2] - delt_y)
                      & (w.coords['lat'].values < map_extent[3] + delt_y))
    #- to solve the problem of labels on all the contours

    # plain-array payloads for the graphics routine
    gh = {
        'lon': gh.coords['lon'].values[idx_x1],
        'lat': gh.coords['lat'].values[idx_y1],
        'data': gh['data'].values[0, 0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                  idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'lev': gh_lev,
        'model': model,
        'fhour': fhour,
        'init_time': init_time
    }
    uv = {
        'lon': u.coords['lon'].values[idx_x1],
        'lat': u.coords['lat'].values[idx_y1],
        'udata': u['data'].values[0, 0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                  idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'vdata': v['data'].values[0, 0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                  idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'lev': uvw_lev
    }
    VVEL = {
        'lon': w.coords['lon'].values[idx_x2],
        'lat': w.coords['lat'].values[idx_y2],
        'data': np.squeeze(w['data'].values[0, 0,
                                            idx_y2[0][0]:(idx_y2[0][-1] + 1),
                                            idx_x2[0][0]:(idx_x2[0][-1] + 1)]),
        'lev': uvw_lev
    }

    dynamic_graphics.draw_gh_uv_VVEL(VVEL=VVEL,
                                     gh=gh,
                                     uv=uv,
                                     map_extent=map_extent,
                                     regrid_shape=20,
                                     city=city,
                                     south_China_sea=south_China_sea,
                                     output_dir=output_dir,
                                     Global=Global)
def mslp_rain_snow(initial_time=None,
                   fhour=24,
                   day_back=0,
                   model='ECMWF',
                   atime=6,
                   map_ratio=19 / 9,
                   zoom_ratio=20,
                   cntr_pnt=[102, 34],
                   south_China_sea=True,
                   area='全国',
                   city=False,
                   output_dir=None,
                   Global=False):
    """Draw mean sea-level pressure with rain / snow / sleet classification.

    Retrieves MSLP plus ``atime``-hour accumulated rain and snow, classifies
    each grid point as rain, snow or sleet from the rain/snow difference, and
    delegates plotting to ``QPF_graphics.draw_mslp_rain_snow``.

    Bug fix vs. the previous revision: the ``rain`` and ``snow`` retrievals
    were not None-checked (only ``mslp`` was), so a missing file crashed in
    ``xr.merge``; both are now checked.

    NOTE(review): ``filename_mslp`` is computed when ``atime > 3`` but never
    used — ``mslp`` is retrieved with ``filename``.  Either the mid-window
    filename should be used for MSLP or the computation removed; intent
    unclear, so behavior is left unchanged.

    Args:
        initial_time (str, optional): model initial time 'YYmmddHH'; None
            means derive from the latest available file (day_back days ago).
        fhour (int): forecast lead time in hours.
        day_back (int): days to step back when initial_time is None.
        model (str): model name.
        atime (int): precipitation accumulation window in hours.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: plot options.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PRMSL'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='RAIN' + '%02d' % atime),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='SNOW' + '%02d' % atime),
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
        if (atime > 3):
            filename_mslp = utl.model_filename(initial_time, fhour / 2.)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        if (atime > 3):
            filename_mslp = utl.filename_day_back_model(day_back=day_back,
                                                        fhour=fhour / 2.)

    # retrieve data from micaps server; bail out silently on any miss
    mslp = get_model_grid(data_dir[0], filename=filename)
    if mslp is None:
        return
    rain = get_model_grid(data_dir[1], filename=filename)
    if rain is None:
        return
    snow = get_model_grid(data_dir[2], filename=filename)
    if snow is None:
        return

    init_time = mslp.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((mslp.coords['lon'].values > map_extent[0] - delt_x)
                      & (mslp.coords['lon'].values < map_extent[1] + delt_x))
    idx_y1 = np.where((mslp.coords['lat'].values > map_extent[2] - delt_y)
                      & (mslp.coords['lat'].values < map_extent[3] + delt_y))
    idx_x2 = np.where((rain.coords['lon'].values > map_extent[0] - delt_x)
                      & (rain.coords['lon'].values < map_extent[1] + delt_x))
    idx_y2 = np.where((rain.coords['lat'].values > map_extent[2] - delt_y)
                      & (rain.coords['lat'].values < map_extent[3] + delt_y))
    #- to solve the problem of labels on all the contours

    # classify precipitation type from rain/snow partition (0.1 mm threshold):
    #   sleet: both phases present; snow: mostly solid; rain: mostly liquid
    rain_snow = xr.merge(
        [rain.rename({'data': 'rain'}),
         snow.rename({'data': 'snow'})])
    mask1 = ((rain_snow['rain'] - rain_snow['snow']) > 0.1) & (rain_snow['snow'] > 0.1)
    sleet = rain_snow['rain'].where(mask1)
    mask2 = ((rain_snow['rain'] - rain_snow['snow']) < 0.1) & (rain_snow['snow'] > 0.1)
    snw = rain_snow['snow'].where(mask2)
    mask3 = (rain_snow['rain'] > 0.1) & (rain_snow['snow'] < 0.1)
    rn = rain_snow['rain'].where(mask3)

    # plain-array payloads for the graphics routine
    mslp = {
        'lon': mslp.coords['lon'].values[idx_x1],
        'lat': mslp.coords['lat'].values[idx_y1],
        'data': mslp['data'].values[0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                    idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'model': model,
        'fhour': fhour,
        'init_time': init_time
    }
    rain = {
        'lon': rn.coords['lon'].values[idx_x2],
        'lat': rn.coords['lat'].values[idx_y2],
        'data': rn.values[0, idx_y2[0][0]:(idx_y2[0][-1] + 1),
                          idx_x2[0][0]:(idx_x2[0][-1] + 1)]
    }
    snow = {
        'lon': snw.coords['lon'].values[idx_x2],
        'lat': snw.coords['lat'].values[idx_y2],
        'data': snw.values[0, idx_y2[0][0]:(idx_y2[0][-1] + 1),
                           idx_x2[0][0]:(idx_x2[0][-1] + 1)]
    }
    sleet = {
        'lon': sleet.coords['lon'].values[idx_x2],
        'lat': sleet.coords['lat'].values[idx_y2],
        'data': sleet.values[0, idx_y2[0][0]:(idx_y2[0][-1] + 1),
                             idx_x2[0][0]:(idx_x2[0][-1] + 1)]
    }

    QPF_graphics.draw_mslp_rain_snow(rain=rain,
                                     snow=snow,
                                     sleet=sleet,
                                     mslp=mslp,
                                     atime=atime,
                                     map_extent=map_extent,
                                     regrid_shape=20,
                                     city=city,
                                     south_China_sea=south_China_sea,
                                     output_dir=output_dir,
                                     Global=Global)
def gh_uv_r6(initTime=None,
             fhour=6,
             day_back=0,
             model='ECMWF',
             gh_lev=500,
             uv_lev=850,
             map_ratio=14 / 9,
             zoom_ratio=20,
             cntr_pnt=[104, 34],
             south_China_sea=True,
             area=None,
             city=False,
             output_dir=None,
             data_source='MICAPS',
             Global=False,
             **kwargs):
    """Draw geopotential height, wind and 6-hour accumulated rain.

    Retrieves height at ``gh_lev``, U/V wind at ``uv_lev``, 6h rain and
    surface pressure from MICAPS or CIMISS (for CIMISS the 6h rain is the
    difference of two total-precipitation fields), then delegates plotting to
    ``synoptic_graphics.draw_gh_uv_r6``.

    Args:
        initTime (str, optional): model initial time 'YYmmddHH'; None means
            derive from the latest available file (day_back days ago).
        fhour (int): forecast lead time in hours.
        day_back (int): days to step back when initTime is None.
        model (str): model name.
        gh_lev, uv_lev (int): pressure levels (hPa).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls.
        south_China_sea, area, city, output_dir, Global: plot options.
        data_source (str): 'MICAPS' or 'CIMISS'.
    """
    if (area != None):
        south_China_sea = False

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=gh_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='RAIN06'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PSFC')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server; bail out silently on a miss
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        r6 = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if r6 is None:
            return
        # NOTE(review): psfc is not None-checked although mask_terrian
        # below depends on it — confirm whether a check should be added.
        psfc = MICAPS_IO.get_model_grid(data_dir[4], filename=filename)

    if (data_source == 'CIMISS'):
        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CIMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return
            # CIMISS GPH is gpm*10; convert to dagpm — TODO confirm convention
            gh['data'].values = gh['data'].values / 10.

            u = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIU",
                units='m/s')
            if u is None:
                return
            v = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIV",
                units='m/s')
            if v is None:
                return
            # 6h rain = total precip at fhour minus total precip at fhour-6
            TPE1 = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='TPE'),
                                                 fcst_level=0,
                                                 fcst_ele="TPE",
                                                 units='kg*m^-2')
            if TPE1 is None:
                return
            TPE2 = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour - 6,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='TPE'),
                                                 fcst_level=0,
                                                 fcst_ele="TPE",
                                                 units='kg*m^-2')
            if TPE2 is None:
                return
            psfc = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='PRS'),
                                                 fcst_level=0,
                                                 fcst_ele="PRS",
                                                 units='Pa')
            psfc['data'] = psfc['data'] / 100.  # Pa -> hPa
        except KeyError:
            raise ValueError('Can not find all data needed')
        r6 = TPE1.copy(deep=True)
        r6['data'].values = TPE1['data'].values - TPE2['data'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1
    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    # cut to the (padded) extent and mask below-terrain points
    gh = utl.cut_xrdata(map_extent, gh, delt_x=delt_x, delt_y=delt_y)
    u = utl.cut_xrdata(map_extent, u, delt_x=delt_x, delt_y=delt_y)
    v = utl.cut_xrdata(map_extent, v, delt_x=delt_x, delt_y=delt_y)
    r6 = utl.cut_xrdata(map_extent, r6, delt_x=delt_x, delt_y=delt_y)
    gh = utl.mask_terrian(gh_lev, psfc, gh)
    u = utl.mask_terrian(uv_lev, psfc, u)
    v = utl.mask_terrian(uv_lev, psfc, v)

    gh.attrs['model'] = model
    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])

    synoptic_graphics.draw_gh_uv_r6(r6=r6,
                                    gh=gh,
                                    uv=uv,
                                    map_extent=map_extent,
                                    regrid_shape=20,
                                    city=city,
                                    south_China_sea=south_China_sea,
                                    output_dir=output_dir,
                                    Global=Global)
def gh_rain(initial_time=None,
            fhour=24,
            day_back=0,
            model='ECMWF',
            gh_lev='500',
            atime=6,
            map_ratio=19 / 9,
            zoom_ratio=20,
            cntr_pnt=[102, 34],
            south_China_sea=True,
            area='全国',
            city=False,
            output_dir=None,
            Global=False):
    """
    Draw geopotential height contours overlaid with accumulated rainfall.

    Args:
        initial_time (str, optional): model initial time; if None, the run
            is located ``day_back`` days before the latest available one.
        fhour (int): forecast hour of the rainfall field.
        day_back (int): days to look back when initial_time is None.
        model (str): model name used to resolve micaps directories.
        gh_lev (str): pressure level of the height field (hPa).
        atime (int): rainfall accumulation period in hours.
        map_ratio (float): width/height ratio of the map extent.
        zoom_ratio (float): half-height of the map extent in degrees.
        cntr_pnt (list): [lon, lat] map center; overridden when ``area``
            names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl=gh_lev),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='RAIN' + '%02d' % atime)
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filenames: when the accumulation window is longer than 3 hours
    # the height field is sampled at the middle of the window (fhour/2),
    # otherwise it shares the rainfall filename.
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
        if (atime > 3):
            filename_gh = utl.model_filename(initial_time, fhour / 2.)
        else:
            # BUGFIX: filename_gh was left undefined when atime <= 3,
            # raising NameError at the get_model_grid call below.
            filename_gh = filename
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        if (atime > 3):
            filename_gh = utl.filename_day_back_model(day_back=day_back,
                                                      fhour=fhour / 2.)
        else:
            # BUGFIX: same missing-else defect as the branch above.
            filename_gh = filename

    # retrieve data from micaps server
    gh = get_model_grid(data_dir[0], filename=filename_gh)
    if gh is None:
        return
    rain = get_model_grid(data_dir[1], filename=filename)
    # BUGFIX: rain was used below without a None check.
    if rain is None:
        return

    init_time = gh.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((gh.coords['lon'].values > map_extent[0] - delt_x)
                      & (gh.coords['lon'].values < map_extent[1] + delt_x))
    idx_y1 = np.where((gh.coords['lat'].values > map_extent[2] - delt_y)
                      & (gh.coords['lat'].values < map_extent[3] + delt_y))
    idx_x2 = np.where((rain.coords['lon'].values > map_extent[0] - delt_x)
                      & (rain.coords['lon'].values < map_extent[1] + delt_x))
    idx_y2 = np.where((rain.coords['lat'].values > map_extent[2] - delt_y)
                      & (rain.coords['lat'].values < map_extent[3] + delt_y))
    #- to solve the problem of labels on all the contours

    # repack cropped fields as plain dicts for the drawing routine
    gh = {
        'lon': gh.coords['lon'].values[idx_x1],
        'lat': gh.coords['lat'].values[idx_y1],
        'data': gh['data'].values[0, 0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                  idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'lev': gh_lev,
        'model': model,
        'fhour': fhour,
        'init_time': init_time
    }
    rain = {
        'lon': rain.coords['lon'].values[idx_x2],
        'lat': rain.coords['lat'].values[idx_y2],
        'data': copy.deepcopy(
            rain['data'].values[0, idx_y2[0][0]:(idx_y2[0][-1] + 1),
                                idx_x2[0][0]:(idx_x2[0][-1] + 1)])
    }

    QPF_graphics.draw_gh_rain(rain=rain,
                              gh=gh,
                              atime=atime,
                              map_extent=map_extent,
                              regrid_shape=20,
                              city=city,
                              south_China_sea=south_China_sea,
                              output_dir=output_dir,
                              Global=Global)
def Crosssection_Wind_Theta_e_Qv(
        initial_time=None,
        fhour=24,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        day_back=0,
        model='ECMWF',
        output_dir=None,
        st_point=[20, 120.0],
        ed_point=[50, 130.0],
        map_extent=[70, 140, 15, 55],
        h_pos=[0.125, 0.665, 0.25, 0.2]):
    """
    Draw a vertical cross section between two points showing section-wind
    components, equivalent potential temperature and specific humidity,
    with a 500 hPa height map locating the section.

    Args:
        initial_time (str, optional): model initial time; if None, locate
            the run day_back days before the latest one.
        fhour (int): forecast hour.
        levels (list): pressure levels (hPa) retrieved for the 3D fields.
        day_back (int): days to look back when initial_time is None.
        model (str): model name for micaps directory lookup.
        output_dir (str, optional): directory to save the figure.
        st_point, ed_point (list): [lat, lon] start/end of the section.
        map_extent (list): [lonmin, lonmax, latmin, latmax] of the inset map.
        h_pos (list): axes position of the inset height map.
    """
    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    rh = get_model_3D_grid(directory=data_dir[0][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if rh is None:
        return
    rh = rh.metpy.parse_cf().squeeze()

    u = get_model_3D_grid(directory=data_dir[1][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if u is None:
        return
    u = u.metpy.parse_cf().squeeze()

    v = get_model_3D_grid(directory=data_dir[2][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if v is None:
        return
    v = v.metpy.parse_cf().squeeze()

    # v2 is a second copy of the V field retrieved only to serve as a
    # template dataset for the absolute-vorticity cube built below.
    v2 = get_model_3D_grid(directory=data_dir[2][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if v2 is None:
        return
    v2 = v2.metpy.parse_cf().squeeze()

    t = get_model_3D_grid(directory=data_dir[3][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if t is None:
        return
    t = t.metpy.parse_cf().squeeze()

    gh = get_model_grid(data_dir[4], filename=filename)
    # BUGFIX: the original re-checked `t` here instead of `gh`, so a
    # missing height field slipped through and crashed at draw time.
    if gh is None:
        return

    resolution = u['lon'][1] - u['lon'][0]
    x, y = np.meshgrid(u['lon'], u['lat'])
    dx, dy = mpcalc.lat_lon_grid_deltas(u['lon'], u['lat'])

    # build a 3D absolute-vorticity cube level by level, reusing v2 as
    # the container on the first iteration
    for ilvl in levels:
        u2d = u.sel(level=ilvl)
        v2d = v.sel(level=ilvl)
        absv2d = mpcalc.absolute_vorticity(
            u2d['data'].values * units.meter / units.second,
            v2d['data'].values * units.meter / units.second, dx, dy,
            y * units.degree)
        if (ilvl == levels[0]):
            absv3d = v2
            absv3d['data'].loc[dict(level=ilvl)] = np.array(absv2d)
        else:
            absv3d['data'].loc[dict(level=ilvl)] = np.array(absv2d)
    absv3d['data'].attrs['units'] = absv2d.units

    # interpolate each field onto the cross-section path
    cross = cross_section(rh, st_point, ed_point)
    cross_rh = cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u = cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v = cross.set_coords(('lat', 'lon'))

    cross_u['data'].attrs['units'] = units.meter / units.second
    cross_v['data'].attrs['units'] = units.meter / units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(
        cross_u['data'], cross_v['data'])

    cross = cross_section(t, st_point, ed_point)
    cross_t = cross.set_coords(('lat', 'lon'))
    # NOTE(review): this cross section of absv3d is computed but its
    # result is never used afterwards — kept for behavior parity.
    cross = cross_section(absv3d, st_point, ed_point)

    # derive dewpoint, specific humidity and theta-e along the section
    cross_Td = mpcalc.dewpoint_rh(cross_t['data'].values * units.celsius,
                                  cross_rh['data'].values * units.percent)
    rh, pressure = xr.broadcast(cross_rh['data'], cross_t['level'])
    Qv = mpcalc.specific_humidity_from_dewpoint(cross_Td, pressure)
    cross_Qv = xr.DataArray(np.array(Qv) * 1000.,
                            coords=cross_rh['data'].coords,
                            dims=cross_rh['data'].dims,
                            attrs={'units': units('g/kg')})
    Theta_e = mpcalc.equivalent_potential_temperature(
        pressure, cross_t['data'].values * units.celsius, cross_Td)
    cross_Theta_e = xr.DataArray(np.array(Theta_e),
                                 coords=cross_rh['data'].coords,
                                 dims=cross_rh['data'].dims,
                                 attrs={'units': Theta_e.units})

    crossection_graphics.draw_Crosssection_Wind_Theta_e_Qv(
        cross_Qv=cross_Qv,
        cross_Theta_e=cross_Theta_e,
        cross_u=cross_u,
        cross_v=cross_v,
        gh=gh,
        h_pos=h_pos,
        st_point=st_point,
        ed_point=ed_point,
        levels=levels,
        map_extent=map_extent,
        output_dir=output_dir)
def T2m_mslp_uv10m(initTime=None,
                   fhour=6,
                   day_back=0,
                   model='ECMWF',
                   map_ratio=19 / 9,
                   zoom_ratio=20,
                   cntr_pnt=[102, 34],
                   data_source='MICAPS',
                   south_China_sea=True,
                   area='全国',
                   city=False,
                   output_dir=None,
                   Global=False):
    """
    Draw 2 m temperature, mean sea-level pressure and 10 m wind together.

    Fields are fetched either from the MICAPS cassandra server or from the
    CIMISS service depending on ``data_source``.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): model name for directory/data-code lookup.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        data_source (str): 'MICAPS' or 'CIMISS'.
            NOTE(review): any other value leaves the fields undefined and
            raises NameError below — confirm intended behavior.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PRMSL'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='u10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='v10m'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='T2m')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server; bail out on any missing field
        mslp = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if mslp is None:
            return
        u10m = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u10m is None:
            return
        v10m = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v10m is None:
            return
        t2m = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if t2m is None:
            return

    if (data_source == 'CIMISS'):
        # get filename (CIMISS uses UTC-based names)
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server
            t2m = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                valid_time=fhour,
                                                data_code=utl.CMISS_data_code(
                                                    data_source=model,
                                                    var_name='TEF2'),
                                                fcst_level=0,
                                                fcst_ele="TEF2",
                                                units='K')
            if t2m is None:
                return
            # convert Kelvin to Celsius
            t2m['data'].values = t2m['data'].values - 273.15

            u10m = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='WIU10'),
                                                 fcst_level=0,
                                                 fcst_ele="WIU10",
                                                 units='m/s')
            if u10m is None:
                return
            v10m = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='WIV10'),
                                                 fcst_level=0,
                                                 fcst_ele="WIV10",
                                                 units='m/s')
            if v10m is None:
                return

            # ECMWF exposes MSLP under a different CIMISS data code
            if (model == 'ECMWF'):
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="GSSP",
                    units='Pa')
            else:
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    levattrs={
                        'long_name': 'Mean_sea_level',
                        'units': 'm',
                        '_CoordinateAxisType': '-'
                    },
                    fcst_level=0,
                    fcst_ele="SSP",
                    units='Pa')
            if mslp is None:
                return
            # Pa -> hPa
            mslp['data'] = mslp['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')

    # resolve map extent from the predefined region, if any
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    if (area != '全国'):
        south_China_sea = False

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # crop each field to the padded extent (one mask per native grid)
    mask1 = (t2m['lon'] > map_extent[0] - delt_x) & (
        t2m['lon'] < map_extent[1] + delt_x) & (
            t2m['lat'] > map_extent[2] - delt_y) & (t2m['lat'] <
                                                    map_extent[3] + delt_y)

    mask2 = (u10m['lon'] > map_extent[0] - delt_x) & (
        u10m['lon'] < map_extent[1] + delt_x) & (
            u10m['lat'] > map_extent[2] - delt_y) & (u10m['lat'] <
                                                     map_extent[3] + delt_y)

    mask3 = (mslp['lon'] > map_extent[0] - delt_x) & (
        mslp['lon'] < map_extent[1] + delt_x) & (
            mslp['lat'] > map_extent[2] - delt_y) & (mslp['lat'] <
                                                     map_extent[3] + delt_y)

    t2m = t2m.where(mask1, drop=True)
    t2m.attrs['model'] = model
    u10m = u10m.where(mask2, drop=True)
    # NOTE(review): v10m is cropped with u10m's mask — assumes u and v
    # share one grid; confirm against the data source.
    v10m = v10m.where(mask2, drop=True)
    uv10m = xr.merge(
        [u10m.rename({'data': 'u10m'}),
         v10m.rename({'data': 'v10m'})])
    mslp = mslp.where(mask3, drop=True)
    mslp.attrs['model'] = model

    # draw
    elements_graphics.draw_T2m_mslp_uv10m(t2m=t2m,
                                          mslp=mslp,
                                          uv10m=uv10m,
                                          map_extent=map_extent,
                                          regrid_shape=20,
                                          city=city,
                                          south_China_sea=south_China_sea,
                                          output_dir=output_dir,
                                          Global=Global)
def T2m_all_type(initTime=None,
                 fhour=24,
                 day_back=0,
                 model='中央台指导',
                 Var_plot='Tmn_2m',
                 map_ratio=19 / 9,
                 zoom_ratio=20,
                 cntr_pnt=[102, 34],
                 south_China_sea=True,
                 area='全国',
                 city=False,
                 output_dir=None,
                 Global=False):
    """
    Draw a 2 m temperature product (instantaneous, 24 h max or 24 h min)
    from the MICAPS cassandra server.

    NOTE(review): a second function with the same name appears later in
    this module; at import time the later definition shadows this one.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): data-source name for the micaps directory lookup.
        Var_plot (str): one of 'Tmn_2m', 'Tmx_2m', 'T2m' (keys of titles).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name=Var_plot)
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initTime != None):
        filename = utl.model_filename(initTime, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    T_2m = get_model_grid(data_dir[0], filename=filename)
    if T_2m is None:
        return
    init_time = T_2m.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((T_2m.coords['lon'].values > map_extent[0] - delt_x)
                      & (T_2m.coords['lon'].values < map_extent[1] + delt_x))
    idx_y1 = np.where((T_2m.coords['lat'].values > map_extent[2] - delt_y)
                      & (T_2m.coords['lat'].values < map_extent[3] + delt_y))

    # plot title keyed by the requested variable
    titles = {
        'Tmn_2m': '过去24小时2米最低温度',
        'Tmx_2m': '过去24小时2米最高温度',
        'T2m': '2米温度'
    }
    #- to solve the problem of labels on all the contours
    # repack the cropped field as a plain dict for the drawing routine
    # (data sliced as [0, 0, lat, lon] — assumes two leading singleton
    # dims; TODO confirm against get_model_grid output)
    T_2m = {
        'lon': T_2m.coords['lon'].values[idx_x1],
        'lat': T_2m.coords['lat'].values[idx_y1],
        'data': T_2m['data'].values[0, 0, idx_y1[0][0]:(idx_y1[0][-1] + 1),
                                    idx_x1[0][0]:(idx_x1[0][-1] + 1)],
        'model': model,
        'fhour': fhour,
        'title': titles[Var_plot],
        'init_time': init_time
    }
    elements_graphics.draw_T_2m(T_2m=T_2m,
                                map_extent=map_extent,
                                regrid_shape=20,
                                city=city,
                                south_China_sea=south_China_sea,
                                output_dir=output_dir,
                                Global=Global)
def T2m_all_type(initTime=None,
                 fhour=24,
                 day_back=0,
                 model='中央气象台中短期指导',
                 Var_plot='Tmn_2m',
                 map_ratio=19 / 9,
                 zoom_ratio=20,
                 cntr_pnt=[102, 34],
                 data_source='MICAPS',
                 south_China_sea=True,
                 area='全国',
                 city=False,
                 output_dir=None,
                 Global=False):
    """
    Draw a 2 m temperature product (instantaneous, 24 h max or 24 h min),
    fetched from MICAPS or CIMISS depending on ``data_source``.

    NOTE(review): this redefines the earlier T2m_all_type in this module;
    the later definition wins at import time.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): data-source name for directory/data-code lookup.
        Var_plot (str): one of 'Tmn_2m', 'Tmx_2m', 'T2m' (keys of titles).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        data_source (str): 'MICAPS' or 'CIMISS'.
            NOTE(review): the CIMISS branch only fetches TEF2, so Var_plot
            other than instantaneous temperature may not match — confirm.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    # prepare data
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name=Var_plot)
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)
        T_2m = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if T_2m is None:
            return

    if (data_source == 'CIMISS'):
        # get filename (CIMISS uses UTC-based names)
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server
            T_2m = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='TEF2'),
                levattrs={
                    'long_name': 'Mean_sea_level',
                    'units': 'm',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=0,
                fcst_ele="TEF2",
                units='K')
            if T_2m is None:
                return
            # convert Kelvin to Celsius
            T_2m['data'].values = T_2m['data'].values - 273.15
        except KeyError:
            raise ValueError('Can not find all data needed')

    # set map extent
    if (area != '全国'):
        south_China_sea = False
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # crop the field to the padded extent
    mask1 = (T_2m['lon'] > map_extent[0] - delt_x) & (
        T_2m['lon'] < map_extent[1] + delt_x) & (
            T_2m['lat'] > map_extent[2] - delt_y) & (T_2m['lat'] <
                                                     map_extent[3] + delt_y)

    T_2m = T_2m.where(mask1, drop=True)

    # plot title keyed by the requested variable
    titles = {
        'Tmn_2m': '过去24小时2米最低温度',
        'Tmx_2m': '过去24小时2米最高温度',
        'T2m': '2米温度'
    }
    #- to solve the problem of labels on all the contours
    T_2m.attrs['model'] = model
    T_2m.attrs['title'] = titles[Var_plot]

    elements_graphics.draw_T_2m(T_2m=T_2m,
                                map_extent=map_extent,
                                regrid_shape=20,
                                city=city,
                                south_China_sea=south_China_sea,
                                output_dir=output_dir,
                                Global=Global)
def low_level_wind(initTime=None,
                   fhour=6,
                   day_back=0,
                   model='ECMWF',
                   wind_level='100m',
                   map_ratio=19 / 9,
                   zoom_ratio=20,
                   cntr_pnt=[102, 34],
                   south_China_sea=True,
                   area='全国',
                   city=False,
                   output_dir=None,
                   Global=False):
    """
    Draw low-level wind barbs and the wind-speed field at a fixed height
    (e.g. 100 m), fetched from the MICAPS cassandra server.

    NOTE(review): a second function with the same name appears later in
    this module; at import time the later definition shadows this one.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): model name for the micaps directory lookup.
        wind_level (str): height suffix of the wind fields ('100m', ...).
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='u' + wind_level),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='v' + wind_level)
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initTime != None):
        filename = utl.model_filename(initTime, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    u10m = get_model_grid(data_dir[0], filename=filename)
    if u10m is None:
        return
    v10m = get_model_grid(data_dir[1], filename=filename)
    if v10m is None:
        return
    init_time = v10m.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # mask1: combined lon/lat box for the data; mask2/mask3: 1-D masks for
    # the coordinate vectors themselves
    mask1 = ((u10m['lon'] > (map_extent[0] - delt_x)) &
             (u10m['lon'] < (map_extent[1] + delt_x)) &
             (u10m['lat'] > (map_extent[2] - delt_y)) &
             (u10m['lat'] < (map_extent[3] + delt_y)))

    mask2 = ((u10m['lon'] > (map_extent[0] - delt_x)) &
             (u10m['lon'] < (map_extent[1] + delt_x)))

    mask3 = ((u10m['lat'] > (map_extent[2] - delt_y)) &
             (u10m['lat'] < (map_extent[3] + delt_y)))
    #- to solve the problem of labels on all the contours

    # repack as plain dicts for the drawing routine
    # NOTE(review): v10m is cropped with u10m-derived masks — assumes u and
    # v share one grid; confirm against the data source.
    uv10m = {
        'lon': u10m.coords['lon'].where(mask2, drop=True).values,
        'lat': u10m.coords['lat'].where(mask3, drop=True).values,
        'lev': wind_level,
        'udata': np.squeeze(u10m['data'].where(mask1, drop=True).values),
        'vdata': np.squeeze(v10m['data'].where(mask1, drop=True).values),
        'model': model,
        'fhour': fhour,
        'init_time': init_time
    }
    # wind speed magnitude derived from the cropped components
    wsp10m = {
        'lon': u10m.coords['lon'].where(mask2, drop=True).values,
        'lat': u10m.coords['lat'].where(mask3, drop=True).values,
        'data': ((uv10m['udata'])**2 + (uv10m['vdata'])**2)**0.5
    }
    elements_graphics.draw_low_level_wind(uv=uv10m,
                                          wsp=wsp10m,
                                          map_extent=map_extent,
                                          regrid_shape=20,
                                          city=city,
                                          south_China_sea=south_China_sea,
                                          output_dir=output_dir,
                                          Global=Global)
def mslp_gust10m(initTime=None,
                 fhour=6,
                 day_back=0,
                 model='ECMWF',
                 map_ratio=19 / 9,
                 zoom_ratio=20,
                 cntr_pnt=[102, 34],
                 data_source='MICAPS',
                 south_China_sea=True,
                 area='全国',
                 city=False,
                 output_dir=None,
                 Global=False):
    """
    Draw mean sea-level pressure with 6-hour 10 m wind gusts, fetched
    from MICAPS or CIMISS depending on ``data_source``.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): model name for directory/data-code lookup.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        data_source (str): 'MICAPS' or 'CIMISS'.
            NOTE(review): any other value leaves the fields undefined and
            raises NameError below — confirm intended behavior.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='PRMSL'),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='10M_GUST_6H')
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server
        mslp = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if mslp is None:
            return
        gust = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if gust is None:
            return

    if (data_source == 'CIMISS'):
        # get filename (CIMISS uses UTC-based names)
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            gust = CMISS_IO.cimiss_model_by_time('20' + filename[0:8],
                                                 valid_time=fhour,
                                                 data_code=utl.CMISS_data_code(
                                                     data_source=model,
                                                     var_name='GUST10T6'),
                                                 fcst_level=0,
                                                 fcst_ele="GUST10T6",
                                                 units='m/s')
            if gust is None:
                return

            # ECMWF exposes MSLP under a different CIMISS data code
            if (model == 'ECMWF'):
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='GSSP'),
                    fcst_level=0,
                    fcst_ele="GSSP",
                    units='Pa')
            else:
                mslp = CMISS_IO.cimiss_model_by_time(
                    '20' + filename[0:8],
                    valid_time=fhour,
                    data_code=utl.CMISS_data_code(data_source=model,
                                                  var_name='SSP'),
                    fcst_level=0,
                    fcst_ele="SSP",
                    units='Pa')
            if mslp is None:
                return
            # Pa -> hPa
            mslp['data'] = mslp['data'] / 100.
        except KeyError:
            raise ValueError('Can not find all data needed')

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    if (area != '全国'):
        south_China_sea = False

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # crop each field to the padded extent (one mask per native grid)
    mask1 = (gust['lon'] > map_extent[0] - delt_x) & (
        gust['lon'] < map_extent[1] + delt_x) & (
            gust['lat'] > map_extent[2] - delt_y) & (gust['lat'] <
                                                     map_extent[3] + delt_y)

    mask2 = (mslp['lon'] > map_extent[0] - delt_x) & (
        mslp['lon'] < map_extent[1] + delt_x) & (
            mslp['lat'] > map_extent[2] - delt_y) & (mslp['lat'] <
                                                     map_extent[3] + delt_y)

    gust = gust.where(mask1, drop=True)
    mslp = mslp.where(mask2, drop=True)
    mslp.attrs['model'] = model

    elements_graphics.draw_mslp_gust10m(gust=gust,
                                        mslp=mslp,
                                        map_extent=map_extent,
                                        regrid_shape=20,
                                        city=city,
                                        south_China_sea=south_China_sea,
                                        output_dir=output_dir,
                                        Global=Global)
def gh_uv_wvfl(initTime=None,
               fhour=6,
               day_back=0,
               model='GRAPES_GFS',
               gh_lev=500,
               uv_lev=850,
               wvfl_lev=850,
               map_ratio=19 / 9,
               zoom_ratio=20,
               cntr_pnt=[102, 34],
               south_China_sea=True,
               area='全国',
               city=False,
               output_dir=None,
               data_source='MICAPS',
               Global=False):
    """
    Draw geopotential height, wind and water-vapor flux together, fetched
    from MICAPS or CIMISS depending on ``data_source``.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): model name for directory/data-code lookup.
        gh_lev (int): pressure level (hPa) of the height field.
        uv_lev (int): pressure level (hPa) of the wind fields.
        wvfl_lev (int): pressure level (hPa) of the water-vapor flux.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        data_source (str): 'MICAPS' or 'CIMISS'.
        Global (bool): draw on a global projection.
    """
    if (area != '全国'):
        south_China_sea = False

    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='HGT',
                                  lvl=gh_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='UGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='VGRD',
                                  lvl=uv_lev),
                utl.Cassandra_dir(data_type='high',
                                  data_source=model,
                                  var_name='WVFL',
                                  lvl=wvfl_lev)
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server
        gh = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if gh is None:
            return
        u = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if u is None:
            return
        v = MICAPS_IO.get_model_grid(data_dir[2], filename=filename)
        if v is None:
            return
        wvfl = MICAPS_IO.get_model_grid(data_dir[3], filename=filename)
        if wvfl is None:
            return

    if (data_source == 'CIMISS'):
        # get filename (CIMISS uses UTC-based names)
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server
            gh = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='GPH'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=gh_lev,
                fcst_ele="GPH",
                units='gpm')
            if gh is None:
                return
            # gpm -> dagpm, matching the micaps convention
            gh['data'].values = gh['data'].values / 10.

            u = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIU",
                units='m/s')
            if u is None:
                return

            v = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=uv_lev,
                fcst_ele="WIV",
                units='m/s')
            if v is None:
                return

            wvfl = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='MOFU'),
                levattrs={
                    'long_name': 'pressure_level',
                    'units': 'hPa',
                    '_CoordinateAxisType': '-'
                },
                fcst_level=wvfl_lev,
                fcst_ele="MOFU",
                units='10^-1*g/cm*hPa*s')
            if wvfl is None:
                return
        except KeyError:
            raise ValueError('Can not find all data needed')

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    mask1 = (gh['lon'] > map_extent[0] - delt_x) & (
        gh['lon'] < map_extent[1] + delt_x) & (
            gh['lat'] > map_extent[2] - delt_y) & (gh['lat'] <
                                                   map_extent[3] + delt_y)

    mask2 = (u['lon'] > map_extent[0] - delt_x) & (
        u['lon'] < map_extent[1] + delt_x) & (
            u['lat'] > map_extent[2] - delt_y) & (u['lat'] <
                                                  map_extent[3] + delt_y)

    # BUGFIX: the original mixed wvfl and u coordinates in this mask;
    # build it entirely on the wvfl grid.
    mask3 = (wvfl['lon'] > map_extent[0] - delt_x) & (
        wvfl['lon'] < map_extent[1] + delt_x) & (
            wvfl['lat'] > map_extent[2] - delt_y) & (wvfl['lat'] <
                                                     map_extent[3] + delt_y)
    #- to solve the problem of labels on all the contours

    gh = gh.where(mask1, drop=True)
    gh.attrs['model'] = model
    u = u.where(mask2, drop=True)
    v = v.where(mask2, drop=True)
    uv = xr.merge([u.rename({'data': 'u'}), v.rename({'data': 'v'})])
    # BUGFIX: was `wvfl = v.where(wvfl, drop=True)`, which masked the wind
    # field with the wvfl dataset instead of cropping wvfl with mask3.
    wvfl = wvfl.where(mask3, drop=True)

    moisture_graphics.draw_gh_uv_wvfl(wvfl=wvfl,
                                      gh=gh,
                                      uv=uv,
                                      map_extent=map_extent,
                                      regrid_shape=20,
                                      city=city,
                                      south_China_sea=south_China_sea,
                                      output_dir=output_dir,
                                      Global=Global)
def low_level_wind(initTime=None,
                   fhour=6,
                   day_back=0,
                   model='ECMWF',
                   wind_level='100m',
                   data_source='MICAPS',
                   map_ratio=19 / 9,
                   zoom_ratio=20,
                   cntr_pnt=[102, 34],
                   south_China_sea=True,
                   area='全国',
                   city=False,
                   output_dir=None,
                   Global=False):
    """
    Draw low-level wind at a fixed height (e.g. 100 m), fetched from
    MICAPS or CIMISS depending on ``data_source``.

    NOTE(review): this redefines the earlier low_level_wind in this
    module; the later definition wins at import time.

    Args:
        initTime (str, optional): model initial time; if None, locate the
            run day_back days before the latest one.
        fhour (int): forecast hour.
        day_back (int): days to look back when initTime is None.
        model (str): model name for directory/data-code lookup.
        wind_level (str): height suffix of the wind fields ('100m', ...);
            the CIMISS branch strips the trailing character to build the
            element code (e.g. 'WIU100' from '100m').
        data_source (str): 'MICAPS' or 'CIMISS'.
            NOTE(review): any other value leaves u10m/v10m undefined and
            raises NameError below — confirm intended behavior.
        map_ratio, zoom_ratio, cntr_pnt: map extent controls, overridden
            when ``area`` names a predefined region.
        south_China_sea (bool): draw the South China Sea inset.
        area (str): predefined region name; None keeps cntr_pnt/zoom_ratio.
        city (bool): annotate cities.
        output_dir (str, optional): directory to save the figure.
        Global (bool): draw on a global projection.
    """
    # micaps data directory
    if (data_source == 'MICAPS'):
        try:
            data_dir = [
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='u' + wind_level),
                utl.Cassandra_dir(data_type='surface',
                                  data_source=model,
                                  var_name='v' + wind_level)
            ]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        # get filename
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour)

        # retrieve data from micaps server
        u10m = MICAPS_IO.get_model_grid(data_dir[0], filename=filename)
        if u10m is None:
            return
        v10m = MICAPS_IO.get_model_grid(data_dir[1], filename=filename)
        if v10m is None:
            return
        # NOTE(review): init_time is set only in this branch and is not
        # used later in this function.
        init_time = v10m.coords['forecast_reference_time'].values

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)
    if (area != '全国'):
        south_China_sea = False

    if (data_source == 'CIMISS'):
        # get filename (CIMISS uses UTC-based names)
        if (initTime != None):
            filename = utl.model_filename(initTime, fhour, UTC=True)
        else:
            filename = utl.filename_day_back_model(day_back=day_back,
                                                   fhour=fhour,
                                                   UTC=True)
        try:
            # retrieve data from CMISS server
            u10m = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIU' +
                                              wind_level[0:-1]),
                fcst_level=0,
                fcst_ele="WIU" + wind_level[0:-1],
                units='m*s-1')
            if u10m is None:
                return
            v10m = CMISS_IO.cimiss_model_by_time(
                '20' + filename[0:8],
                valid_time=fhour,
                data_code=utl.CMISS_data_code(data_source=model,
                                              var_name='WIV' +
                                              wind_level[0:-1]),
                fcst_level=0,
                fcst_ele="WIV" + wind_level[0:-1],
                units='m*s-1')
            if v10m is None:
                return
        except KeyError:
            raise ValueError('Can not find all data needed')

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    # crop both components with a mask built on the v-component grid
    # NOTE(review): assumes u and v share one grid; confirm against source.
    mask1 = (v10m['lon'] > map_extent[0] - delt_x) & (
        v10m['lon'] < map_extent[1] + delt_x) & (
            v10m['lat'] > map_extent[2] - delt_y) & (v10m['lat'] <
                                                     map_extent[3] + delt_y)

    u10m = u10m.where(mask1, drop=True)
    v10m = v10m.where(mask1, drop=True)
    uv = xr.merge([u10m.rename({'data': 'u'}), v10m.rename({'data': 'v'})])
    uv.attrs['model'] = model
    uv.attrs['level'] = wind_level

    elements_graphics.draw_low_level_wind(uv=uv,
                                          map_extent=map_extent,
                                          regrid_shape=20,
                                          city=city,
                                          south_China_sea=south_China_sea,
                                          output_dir=output_dir,
                                          Global=Global)