Example #1
def test_basic_dewpoint_rh():
    """Test dewpoint_rh function."""
    temp = np.array([30., 25., 10., 20., 25.]) * units.degC
    rh = np.array([30., 45., 55., 80., 85.]) / 100.

    real_td = np.array([11, 12, 1, 16, 22]) * units.degC
    assert_array_almost_equal(real_td, dewpoint_rh(temp, rh), 0)
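A note on the API: in current MetPy releases `dewpoint_rh` has been renamed `dewpoint_from_relative_humidity` (the old name is gone in MetPy 1.x). A minimal sketch of the equivalent call for the test above:

import numpy as np
from metpy.calc import dewpoint_from_relative_humidity
from metpy.units import units

temp = np.array([30., 25., 10., 20., 25.]) * units.degC
rh = np.array([30., 45., 55., 80., 85.]) * units.percent
td = dewpoint_from_relative_humidity(temp, rh)  # roughly [11, 12, 1, 16, 22] degC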
Example #2
async def dewp(session: CommandSession):
    # Parse "露点 <temperature> <relative humidity>" from the raw message
    raw = session.ctx['raw_message'].split('露点')[1].strip()
    parts = raw.split(' ')
    temp = ast.literal_eval(parts[0]) * units.degC
    rh = ast.literal_eval(parts[1]) * units.percent
    td = mpcalc.dewpoint_rh(temp, rh)
    await session.send(str(np.round(td.magnitude, 2)))
Example #4
    def __init__(self, pres, temp, rh=None, td=None, u=None, v=None, wspd=None,
                 wdir=None, alt=None, station=None, time=None, fig=None, **kwargs):
        if not fig:
            fig = plt.figure(figsize=(9, 9), dpi=200)
        super().__init__(fig=fig, rotation=30)
        # Parameter conversion
        nonetype = type(None)
        if isinstance(td, nonetype):
            td = mpcalc.dewpoint_rh(temp, rh * units.percent).magnitude
        if isinstance(u, nonetype) or isinstance(v, nonetype):
            u, v = mpcalc.wind_components(wspd * units('m/s'), wdir * units.degree)
            u = u.magnitude
            v = v.magnitude
        self.kw = kwargs
        # Interpolate NaNs
        xi = np.arange(0, len(pres), 1)
        self.p_i = mpi.interpolate_nans_1d(xi, pres) * units('hPa')
        self.t_i = mpi.interpolate_nans_1d(self.p_i, temp) * units.degC
        self.td_i = mpi.interpolate_nans_1d(self.p_i, td) * units.degC
        self.u_i = mpi.interpolate_nans_1d(self.p_i, u) * units('m/s')
        self.v_i = mpi.interpolate_nans_1d(self.p_i, v) * units('m/s')
        self.alt = mpi.interpolate_nans_1d(self.p_i, alt) * units('m')
        self.st = station
        self.time = time
        self.dp_idx = np.where(~np.isnan(td))[0][-1]

        self.process_skewt()
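The constructor above falls back to `mpcalc.wind_components` when u and v are missing. A minimal sketch of that call, assuming a 10 m/s wind blowing from due west (270°):

import metpy.calc as mpcalc
from metpy.units import units

u, v = mpcalc.wind_components(10 * units('m/s'), 270 * units.degree)
# wind from the west: u is about +10 m/s (eastward), v is about 0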
Example #5
def sta_SkewT(model='ECMWF',points={'lon':[116.3833], 'lat':[39.9]},
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,250,200,150,100],
    fhour=3,output_dir=None):

    try:
        data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl=''),
                    utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl='')]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # Read data
    initTime = get_latest_initTime(data_dir[0][0:-1]+"850")
    filename = initTime+'.'+str(fhour).zfill(3)
    TMP_4D=get_model_3D_grid(directory=data_dir[0][0:-1],filename=filename,levels=levels, allExists=False)
    TMP_2D=TMP_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    u_4D=get_model_3D_grid(directory=data_dir[1][0:-1],filename=filename,levels=levels, allExists=False)
    u_2D=u_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    v_4D=get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
    v_2D=v_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    HGT_4D=get_model_3D_grid(directory=data_dir[3][0:-1],filename=filename,levels=levels, allExists=False)
    HGT_2D=HGT_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    HGT_2D.attrs['model']=model
    HGT_2D.attrs['points']=points

    RH_4D=get_model_3D_grid(directory=data_dir[4][0:-1],filename=filename,levels=levels, allExists=False)
    RH_2D=RH_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))

    wind_dir_2D=mpcalc.wind_direction(u_2D['data'].values* units.meter / units.second,
        v_2D['data'].values* units.meter / units.second)
    wsp10m_2D=(u_2D['data']**2+v_2D['data']**2)**0.5
    Td2m=mpcalc.dewpoint_rh(TMP_2D['data'].values*units('degC'),RH_2D['data'].values/100.)

    p = np.squeeze(levels) * units.hPa
    T = np.squeeze(TMP_2D['data'].values) * units.degC
    Td = np.squeeze(np.array(Td2m)) * units.degC
    wind_speed = np.squeeze(wsp10m_2D.values) * units('m/s')
    wind_dir = np.squeeze(np.array(wind_dir_2D)) * units.degrees
    u=np.squeeze(u_2D['data'].values)* units('m/s')
    v=np.squeeze(v_2D['data'].values)* units('m/s')

    fcst_info= xr.DataArray(np.array(u_2D['data'].values),
                        coords=u_2D['data'].coords,
                        dims=u_2D['data'].dims,
                        attrs={'points': points,
                                'model': model})

    sta_graphics.draw_sta_skewT(
        p=p,T=T,Td=Td,wind_speed=wind_speed,wind_dir=wind_dir,u=u,v=v,
        fcst_info=fcst_info)
Example #6
def get_dp(ta, hur, dp_mask=True):
    """Dewpoint (degC) from temperature (degC) and relative humidity (%)."""
    dp = np.array(
        mpcalc.dewpoint_rh(ta * units.degC, hur * units.percent))

    if dp_mask:
        return dp
    else:
        dp[np.isnan(dp)] = -85.
        return dp
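A short usage sketch for `get_dp` as defined above, with hypothetical input arrays, showing the effect of `dp_mask`:

import numpy as np

ta = np.array([20., 25., np.nan])   # degC, hypothetical values
hur = np.array([50., np.nan, 60.])  # percent, hypothetical values
get_dp(ta, hur, dp_mask=True)   # NaNs propagate through
get_dp(ta, hur, dp_mask=False)  # NaNs replaced with -85.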
Example #7
def calc_thta_vir(united_data):
    """
    returns virtual potential temperature (K)
    and equvalent potential temperaure (K)
    """

    pres = united_data['PRES']
    temp = united_data['TEMP']
    rh = united_data['HUM']
    mixing = mpcalc.mixing_ratio_from_relative_humidity(rh, temp, pres)
    theta_vir = mpcalc.virtual_potential_temperature(pres, temp, mixing)

    td = mpcalc.dewpoint_rh(temp, rh)
    theta_e = mpcalc.equivalent_potential_temperature(pres, temp, td)

    return theta_vir, theta_e
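A usage sketch for `calc_thta_vir` with a hypothetical `united_data` dict. Note the `mixing_ratio_from_relative_humidity(rh, temp, pres)` argument order above follows the older MetPy 0.x API; MetPy 1.x expects `(pressure, temperature, relative_humidity)`.

import numpy as np
import metpy.calc as mpcalc
from metpy.units import units

# Hypothetical sounding slice; keys mirror those used in calc_thta_vir
united_data = {
    'PRES': np.array([1000., 925., 850.]) * units.hPa,
    'TEMP': np.array([25., 20., 15.]) * units.degC,
    'HUM': np.array([70., 65., 60.]) * units.percent,
}
theta_vir, theta_e = calc_thta_vir(united_data)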
Example #8
def main():
    """In the main function we basically read the files and prepare the variables to be plotted.
    This is not included in utils.py as it can change from case to case."""
    file = glob(input_file)
    print_message('Using file '+file[0])
    dset = xr.open_dataset(file[0])
    dset = dset.metpy.parse_cf()

    # Select the 850 hPa level using metpy (done inline via .metpy.sel below)
    theta_e = mpcalc.equivalent_potential_temperature(850 * units.hPa, dset['t'].metpy.sel(vertical=850 * units.hPa),
                     mpcalc.dewpoint_rh(dset['t'].metpy.sel(vertical=850 * units.hPa), dset['r'].metpy.sel(vertical=850 * units.hPa)/100.)).to(units.degC)

    mslp = dset['prmsl'].metpy.unit_array.to('hPa')
    lon, lat = get_coordinates(dset)

    time = pd.to_datetime(dset.time.values)
    cum_hour=np.array((time-time[0]) / pd.Timedelta('1 hour')).astype("int")

    levels_thetae = np.arange(-25., 75., 1.)
    levels_mslp = np.arange(mslp.min().astype("int"), mslp.max().astype("int"), 7.)

    for projection in projections:  # works whether projections holds one entry or many
        fig = plt.figure(figsize=(figsize_x, figsize_y))
        ax  = plt.gca()        
        m, x, y =get_projection(lon, lat, projection)
        # Create a mask to retain only the points inside the globe
        # to avoid a bug in basemap and a problem in matplotlib
        mask = np.logical_or(x<1.e20, y<1.e20)
        x = np.compress(mask,x)
        y = np.compress(mask,y)
        # Parallelize the plotting by dividing into chunks and processes 
        # All the arguments that need to be passed to the plotting function
        args=dict(m=m, x=x, y=y, ax=ax,
                 theta_e=np.compress(mask, theta_e, axis=1), mslp=np.compress(mask, mslp, axis=1),
                 levels_thetae=levels_thetae,levels_mslp=levels_mslp, time=time, projection=projection,
                 cum_hour=cum_hour)

        print_message('Pre-processing finished, launching plotting scripts')
        if debug:
            plot_files(time[1:2], **args)
        else:
            # Parallelize the plotting by dividing into chunks and processes 
            dates = chunks(time, chunks_size)
            plot_files_param=partial(plot_files, **args)
            p = Pool(processes)
            p.map(plot_files_param, dates)
Example #9
def get_soundings(data, ids, lats, lons, date):
    """
    ids = [ORD, MDW, etc]
    Gathers the data for writing to sounding files
    """
    date = date.strftime("%Y%m%d%H")
    points = []
    elevs = []
    for id_ in ids:
        entry = metadata[id_]
        points.append([entry[1], entry[0]])

    idx_locs = nearest_idx(points, lons, lats)

    t = data.select(name='Temperature', level=levs)
    rh = data.select(name='Relative humidity', level=levs)
    u = data.select(name='U component of wind', level=levs)
    v = data.select(name='V component of wind', level=levs)
    hgt = data.select(name='Geopotential Height', level=levs)

    n_levs = len(levs)
    data_cube = np.zeros(
        (5, n_levs, t[0].values.shape[0], t[0].values.shape[1]))
    #data_cube[0,0,:,:] = t_2m
    #data_cube[1,0,:,:] = td_2m
    for k in range(0, n_levs):
        t_lev = t[k].values * units.kelvin
        rh_lev = rh[k].values / 100.
        rh_lev = rh_lev * units('dimensionless')
        td_lev = mpcalc.dewpoint_rh(t_lev, rh_lev)

        u_lev = u[k].values * units('m/s').to('knots')
        v_lev = v[k].values * units('m/s').to('knots')

        data_cube[0, k, :, :] = t_lev
        data_cube[1, k, :, :] = td_lev
        data_cube[2, k, :, :] = u_lev
        data_cube[3, k, :, :] = v_lev
        data_cube[4, k, :, :] = hgt[k].values

    _write_to_sounding(data_cube, idx_locs, ids, date, fmt='sharppy')
Example #10
    def calc(self):
        """Compute things not usually computed"""
        if (self.data['relh'] is None
                and None not in [self.data['tmpf'], self.data['dwpf']]):
            self.data['relh'] = bounded(
                mcalc.relative_humidity_from_dewpoint(
                    self.data['tmpf'] * munits.degF,
                    self.data['dwpf'] * munits.degF
                ).to(munits.percent).magnitude, 0.5, 100.5)
        if (self.data['dwpf'] is None
                and None not in [self.data['tmpf'], self.data['relh']]
                and 1 <= self.data['relh'] <= 100):
            self.data['dwpf'] = bounded(
                mcalc.dewpoint_rh(
                    self.data['tmpf'] * munits.degF,
                    self.data['relh'] * munits.percent
                ).to(munits.degF).magnitude, -100., 100.)
        if (self.data['feel'] is None and None not in [
                self.data['tmpf'], self.data['relh'], self.data['sknt']]):
            self.data['feel'] = bounded(
                mcalc.apparent_temperature(
                    self.data['tmpf'] * munits.degF,
                    self.data['relh'] * munits.percent,
                    self.data['sknt'] * munits.knots
                ).to(munits.degF).magnitude, -150., 200.)
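The `calc` method relies on `dewpoint_rh` and `relative_humidity_from_dewpoint` being inverses of each other. A minimal round-trip sketch:

import metpy.calc as mcalc
from metpy.units import units as munits

tmpf = 68. * munits.degF
relh = 55. * munits.percent
dwpf = mcalc.dewpoint_rh(tmpf, relh).to(munits.degF)
rh_back = mcalc.relative_humidity_from_dewpoint(tmpf, dwpf).to(munits.percent)
# rh_back is approximately 55 percent again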
Example #11
def Time_Crossection_rh_uv_theta_e(initTime=None,model='ECMWF',data_source='MICAPS',points={'lon':[116.3833], 'lat':[39.9]},
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,200],
    t_gap=3,t_range=[0,48],output_dir=None,**kwargs):
  
    fhours = np.arange(t_range[0], t_range[1], t_gap)

    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl=''),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')
        
        if(initTime==None):
            initTime = get_latest_initTime(data_dir[0][0:-1]+"850")
        filenames = [initTime+'.'+str(fhour).zfill(3) for fhour in fhours]
        TMP_4D=get_model_3D_grids(directory=data_dir[0][0:-1],filenames=filenames,levels=levels, allExists=False)
        u_4D=get_model_3D_grids(directory=data_dir[1][0:-1],filenames=filenames,levels=levels, allExists=False)
        v_4D=get_model_3D_grids(directory=data_dir[2][0:-1],filenames=filenames,levels=levels, allExists=False)
        rh_4D=get_model_3D_grids(directory=data_dir[3][0:-1],filenames=filenames,levels=levels, allExists=False)
        Psfc_3D=get_model_grids(directory=data_dir[4][0:-1],filenames=filenames,allExists=False)

    if(data_source == 'CIMISS'):
        if(initTime != None):
            filename = utl.model_filename(initTime, 0,UTC=True)
        else:
            filename=utl.filename_day_back_model(day_back=0,fhour=0,UTC=True)
        try:
            rh_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='RHU'),
                        fcst_levels=levels, fcst_ele="RHU", units='%',pbar=True)

            u_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIU'),
                        fcst_levels=levels, fcst_ele="WIU", units='m/s',pbar=True)
                
            v_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        fcst_levels=levels, fcst_ele="WIV", units='m/s',pbar=True)

            TMP_4D=CMISS_IO.cimiss_model_3D_grids(init_time_str='20'+filename[0:8],valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='TEM'),
                        fcst_levels=levels, fcst_ele="TEM", units='K',pbar=True)
            TMP_4D['data'].values=TMP_4D['data'].values-273.15

            Psfc_3D=CMISS_IO.cimiss_model_grids(init_time_str='20'+filename[0:8], valid_times=fhours,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='PRS'),
                        fcst_level=0, fcst_ele="PRS", units='Pa',pbar=True)

        except KeyError:
            raise ValueError('Can not find all data needed')

    TMP_2D=TMP_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    u_2D=u_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    v_2D=v_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    rh_2D=rh_4D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    rh_2D.attrs['model']=model
    rh_2D.attrs['points']=points
    Psfc_1D=Psfc_3D.interp(lon=('points', points['lon']), lat=('points', points['lat']))
    v_2D2,pressure_2D = xr.broadcast(v_2D['data'],v_2D['level'])
    v_2D2,Psfc_2D = xr.broadcast(v_2D['data'],Psfc_1D['data'])
    Td_2D = mpcalc.dewpoint_rh(TMP_2D['data'].values*units.celsius,
                rh_2D['data'].values* units.percent)

    terrain_2D=pressure_2D-Psfc_2D
    rh,pressure = xr.broadcast(rh_2D['data'],rh_2D['level'])
    Theta_e=mpcalc.equivalent_potential_temperature(pressure,
                                                TMP_2D['data'].values*units.celsius, 
                                                Td_2D)

    theta_e_2D = xr.DataArray(np.array(Theta_e),
                        coords=rh_2D['data'].coords,
                        dims=rh_2D['data'].dims,
                        attrs={'units': Theta_e.units})

    crossection_graphics.draw_Time_Crossection_rh_uv_theta_e(
                    rh_2D=rh_2D, u_2D=u_2D, v_2D=v_2D,theta_e_2D=theta_e_2D,terrain_2D=terrain_2D,
                    t_range=t_range,output_dir=output_dir)
Example #12
def test_percent_dewpoint_rh():
    """Test dewpoint_rh with rh in percent."""
    td = dewpoint_rh(10.6 * units.degC, 37 * units.percent)
    assert_almost_equal(td, 26. * units.degF, 0)
Example #13
def test_warning_dewpoint_rh():
    """Test that warning is raised for >120% RH."""
    with pytest.warns(UserWarning):
        dewpoint_rh(10.6 * units.degC, 50)
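Taken together, these tests show that `dewpoint_rh` accepts relative humidity either as a dimensionless fraction or as a percent quantity, and warns when the value exceeds 120%:

from metpy.calc import dewpoint_rh
from metpy.units import units

td1 = dewpoint_rh(10.6 * units.degC, 0.37)                # fraction
td2 = dewpoint_rh(10.6 * units.degC, 37 * units.percent)  # percent
# both are about 26 degF; a bare 50 is read as 5000% and raises a UserWarning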
Example #14
def cape(filelist,storm,track,show):
    #Sort filelist.
    filelist=np.sort(filelist)

    # Get sampling periods (this will be a dictionary). See the toolbox
    print('Retrieving sampling periods')
    sampleperiods=getsamplingperiods(filelist,3.)

    # Iterate over all sampling periods.
    for sampindex,periodskey in enumerate(sampleperiods):

        #Allocate starting (stdt) and ending date (endt). Remember dt is the conventional short name for date.
        stdt=periodskey
        endt=sampleperiods[periodskey]

        # Define sampling period string
        period=str(stdt.hour)+'_'+str(stdt.day)+'-'+str(endt.hour)+'_'+str(endt.day)

        # Create new-empty lists.
        lats=[]
        lons=[]
        xs=[]
        ys=[]
        capes=[]
        cins=[]
	
        distfig = plt.figure(figsize=(13, 9))
        ax=distfig.add_subplot(111)
        print('start filelist loop')
        # Iterate over all files.
        for filename in filelist:



            # Select end-name of file by inspecting filename string. Notice how filename can change how file is read.
            if 'radazm' in filename.split('/')[-1] or 'eol' in filename.split('/')[-1]:
                end='radazm'
            else:
                end='avp'
            # Obtain properties of file, i.e., launch time and location into a dictionary (dicc).
            dicc=findproperties(filename,end)

            # Skip this file if its launch time falls outside the sampling period.
            if dicc['Launch Time']<stdt or dicc['Launch Time'] > endt:
                continue

            nump=np.genfromtxt(filename,skip_header=16,skip_footer=0)
            temperature=clean1(nump[:,5])
            pressure=clean1(nump[:,4])
            Height=clean1(nump[:,13])
            if np.nanmax(Height)<3500:
                continue
            #Clean for cape
            RelH=clean1(nump[:,7])
            lon=clean1(nump[:,14])
            lat=clean1(nump[:,15])
            lon=clean1(lon)
            lat=clean1(lat)
            mlon=np.nanmean(lon)
            mlat=np.nanmean(lat)
            RH=RelH/100
            T,P,rh,dz=cleanforcape(temperature,pressure,RH,Height)

            #Metpy set-up
            T=np.flip(T,0)
            rh=np.flip(rh,0)
            p=np.flip(P,0)
            dz=np.flip(dz,0)
            p=p*units.hPa
            T=T*units.celsius


            mixing=rh*mpcalc.saturation_mixing_ratio(p,T)
            epsilon=0.6219800858985514
            Tv=mpcalc.virtual_temperature(T, mixing,
                                      molecular_weight_ratio=epsilon)
            dwpoint=mpcalc.dewpoint_rh(T, rh)

            blh_indx=np.where(dz<500)
            try:
                parcelprofile=mpcalc.parcel_profile(p,np.nanmean(T[blh_indx])*units.celsius,mpcalc.dewpoint_rh(np.nanmean(T[blh_indx])*units.celsius, np.nanmean(rh[blh_indx]))).to('degC')
                Tv_parcelprofile=mpcalc.virtual_temperature(parcelprofile, mixing,
                                          molecular_weight_ratio=epsilon)
                cape,cin=cape_cin(p,Tv,dwpoint,Tv_parcelprofile,dz,T)
            except Exception:
                # Skip soundings where the parcel profile cannot be computed
                continue

            plotskewT=True
            if plotskewT==True:

                os.system('mkdir figs/skewt')
                fig = plt.figure(figsize=(9, 9))
                skew = SkewT(fig, rotation=45)
                skew.ax.set_ylim(1000, 100)
                skew.ax.set_xlim(-40, 60)

                skew.plot(p, dwpoint, 'g',label=r'$T_{dp}$')
                skew.plot(p, Tv, 'r',label=r'$T_v$')
                plt.text(-120,120,str(np.around(cape,2)),fontsize=14,fontweight='bold')

                # Plot the data using normal plotting functions, in this case using
                # log scaling in Y, as dictated by the typical meteorological plot
                skew.plot(p,Tv_parcelprofile,'k',label=r'$T_{v env}$')
                skew.shade_cin(p, T, parcelprofile,label='CIN')
                skew.shade_cape(p, Tv, Tv_parcelprofile,label='CAPE')
                skew.plot_dry_adiabats()
                skew.plot_moist_adiabats()

                plt.legend()
                plt.title(storm + ' on' + period,fontsize=14)
                plt.savefig('figs/skewt/'+storm+str(dicc['Launch Time'].time())+'.png')
                #plt.show()
                plt.close()

            r,theta=cart_to_cylindr(mlon,mlat,track,dicc['Launch Time'])
            if not(np.isnan(r)) and not(np.isnan(theta)) and not(np.isnan(cape.magnitude)):
                xs.append(r*np.cos(theta))
                ys.append(r*np.sin(theta))
                capes.append(cape.magnitude)
                cins.append(cin)


            cs=ax.scatter(xs,ys,c=np.asarray(capes),cmap='jet')
            for i,xi in enumerate(xs):
                ax.text(xi,ys[i]+10,str(np.around(capes[i],1)))
        plt.colorbar(cs)
        ax.scatter(0,0,marker='v',s=100,color='black')
        ax.grid()
        ax.set_xlabel('X distance [km]')
        ax.set_ylabel('Y distance [km]')
        ax.set_title('CAPE distribution for '+storm+' on '+period,fontsize=14)
        distfig.savefig('figs/cape'+storm+period+'.png')
        if show:
            plt.show()
Example #15
def test_scalar_dewpoint_rh():
    """Test dewpoint_rh with scalar values."""
    td = dewpoint_rh(10.6 * units.degC, 0.37)
    assert_almost_equal(td, 26. * units.degF, 0)
Example #16
def Time_Crossection_rh_uv_theta_e(
        initTime=None,
        model='ECMWF',
        points={
            'lon': [116.3833],
            'lat': [39.9]
        },
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        t_gap=3,
        t_range=[0, 48],
        output_dir=None):

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    # Read data

    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl='')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    if (initTime == None):
        initTime = get_latest_initTime(data_dir[0][0:-1] + "850")
    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    TMP_4D = get_model_3D_grids(directory=data_dir[0][0:-1],
                                filenames=filenames,
                                levels=levels,
                                allExists=False)
    TMP_2D = TMP_4D.interp(lon=('points', points['lon']),
                           lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    u_4D = get_model_3D_grids(directory=data_dir[1][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    u_2D = u_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    v_4D = get_model_3D_grids(directory=data_dir[2][0:-1],
                              filenames=filenames,
                              levels=levels,
                              allExists=False)
    v_2D = v_4D.interp(lon=('points', points['lon']),
                       lat=('points', points['lat']))

    filenames = [initTime + '.' + str(fhour).zfill(3) for fhour in fhours]
    rh_4D = get_model_3D_grids(directory=data_dir[3][0:-1],
                               filenames=filenames,
                               levels=levels,
                               allExists=False)
    rh_2D = rh_4D.interp(lon=('points', points['lon']),
                         lat=('points', points['lat']))
    rh_2D.attrs['model'] = model
    rh_2D.attrs['points'] = points
    Td_2D = mpcalc.dewpoint_rh(TMP_2D['data'].values * units.celsius,
                               rh_2D['data'].values * units.percent)

    rh, pressure = xr.broadcast(rh_2D['data'], rh_2D['level'])

    Theta_e = mpcalc.equivalent_potential_temperature(
        pressure, TMP_2D['data'].values * units.celsius, Td_2D)

    theta_e_2D = xr.DataArray(np.array(Theta_e),
                              coords=rh_2D['data'].coords,
                              dims=rh_2D['data'].dims,
                              attrs={'units': Theta_e.units})

    crossection_graphics.draw_Time_Crossection_rh_uv_theta_e(
        rh_2D=rh_2D,
        u_2D=u_2D,
        v_2D=v_2D,
        theta_e_2D=theta_e_2D,
        t_range=t_range,
        output_dir=output_dir)
Example #17
def Station_Snow_Synthetical_Forecast_From_Cassandra(
        model='ECMWF',
        output_dir=None,
        t_range=[0,84],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9]},
        initTime=None,
        draw_VIS=True,drw_thr=False,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' '}
            ):

    #+get all the directories needed
    try:
        dir_rqd=[ 
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_3_HOURS/",
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_6_HOURS/",
                "ECMWF_HR/SNOD/",
                "ECMWF_HR/SDEN/",
                "ECMWF_HR/UGRD_100M/",
                "ECMWF_HR/VGRD_100M/",
                "NWFD_SCMOC/VIS/",
                "NCEP_GFS_HR/SNOD/",
                "ECMWF_HR/SNOW06/",
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='T2m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='u10m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='v10m'),
                'ECMWF_ENSEMBLE/RAW/SNOW06/'
                ]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    try:
        dir_opt=[ 
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='Td2m')
                ]
        name_opt=['Td2m']
    except:
        dir_opt=[
                utl.Cassandra_dir(data_type='surface',data_source=model,var_name='rh2m')
                ]
        name_opt=['rh2m']
          
    #+get all the directories needed

    if(initTime == None):
        last_file={model:get_latest_initTime(dir_rqd[0]),
                    'SCMOC':get_latest_initTime(dir_rqd[6]),
                    }
    else:
        last_file={model:initTime[0],
                    'SCMOC':initTime[1],
                    }        

    y_s={model:int('20'+last_file[model][0:2]),
        'SCMOC':int('20'+last_file['SCMOC'][0:2])}
    m_s={model:int(last_file[model][2:4]),
        'SCMOC':int(last_file['SCMOC'][2:4])}
    d_s={model:int(last_file[model][4:6]),
        'SCMOC':int(last_file['SCMOC'][4:6])}
    h_s={model:int(last_file[model][6:8]),
        'SCMOC':int(last_file['SCMOC'][6:8])}

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    for ifhour in fhours:
        if (ifhour == fhours[0] ):
            time_all=datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour))
        else:
            time_all=np.append(time_all,datetime(y_s['SCMOC'],m_s['SCMOC'],d_s['SCMOC'],h_s['SCMOC'])+timedelta(hours=int(ifhour)))            

    filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
    t2m=utl.get_model_points_gy(dir_rqd[9], filenames, points,allExists=False)
    
    if(name_opt[0] == 'rh2m'):
        rh2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)
        Td2m=mpcalc.dewpoint_rh(t2m['data'].values*units('degC'),rh2m['data'].values/100.)
        p_vapor=(rh2m['data'].values/100.)*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))

    if(name_opt[0] == 'Td2m'):
        Td2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)        
        rh2m=mpcalc.relative_humidity_from_dewpoint(t2m['data'].values* units('degC'),
                Td2m['data'].values* units('degC'))
        p_vapor=(np.array(rh2m))*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))
        Td2m=np.array(Td2m['data'].values)* units('degC')

    #SN06_ensm=utl.get_model_points_gy(dir_rqd[12], filenames, points,allExists=False)
    '''
    for i in range(0,len(SN06_ensm['forecast_period'])):
        SN06_std=np.std(np.squeeze(SN06_ensm['data'].values[i,:]))
        SN06_mean=np.mean(np.squeeze(SN06_ensm['data'].values[i,:]))
        if(i == 0):
            SN06_01=norm.pdf(0.01, SN06_mean, SN06_std)
            SN06_10=norm.pdf(0.1, SN06_mean, SN06_std)
            SN06_25=norm.pdf(0.25, SN06_mean, SN06_std)
            SN06_50=norm.pdf(0.5, SN06_mean, SN06_std)
            SN06_75=norm.pdf(0.75, SN06_mean, SN06_std)
            SN06_90=norm.pdf(0.9, SN06_mean, SN06_std)
            SN06_99=norm.pdf(0.99, SN06_mean, SN06_std)
        if(i > 0):
            SN06_01=[SN06_01,norm.pdf(0.01, SN06_mean, SN06_std)]
            SN06_10=[SN06_10,norm.pdf(0.1, SN06_mean, SN06_std)]
            SN06_25=[SN06_25,norm.pdf(0.25, SN06_mean, SN06_std)]
            SN06_50=[SN06_50,norm.pdf(0.5, SN06_mean, SN06_std)]
            SN06_75=[SN06_75,norm.pdf(0.75, SN06_mean, SN06_std)]
            SN06_90=[SN06_90,norm.pdf(0.9, SN06_mean, SN06_std)]
            SN06_99=[SN06_99,norm.pdf(0.99, SN06_mean, SN06_std)]

    SN06_ensm_stc={
        'SN06_01':SN06_01,
        'SN06_10':SN06_10,
        'SN06_25':SN06_25,
        'SN06_50':SN06_50,
        'SN06_75':SN06_75,
        'SN06_90':SN06_90,
        'SN06_99':SN06_99
        }
    '''
    u10m=utl.get_model_points_gy(dir_rqd[10], filenames, points,allExists=False)
    v10m=utl.get_model_points_gy(dir_rqd[11], filenames, points,allExists=False)
    wsp10m=(u10m['data']**2+v10m['data']**2)**0.5
    AT=1.07*t2m['data'].values+0.2*p_vapor-0.65*wsp10m-2.7
    #https://en.wikipedia.org/wiki/Wind_chill
    TWC=13.12+0.6215*t2m['data'].values-11.37*(wsp10m**0.16)+0.3965*t2m['data'].values*(wsp10m**0.16)

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [last_file['SCMOC']+'.'+str(fhour).zfill(3) for fhour in fhours]
    VIS=utl.get_model_points_gy(dir_rqd[6], filenames, points,allExists=False,fill_null=True,Null_value=-0.001)     

    if(last_file['SCMOC'] == last_file[model] and t_range[1] > 72):
        fhours = np.append(np.arange(3,72,3),np.arange(72, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]            

    if(last_file['SCMOC'] != last_file[model] and t_range[1] > 60):
        fhours = np.append(np.arange(3,60,3),np.arange(60, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] != last_file[model] and t_range[1] <= 60):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] == last_file[model] and t_range[1] <= 72):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    SNOD1=utl.get_model_points_gy(dir_rqd[2], filenames2, points,allExists=False)
    SNOD2=utl.get_model_points_gy(dir_rqd[7], filenames2, points,allExists=False)
    SDEN=utl.get_model_points_gy(dir_rqd[3], filenames2, points,allExists=False)
    SN06=utl.get_model_points_gy(dir_rqd[8], filenames2, points,allExists=False)
    u100m=utl.get_model_points_gy(dir_rqd[4], filenames2, points,allExists=False)
    v100m=utl.get_model_points_gy(dir_rqd[5], filenames2, points,allExists=False)
    wsp100m=(u100m['data']**2+v100m['data']**2)**0.5

    if(fhours[-1] < 120):
        gust10m=utl.get_model_points_gy(dir_rqd[0], filenames, points,allExists=False)
    if(fhours[-1] > 120):
        if(last_file['SCMOC'] == last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        if(last_file['SCMOC'] != last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        gust10m=utl.get_model_points_gy(dir_rqd[1], filenames, points,allExists=False)        
        
    sta_graphics.draw_Station_Snow_Synthetical_Forecast_From_Cassandra(
            TWC=TWC,AT=AT,u10m=u10m,v10m=v10m,u100m=u100m,v100m=v100m,
            gust10m=gust10m,wsp10m=wsp10m,wsp100m=wsp100m,SNOD1=SNOD1,SNOD2=SNOD2,SDEN=SDEN,SN06=SN06,
            draw_VIS=draw_VIS,VIS=VIS,drw_thr=drw_thr,
            time_all=time_all,
            model=model,points=points,
            output_dir=output_dir,extra_info=extra_info)
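A caution on the hand-coded wind-chill line above: the formula from the cited Wikipedia page expects wind speed in km/h at 10 m, while `wsp10m` here is presumably in m/s. MetPy's unit-aware `windchill` avoids that pitfall; a minimal sketch:

import metpy.calc as mpcalc
from metpy.units import units

t = -5 * units.degC
v = 8 * units('m/s')
wc = mpcalc.windchill(t, v)  # assumes 10 m winds; defined only for cold, windy conditions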
Example #18
relh[relh < -100.] = np.nan
tair[tair < -100.] = np.nan
wspd[wspd < -100.] = np.nan
wdir[wdir < -100.] = np.nan
wmax[wmax < -100.] = np.nan
rain[rain < -100.] = np.nan
pres[pres < -100.] = np.nan
srad[srad < -100.] = np.nan
ta9m[ta9m < -100.] = np.nan
ws2m[ws2m < -100.] = np.nan
skin[skin < -100.] = np.nan

# Convert RH to Td - already read in as degC and m/s
tair = tair * units.degC
wspd = wspd * units.m / units.s
td = mcalc.dewpoint_rh(tair[:inow+1], relh[:inow+1] / 100.)

if args.SI:
    # already in SI, just grab magnitudes
    print('--Using SI Units--')
    tair = tair.magnitude
    td = td.magnitude
    wspd = wspd.magnitude
    tlab = '$^\circ$C'
    wslab = 'm s$^{-1}$'
    tlab2 = 'degC'
    wslab2 = 'm/s'

else:
    print('--Converting to Imperial Units--')
    tair = tair.to(units.degF).magnitude
Example #20
# 500 hPa CVA
dx, dy = mpcalc.lat_lon_grid_deltas(lon, lat)
vort_adv_500 = mpcalc.advection(
    avor_500, [u_500.to('m/s'), v_500.to('m/s')],
    (dx, dy), dim_order='yx') * 1e9
vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

####################################
# For the jet axes, we will calculate the windspeed at each level, and plot the highest values
wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

################################
# 700-hPa dewpoint depression will be calculated from Temperature_isobaric and RH
Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

######################################
# 12-hr surface pressure falls and 500-hPa height changes
pmsl_change = pmsl - pmsl_00z
hgt_500_change = hgt_500 - hgt_500_00z

######################################
# To plot the jet axes, we will mask the wind fields below the upper 1/3 of windspeed.

mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
u_500[mask_500] = np.nan
v_500[mask_500] = np.nan

# 300 hPa
mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
Example #21
def read_process_write(filelist, storm):
    """
        ** Name of function says all. **

        *Parameters*

        filelist: `list`
        	Name of storm.
        track: `dict`
        	Dictionary with track. Output of :meth:`flightdata.trackandspeed`.
        storm: `string`
            Name of storm.

        .. note::

            This function makes use of several functions from :meth:`toolbox`, including: :meth:`toolbox.distance`
            It is also important to mention that this function makes use of the programming language Julia by interacting with the operating system and running the Julia script.




        *Returns*
        	temp_axisym.txt:`file` written file with Output fields.

        """
    #Sort filelist.
    filelist = np.sort(filelist)
    print(filelist)
    # Get sampling periods (this will be a dictionary). See the toolbox
    print('Retrieving sampling periods')
    sampleperiods = getsamplingperiods(filelist, 2.7)
    # Override with the hard-coded sampling periods for this case.
    sampleperiods = {
        datetime.datetime(1998, 9, 19, 16, 33, 41):
        datetime.datetime(1998, 9, 19, 21, 0, 0),
        datetime.datetime(1998, 9, 19, 21, 0, 0):
        datetime.datetime(1998, 9, 20, 5, 3, 0)
    }
    omega = 7.2921 * (10**-5)
    # Iterate over all sampling periods.
    for sampindex, periodskey in enumerate(sampleperiods):

        #Allocate starting (stdt) and ending date (endt). Remember dt is the conventional short name for date.
        stdt = periodskey
        endt = sampleperiods[periodskey]

        # Define sampling period string
        if stdt.hour >= 10:
            hh = str(stdt.hour)
        else:
            hh = '0' + str(stdt.hour)
        if stdt.day >= 10:
            dd = str(stdt.day)
        else:
            dd = '0' + str(stdt.day)
        if endt.hour < 10:
            fhh = '0' + str(endt.hour)
        else:
            fhh = str(endt.hour)
        if endt.day < 10:
            fdd = '0' + str(endt.day)
        else:
            fdd = str(endt.day)
        period = dd + hh + '-' + fdd + fhh
        print(period)
        # possible user print
        print(stdt, endt)

        # Create new-empty lists.
        lats = []
        lons = []

        # Remove outputfile
        os.system('rm temp_axisym.txt')
        dropsincore = 0
        print('start filelist loop')
        # Iterate over all files.
        for filename in filelist:

            # Select end-name of file by inspecting filename string. Notice how filename can change how file is read.
            if 'radazm' in filename.split('/')[-1] or 'eol' in filename.split(
                    '/')[-1]:
                end = 'radazm'
            else:
                end = 'avp'
            # Obtain properties of file, i.e., launch time and location into a dictionary (dicc).
            dicc = findproperties(filename, end)

            # Skip this file if its launch time falls outside the sampling period.
            if dicc['Launch Time'] < stdt or dicc['Launch Time'] > endt:
                continue

            # Allocate reading parameters.
            if end == 'avp':
                # This section (not used unless indicated by user) is made for the raw datasets or dropsondes downloaded individually.
                # Read format
                head = 6
                foot = 19
                # Read the file into a numpy array, with exception handling.
                try:
                    nump = np.genfromtxt(filename,
                                         skip_header=head,
                                         skip_footer=foot)
                except:
                    continue
                # Allocate longitude and latitude.
                lon = nump[:, 11]
                lat = nump[:, 12]
                # Allocate Temperature (T), Pressure (P), Height (H).
                T = nump[:, 6]
                P = nump[:, 5]
                H = nump[:, 13]
                #Allocate Relative humidity. (RH)
                RH = nump[:, 7]

                # Allocate wind magnitude.
                ur = nump[:, 9]
                # Process wind speed to eliminate false values.
                ur = cleanu(clean1(ur))

                # Allocate wind direction.
                udir = nump[:, 8]
                # Process wind direction.
                udir = clean1(udir)
                rfile = nump[:, 17]
                azifile = nump[:, 18]
                # Divide absolute wind speed and wind direction into u-v speeds.
                u = -ur * np.sin(np.pi * udir / 180)
                v = -ur * np.cos(np.pi * udir / 180)

                # Allocate vertical velocity.
                w = nump[:, 10]

                # Obtain year/month/day and hour/minute/second values.
                yymmdd = nump[:, 3]
                hhmmss = nump[:, 4]
                # Convert to single values, hours minutes and seconds.
                hours, minutes, seconds = timeconversion(hhmmss)

            # Radazm is the label used for the typical dropsonde files used by this computer project.
            # As such, more detailed is given for this part of the conditional argument.
            elif end == 'radazm':
                # File read parameters.
                head = 16
                foot = 0

                # Allocate filename fields into a numpy array (nump)
                nump = np.genfromtxt(filename,
                                     skip_header=head,
                                     skip_footer=foot)

                # Allocate Temperature (T), Pressure (P), Height (H).
                T = nump[:, 5]
                P = nump[:, 4]
                H = nump[:, 13]

                # Allocate time arrays.
                hours = nump[:, 1]
                minutes = nump[:, 2]
                seconds = nump[:, 3]

                # Allocate Relative Humidity (RH), u and v wind speeds.
                RH = nump[:, 7]
                u = nump[:, 8]
                v = nump[:, 9]

                # Get wind direction (not used) but skillful user might be interested.
                udir = nump[:, 11]

                # Obtain vertical windspeed.
                w = nump[:, -1]
                rfile = nump[:, 17]
                azifile = nump[:, 18]
                # Get longitude and latitude.
                lon = nump[:, 14]
                lat = nump[:, 15]

            # Clean longitude and latitude from possible nan values.
            lon = clean1(lon)
            lat = clean1(lat)
            # Obtain and round a mean location, used below to drop environmental
            # dropsondes and keep inner-core measurements.
            mlon = np.nanmean(lon)
            mlat = np.nanmean(lat)
            lati = np.around(mlat, 4)
            longi = np.around(mlon, 4)

            # Obtain mean radius (r) and azimuth (theta)
            #                r,theta=cart_to_cylindr(mlon,mlat,track,dicc['Launch Time'])

            # Continue condition only if near-inner core dropsonde.
            #                if r>290:
            #                	continue
            dropsincore += 1
            # print filenames used.
            print(filename)

            # Append longitudes and latitudes to list, useful for quality control on this sampling period.
            lats.append(lati)
            lons.append(longi)

            # Clean arrays and save to arrays.
            v_speed = clean2(cleanu(clean1(v)))
            pressure = clean2(cleanu(clean1(P)))
            height = clean2(clean1(H))
            temperature = cleanu(clean2(clean1(T)))
            u_speed = clean2(cleanu(clean1(u)))
            w_speed = cleanu(clean2(clean1(w)))
            relhum = clean2(clean1(RH))
            #if end=='radazm':
            #        height=getHiso(pressure,temperature,height)
            # Estimate dewpoint using metpy.
            dewpoint = mpcalc.dewpoint_rh((temperature + 273) * units.kelvin,
                                          relhum / 100.)

            # Get equivalent_potential_temperature
            theta_e = equivalent_potential_temperature(
                pressure * units.hPa, (temperature + 273) * units.kelvin,
                dewpoint)

            # Retrieve potential temperature.
            pot_temp = potential_temperature(
                pressure * units.mbar, (temperature + 273) * units.kelvin)

            # Get storm velocity for this sounding.
            #ustorm,vstorm=stormu(u[1],v[1],dicc['Launch Time'],track[3])

            # Iterate over the observations. The longitude vector is used for
            # simplicity; all vectors (height, u_speed, etc.) have the same
            # length, so any of them could drive the loop.
            for j, longi in enumerate(lon):

                # Get closest date using time arrays. Define datetime object.
                date = datetime.datetime(dicc['Launch Time'].year,
                                         dicc['Launch Time'].month,
                                         dicc['Launch Time'].day,
                                         int(hours[j]), int(minutes[j]),
                                         int(seconds[j]))

                # Check for NaN: if the observation cannot be located, skip it.
                if np.isnan(longi) or np.isnan(lat[j]):
                    continue

                # Try and obtain radius and azimuth for current observation.
#                    try:
#                        r,theta=cart_to_cylindr(longi,lat[j],track,date)
#                    except:
#                        continue
                r = rfile[j]

                theta = radians(-azifile[j] + 90)
                #        print(azifile[j],theta,radians(azifile[j]-90))
                # Correct u_speed, make it storm relative.
                u_speed[j] = u_speed[j]  #-ustorm
                v_speed[j] = v_speed[j]  #-vstorm

                # Check for nans in fields. If nans, continue.
                if np.isnan(r) or np.isnan(theta) or np.isnan(
                        u[j]) or np.isnan(H[j]) or np.isnan(v[j]):
                    continue

                coriolis_f = (2 * omega * np.sin(radians(lat[j]))) * 10**(2)
                # After all checks, write file.
                #print('writing to file')

                # Open file to append.
                f = open('temp_axisym.txt', 'a')

                # Write all rounded fields to file. A dictionary and a for loop could
                # shorten the following write, but would use more lines and more memory.
                f.write(
                    str(np.around(r, 5)) + '\t' + str(np.around(theta, 5)) +
                    '\t' + str(np.around(u_speed[j], 3)) + '\t' +
                    str(np.around(v_speed[j], 3)) + '\t' +
                    str(np.around(w_speed[j], 4)) + '\t' +
                    str(np.around(height[j], 2)) + '\t' +
                    str(np.around(pot_temp[j].magnitude, 3)) + '\t' +
                    str(np.around(theta_e[j].magnitude, 3)) + '\t' +
                    str(np.around(pressure[j], 3)) + '\t' +
                    str(np.around(coriolis_f, 4)) + '\n')

                # Close file object (f).
                f.close()
        print('drops in core ' + str(dropsincore))
        print('end of filelist loop')

        # Require at least one good dropsonde in the sampling period before calling Julia.
        if len(lats) < 1:
            os.system('rm temp_axisym.txt')
            continue

        # Why use Julia? you might ask.
        # Why are you not using Julia? I would reply.
        print('Go to Julia @')
        print('julia urutheta.jl')
        os.system('julia urutheta.jl')
        os.system('cp tempjulia.txt outfiles/' + storm + period + '.txt')

        print('starting plotting sequence')
        # Call extra processing or plotting routines.
        #print('python3 3Dfields.py %s %s' % (storm,endt.time()))
        #            os.system('python3 gradientwind.py %s %s' % (storm,period))

        # Remove temporary file.
        os.system('rm temp_axisym.txt')
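One detail worth flagging in the example above: `(temperature + 273) * units.kelvin` is an approximate degC-to-K conversion that drops 0.15 K. With pint the conversion is exact, and MetPy's thermodynamic functions accept any temperature unit:

import metpy.calc as mpcalc
from metpy.units import units

T = (15. * units.degC).to('kelvin')  # exactly 288.15 K
theta = mpcalc.potential_temperature(850. * units.hPa, T)  # roughly 302 K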
Example #22
def Miller_Composite_Chart(initial_time=None,
                           fhour=24,
                           day_back=0,
                           model='GRAPES_GFS',
                           map_ratio=19 / 9,
                           zoom_ratio=20,
                           cntr_pnt=[102, 34],
                           Global=False,
                           south_China_sea=True,
                           area='全国',
                           city=False,
                           output_dir=None):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='300'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='500'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl='850'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl='700'),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='BLI'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='Td2m'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PRMSL')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if (initial_time != None):
        filename = utl.model_filename(initial_time, fhour)
        filename2 = utl.model_filename(initial_time, fhour - 12)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)
        filename2 = utl.filename_day_back_model(day_back=day_back,
                                                fhour=fhour - 12)

    # retrieve data from micaps server
    rh_700 = get_model_grid(directory=data_dir[0], filename=filename)
    if rh_700 is None:
        return

    u_300 = get_model_grid(directory=data_dir[1], filename=filename)
    if u_300 is None:
        return

    v_300 = get_model_grid(directory=data_dir[2], filename=filename)
    if v_300 is None:
        return

    u_500 = get_model_grid(directory=data_dir[3], filename=filename)
    if u_500 is None:
        return

    v_500 = get_model_grid(directory=data_dir[4], filename=filename)
    if v_500 is None:
        return

    u_850 = get_model_grid(directory=data_dir[5], filename=filename)
    if u_850 is None:
        return

    v_850 = get_model_grid(directory=data_dir[6], filename=filename)
    if v_850 is None:
        return

    t_700 = get_model_grid(directory=data_dir[7], filename=filename)
    if t_700 is None:
        return

    hgt_500 = get_model_grid(directory=data_dir[8], filename=filename)
    if hgt_500 is None:
        return

    hgt_500_2 = get_model_grid(directory=data_dir[8], filename=filename2)
    if hgt_500_2 is None:
        return

    BLI = get_model_grid(directory=data_dir[9], filename=filename)
    if BLI is None:
        return

    Td2m = get_model_grid(directory=data_dir[10], filename=filename)
    if Td2m is None:
        return

    PRMSL = get_model_grid(directory=data_dir[11], filename=filename)
    if PRMSL is None:
        return

    PRMSL2 = get_model_grid(directory=data_dir[11], filename=filename2)
    if PRMSL2 is None:
        return

    lats = np.squeeze(rh_700['lat'].values)
    lons = np.squeeze(rh_700['lon'].values)
    x, y = np.meshgrid(rh_700['lon'], rh_700['lat'])

    tmp_700 = t_700['data'].values.squeeze() * units('degC')
    u_300 = (u_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_300 = (v_300['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_500 = (u_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_500 = (v_500['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    u_850 = (u_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    v_850 = (v_850['data'].values.squeeze() * units.meter /
             units.second).to('kt')
    hgt_500 = (hgt_500['data'].values.squeeze()) * 10 / 9.8 * units.meter
    rh_700 = rh_700['data'].values.squeeze()
    lifted_index = BLI['data'].values.squeeze() * units.kelvin
    Td_sfc = Td2m['data'].values.squeeze() * units('degC')
    dx, dy = mpcalc.lat_lon_grid_deltas(lons, lats)

    avor_500 = mpcalc.absolute_vorticity(u_500, v_500, dx, dy,
                                         y * units.degree)
    pmsl = PRMSL['data'].values.squeeze() * units('hPa')

    hgt_500_2 = (hgt_500_2['data'].values.squeeze()) * 10 / 9.8 * units.meter
    pmsl2 = PRMSL2['data'].values.squeeze() * units('hPa')

    # 500 hPa CVA
    vort_adv_500 = mpcalc.advection(
        avor_500, [u_500.to('m/s'), v_500.to('m/s')],
        (dx, dy), dim_order='yx') * 1e9
    vort_adv_500_smooth = gaussian_filter(vort_adv_500, 4)

    wspd_300 = gaussian_filter(mpcalc.wind_speed(u_300, v_300), 5)
    wspd_500 = gaussian_filter(mpcalc.wind_speed(u_500, v_500), 5)
    wspd_850 = gaussian_filter(mpcalc.wind_speed(u_850, v_850), 5)

    Td_dep_700 = tmp_700 - mpcalc.dewpoint_rh(tmp_700, rh_700 / 100.)

    pmsl_change = pmsl - pmsl2
    hgt_500_change = hgt_500 - hgt_500_2

    mask_500 = ma.masked_less_equal(wspd_500, 0.66 * np.max(wspd_500)).mask
    u_500[mask_500] = np.nan
    v_500[mask_500] = np.nan

    # 300 hPa
    mask_300 = ma.masked_less_equal(wspd_300, 0.66 * np.max(wspd_300)).mask
    u_300[mask_300] = np.nan
    v_300[mask_300] = np.nan

    # 850 hPa
    mask_850 = ma.masked_less_equal(wspd_850, 0.66 * np.max(wspd_850)).mask
    u_850[mask_850] = np.nan
    v_850[mask_850] = np.nan

    # prepare data
    if (area != None):
        cntr_pnt, zoom_ratio = utl.get_map_area(area_name=area)

    map_extent = [0, 0, 0, 0]
    map_extent[0] = cntr_pnt[0] - zoom_ratio * 1 * map_ratio
    map_extent[1] = cntr_pnt[0] + zoom_ratio * 1 * map_ratio
    map_extent[2] = cntr_pnt[1] - zoom_ratio * 1
    map_extent[3] = cntr_pnt[1] + zoom_ratio * 1

    delt_x = (map_extent[1] - map_extent[0]) * 0.2
    delt_y = (map_extent[3] - map_extent[2]) * 0.1

    #+ to solve the problem of labels on all the contours
    idx_x1 = np.where((lons > map_extent[0] - delt_x)
                      & (lons < map_extent[1] + delt_x))
    idx_y1 = np.where((lats > map_extent[2] - delt_y)
                      & (lats < map_extent[3] + delt_y))

    fcst_info = {
        'lon': lons,
        'lat': lats,
        'fhour': fhour,
        'model': model,
        'init_time': t_700.coords['forecast_reference_time'].values
    }

    synthetical_graphics.draw_Miller_Composite_Chart(
        fcst_info=fcst_info,
        u_300=u_300,
        v_300=v_300,
        u_500=u_500,
        v_500=v_500,
        u_850=u_850,
        v_850=v_850,
        pmsl_change=pmsl_change,
        hgt_500_change=hgt_500_change,
        Td_dep_700=Td_dep_700,
        Td_sfc=Td_sfc,
        pmsl=pmsl,
        lifted_index=lifted_index,
        vort_adv_500_smooth=vort_adv_500_smooth,
        map_extent=map_extent,
        add_china=True,
        city=False,
        south_China_sea=True,
        output_dir=None,
        Global=False)

# In[6]:

# read in the datasets and prepare dataset for input to methods
# such as calculating dewpoint and converting units
ds = xr.open_mfdataset(
    ['air.sig995.2018.nc', 'rhum.sig995.2018.nc', 'slp.2018.nc']).rename(
        {'air': 'tmp', 'rhum': 'rlh'}).load()

ds['dpt'] = (['time', 'lat', 'lon'],
             dewpoint_rh(ds['tmp'].values * units('degK'),
                         ds['rlh'].values / 100).m)
ds['tmp'].data = (ds['tmp'].values * units('degK')).to('degC').m
ds['slp'].data = (ds['slp'].values * units('Pa')).to('hPa').m
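Note on the pattern above: the `.m` (magnitude) accessor and the assignments to `.data` deliberately strip the pint units before the arrays go back onto the Dataset, since plain xarray variables cannot hold unit-aware Quantities in this workflow; the units are reattached on the fly wherever a calculation needs them.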

# In[7]:

# subset to every 10 degrees latitudinally and longitudinally (every 4th
# grid point) and to the 10th and 20th day of each month, 4 times a day,
# because calculating the wet-bulb temperature on the full gridded dataset
# with Normand's method (an iterative, trial-and-error loop) takes hours
ds_sub = ds.sel(time=ds['time.day'].isin([10, 20]),
                lat=ds['lat'][::4],
                lon=ds['lon'][::4])
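
# For reference, this is roughly what the expensive step looks like for a
# single point, using MetPy's wet_bulb_temperature (which implements the
# Normand's-rule lift-to-the-LCL-and-descend iteration). The 995 hPa pressure
# and the point values are illustrative assumptions, since the sig995 files
# carry no explicit pressure variable.
from metpy.calc import wet_bulb_temperature
from metpy.units import units

p = 995. * units.hPa
t = 25. * units.degC
td = 18. * units.degC
print(wet_bulb_temperature(p, t, td))  # roughly 20 degC, between td and t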

# In[ ]:
Example #24
0
        # Append longitudes and latitudes to lists, useful for quality control on this sampling period.
        lats.append(lati)
        lons.append(longi)

        # Clean the raw arrays and store the results.
        v_speed = clean2(cleanu(clean1(v)))
        pressure = clean2(cleanu(clean1(P)))
        height = clean2(clean1(H))
        temperature = cleanu(clean2(clean1(T)))
        u_speed = clean2(cleanu(clean1(u)))
        w_speed = cleanu(clean2(clean1(w)))
        relhum = clean2(clean1(RH))
        #if end=='radazm':
        #        height=getHiso(pressure,temperature,height)
        # Estimate dewpoint using metpy.
        dewpoint = mpcalc.dewpoint_rh((temperature + 273) * units.kelvin,
                                      relhum / 100.)

        # Get equivalent_potential_temperature
        theta_e = equivalent_potential_temperature(
            pressure * units.hPa, (temperature + 273) * units.kelvin, dewpoint)

        # Retrieve potential temperature.
        pot_temp = potential_temperature(pressure * units.mbar,
                                         (temperature + 273) * units.kelvin)

        # Get storm velocity for this sounding.
        ustorm, vstorm = stormu(u[1], v[1], dicc['Launch Time'], track[3])

        # Iterate over the vectors; the longitude vector is used for simplicity,
        # but all vectors (height, u_speed, etc.) have the same length, so the
        # same loop syntax works with any of them.
        for j, longi in enumerate(lon):
Example #25
0
def test_scalar_dewpoint_rh():
    """Test dewpoint_rh with scalar values."""
    td = dewpoint_rh(10.6 * units.degC, 0.37)
    assert_almost_equal(td, 26. * units.degF, 0)
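Note that dewpoint_rh accepts the relative humidity as a plain fraction here: 10.6 degC at 37 % humidity gives a dewpoint near -3.5 degC, i.e. roughly 26 degF, so the assertion passes at zero decimal places even though the expected value is expressed in different units.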
Example #26
0
temp = testdata['T']
pres = testdata['P']
rh = testdata['RH']
ws = testdata['WS']
wsmax = testdata['WSMAX']
wd = testdata['WD']
date = testdata['DATE']

# ID For Plotting on Meteogram
probe_id = '0102A'

data = dict()
data['wind_speed'] = (np.array(ws) * units('m/s')).to(units('knots'))
data['wind_speed_max'] = (np.array(wsmax) * units('m/s')).to(units('knots'))
data['wind_direction'] = np.array(wd) * units('degrees')
data['dewpoint'] = dewpoint_rh((np.array(temp) * units('degC')).to(units('K')),
                               np.array(rh) / 100.).to(units('degF'))
data['air_temperature'] = (np.array(temp) * units('degC')).to(units('degF'))
data['mean_slp'] = calc_mslp(np.array(temp), np.array(pres), hgt_example) * units('hPa')
data['relative_humidity'] = np.array(rh)
data['times'] = np.array(date)

fig = plt.figure(figsize=(20, 16))
meteogram = Meteogram(fig, data['times'], probe_id)
meteogram.plot_winds(data['wind_speed'], data['wind_direction'], data['wind_speed_max'])
meteogram.plot_thermo(data['air_temperature'], data['dewpoint'])
meteogram.plot_rh(data['relative_humidity'])
meteogram.plot_pressure(data['mean_slp'])
fig.subplots_adjust(hspace=0.5)
plt.show()
def Crosssection_Wind_Theta_e_Qv(
        initial_time=None,
        fhour=24,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        day_back=0,
        model='ECMWF',
        output_dir=None,
        st_point=[20, 120.0],
        ed_point=[50, 130.0],
        map_extent=[70, 140, 15, 55],
        h_pos=[0.125, 0.665, 0.25, 0.2]):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if initial_time is not None:
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    rh = get_model_3D_grid(directory=data_dir[0][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if rh is None:
        return
    rh = rh.metpy.parse_cf().squeeze()

    u = get_model_3D_grid(directory=data_dir[1][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if u is None:
        return
    u = u.metpy.parse_cf().squeeze()

    v = get_model_3D_grid(directory=data_dir[2][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if v is None:
        return
    v = v.metpy.parse_cf().squeeze()

    v2 = get_model_3D_grid(directory=data_dir[2][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if v2 is None:
        return
    v2 = v2.metpy.parse_cf().squeeze()

    t = get_model_3D_grid(directory=data_dir[3][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if t is None:
        return
    t = t.metpy.parse_cf().squeeze()

    gh = get_model_grid(data_dir[4], filename=filename)
    if gh is None:
        return

    resolution = u['lon'][1] - u['lon'][0]
    x, y = np.meshgrid(u['lon'], u['lat'])

    dx, dy = mpcalc.lat_lon_grid_deltas(u['lon'], u['lat'])
    for ilvl in levels:
        u2d = u.sel(level=ilvl)
        #u2d['data'].attrs['units']=units.meter/units.second
        v2d = v.sel(level=ilvl)
        #v2d['data'].attrs['units']=units.meter/units.second

        absv2d = mpcalc.absolute_vorticity(
            u2d['data'].values * units.meter / units.second,
            v2d['data'].values * units.meter / units.second, dx, dy,
            y * units.degree)

        if ilvl == levels[0]:
            absv3d = v2
        absv3d['data'].loc[dict(level=ilvl)] = np.array(absv2d)
    absv3d['data'].attrs['units'] = absv2d.units

    #rh=rh.rename(dict(lat='latitude',lon='longitude'))
    cross = cross_section(rh, st_point, ed_point)
    cross_rh = cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u = cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v = cross.set_coords(('lat', 'lon'))

    cross_u['data'].attrs['units'] = units.meter / units.second
    cross_v['data'].attrs['units'] = units.meter / units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(
        cross_u['data'], cross_v['data'])

    cross = cross_section(t, st_point, ed_point)
    cross_t = cross.set_coords(('lat', 'lon'))
    cross = cross_section(absv3d, st_point, ed_point)

    cross_Td = mpcalc.dewpoint_rh(cross_t['data'].values * units.celsius,
                                  cross_rh['data'].values * units.percent)

    rh, pressure = xr.broadcast(cross_rh['data'], cross_t['level'])

    Qv = mpcalc.specific_humidity_from_dewpoint(cross_Td, pressure)

    cross_Qv = xr.DataArray(np.array(Qv) * 1000.,
                            coords=cross_rh['data'].coords,
                            dims=cross_rh['data'].dims,
                            attrs={'units': units('g/kg')})

    Theta_e = mpcalc.equivalent_potential_temperature(
        pressure, cross_t['data'].values * units.celsius, cross_Td)

    cross_Theta_e = xr.DataArray(np.array(Theta_e),
                                 coords=cross_rh['data'].coords,
                                 dims=cross_rh['data'].dims,
                                 attrs={'units': Theta_e.units})

    crossection_graphics.draw_Crosssection_Wind_Theta_e_Qv(
        cross_Qv=cross_Qv,
        cross_Theta_e=cross_Theta_e,
        cross_u=cross_u,
        cross_v=cross_v,
        gh=gh,
        h_pos=h_pos,
        st_point=st_point,
        ed_point=ed_point,
        levels=levels,
        map_extent=map_extent,
        output_dir=output_dir)
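
# For orientation, a hypothetical invocation of the function above, simply
# restating its own defaults (it only runs where the MICAPS/Cassandra
# directories actually resolve):
Crosssection_Wind_Theta_e_Qv(model='ECMWF', fhour=24,
                             st_point=[20, 120.0], ed_point=[50, 130.0],
                             map_extent=[70, 140, 15, 55])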
Example #28
0
# Temporary variables for ease
temp = testdata['T']
pres = testdata['P']
rh = testdata['RH']
ws = testdata['WS']
wsmax = testdata['WSMAX']
wd = testdata['WD']
date = testdata['DATE']

# ID For Plotting on Meteogram
probe_id = '0102A'

data = {'wind_speed': (np.array(ws) * units('m/s')).to(units('knots')),
        'wind_speed_max': (np.array(wsmax) * units('m/s')).to(units('knots')),
        'wind_direction': np.array(wd) * units('degrees'),
        'dewpoint': dewpoint_rh((np.array(temp) * units('degC')).to(units('K')),
                                np.array(rh) / 100.).to(units('degF')),
        'air_temperature': (np.array(temp) * units('degC')).to(units('degF')),
        'mean_slp': calc_mslp(np.array(temp), np.array(pres), hgt_example) * units('hPa'),
        'relative_humidity': np.array(rh), 'times': np.array(date)}

fig = plt.figure(figsize=(20, 16))
add_metpy_logo(fig, 250, 180)
meteogram = Meteogram(fig, data['times'], probe_id)
meteogram.plot_winds(data['wind_speed'], data['wind_direction'], data['wind_speed_max'])
meteogram.plot_thermo(data['air_temperature'], data['dewpoint'])
meteogram.plot_rh(data['relative_humidity'])
meteogram.plot_pressure(data['mean_slp'])
fig.subplots_adjust(hspace=0.5)
plt.show()
def Crosssection_Wind_Temp_RH(
        initial_time=None,
        fhour=24,
        levels=[1000, 950, 925, 900, 850, 800, 700, 600, 500, 400, 300, 200],
        day_back=0,
        model='ECMWF',
        output_dir=None,
        st_point=[43.5, 111.5],
        ed_point=[33, 125.0],
        map_extent=[70, 140, 15, 55],
        h_pos=[0.125, 0.665, 0.25, 0.2]):

    # micaps data directory
    try:
        data_dir = [
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='RH',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='UGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='VGRD',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='TMP',
                              lvl=''),
            utl.Cassandra_dir(data_type='high',
                              data_source=model,
                              var_name='HGT',
                              lvl='500'),
            utl.Cassandra_dir(data_type='surface',
                              data_source=model,
                              var_name='PSFC')
        ]
    except KeyError:
        raise ValueError('Can not find all directories needed')

    # get filename
    if initial_time is not None:
        filename = utl.model_filename(initial_time, fhour)
    else:
        filename = utl.filename_day_back_model(day_back=day_back, fhour=fhour)

    # retrieve data from micaps server
    rh = get_model_3D_grid(directory=data_dir[0][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if rh is None:
        return
    rh = rh.metpy.parse_cf().squeeze()

    u = get_model_3D_grid(directory=data_dir[1][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if u is None:
        return
    u = u.metpy.parse_cf().squeeze()

    v = get_model_3D_grid(directory=data_dir[2][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if v is None:
        return
    v = v.metpy.parse_cf().squeeze()

    v2 = get_model_3D_grid(directory=data_dir[2][0:-1],
                           filename=filename,
                           levels=levels,
                           allExists=False)
    if v2 is None:
        return
    v2 = v2.metpy.parse_cf().squeeze()

    t = get_model_3D_grid(directory=data_dir[3][0:-1],
                          filename=filename,
                          levels=levels,
                          allExists=False)
    if t is None:
        return
    t = t.metpy.parse_cf().squeeze()

    gh = get_model_grid(data_dir[4], filename=filename)
    if gh is None:
        return

    psfc = get_model_grid(data_dir[5], filename=filename)
    psfc = psfc.metpy.parse_cf().squeeze()

    mask1 = ((psfc['lon'] >= t['lon'].values.min()) &
             (psfc['lon'] <= t['lon'].values.max()) &
             (psfc['lat'] >= t['lat'].values.min()) &
             (psfc['lat'] <= t['lat'].values.max()))

    t2, psfc_bdcst = xr.broadcast(t['data'], psfc['data'].where(mask1,
                                                                drop=True))
    mask2 = (psfc_bdcst > -10000)
    psfc_bdcst = psfc_bdcst.where(mask2, drop=True)
    #psfc_bdcst=psfc_bdcst.metpy.parse_cf().squeeze()

    resolution = u['lon'][1] - u['lon'][0]
    x, y = np.meshgrid(u['lon'], u['lat'])

    dx, dy = mpcalc.lat_lon_grid_deltas(u['lon'], u['lat'])

    #rh=rh.rename(dict(lat='latitude',lon='longitude'))
    cross = cross_section(rh, st_point, ed_point)
    cross_rh = cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u = cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v = cross.set_coords(('lat', 'lon'))

    cross_psfc = cross_section(psfc_bdcst, st_point, ed_point)
    #cross_psfc=cross.set_coords(('lat', 'lon'))

    cross_u['data'].attrs['units'] = units.meter / units.second
    cross_v['data'].attrs['units'] = units.meter / units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(
        cross_u['data'], cross_v['data'])

    cross = cross_section(t, st_point, ed_point)
    cross_Temp = cross.set_coords(('lat', 'lon'))

    cross_Td = mpcalc.dewpoint_rh(cross_Temp['data'].values * units.celsius,
                                  cross_rh['data'].values * units.percent)

    rh, pressure = xr.broadcast(cross_rh['data'], cross_Temp['level'])
    cross_terrain = pressure - cross_psfc

    crossection_graphics.draw_Crosssection_Wind_Temp_RH(
        cross_rh=cross_rh,
        cross_Temp=cross_Temp,
        cross_u=cross_u,
        cross_v=cross_v,
        cross_terrain=cross_terrain,
        gh=gh,
        h_pos=h_pos,
        st_point=st_point,
        ed_point=ed_point,
        levels=levels,
        map_extent=map_extent,
        model=model,
        output_dir=output_dir)
Example #30
0
def Crosssection_Wind_Temp_RH(
    initTime=None, fhour=24,
    levels=[1000, 950, 925, 900, 850, 800, 700,600,500,400,300,200],
    day_back=0,model='ECMWF',data_source='MICAPS',
    output_dir=None,
    st_point = [43.5, 111.5],
    ed_point = [33, 125.0],
    map_extent=[70,140,15,55],
    lw_ratio=[16,9],
    h_pos=[0.125, 0.665, 0.25, 0.2], **kwargs):

    if(data_source == 'MICAPS'):
        try:
            data_dir = [utl.Cassandra_dir(data_type='high',data_source=model,var_name='RH',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='UGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='VGRD',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='TMP',lvl=''),
                        utl.Cassandra_dir(data_type='high',data_source=model,var_name='HGT',lvl='500'),
                        utl.Cassandra_dir(data_type='surface',data_source=model,var_name='PSFC')]
        except KeyError:
            raise ValueError('Can not find all directories needed')

        if initTime is not None:
            filename = utl.model_filename(initTime, fhour)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour)
            
        rh=get_model_3D_grid(directory=data_dir[0][0:-1],filename=filename,levels=levels, allExists=False)
        u=get_model_3D_grid(directory=data_dir[1][0:-1],filename=filename,levels=levels, allExists=False)
        v=get_model_3D_grid(directory=data_dir[2][0:-1],filename=filename,levels=levels, allExists=False)
        t=get_model_3D_grid(directory=data_dir[3][0:-1],filename=filename,levels=levels, allExists=False)
        gh=get_model_grid(data_dir[4], filename=filename)
        psfc=get_model_grid(data_dir[5], filename=filename)

    if(data_source == 'CIMISS'):
        if(initTime != None):
            filename = utl.model_filename(initTime, fhour,UTC=True)
        else:
            filename=utl.filename_day_back_model(day_back=day_back,fhour=fhour,UTC=True)
            
        try:
            rh=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='RHU'),
                        fcst_levels=levels, fcst_ele="RHU", units='%')

            u=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIU'),
                        fcst_levels=levels, fcst_ele="WIU", units='m/s')
                
            v=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='WIV'),
                        fcst_levels=levels, fcst_ele="WIV", units='m/s')

            t=CMISS_IO.cimiss_model_3D_grid(init_time_str='20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='TEM'),
                        fcst_levels=levels, fcst_ele="TEM", units='K')
            t['data'].values=t['data'].values-273.15

            gh=CMISS_IO.cimiss_model_by_time('20'+filename[0:8],valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='GPH'),
                        fcst_level=500, fcst_ele="GPH", units='gpm')
            gh['data'].values=gh['data'].values/10.

            psfc=CMISS_IO.cimiss_model_by_time('20'+filename[0:8], valid_time=fhour,
                        data_code=utl.CMISS_data_code(data_source=model,var_name='PRS'),
                        fcst_level=0, fcst_ele="PRS", units='Pa')
            psfc['data']=psfc['data']/100.

        except KeyError:
            raise ValueError('Can not find all data needed') 

    rh = rh.metpy.parse_cf().squeeze()
    u = u.metpy.parse_cf().squeeze()
    v = v.metpy.parse_cf().squeeze()
    t = t.metpy.parse_cf().squeeze()
    psfc=psfc.metpy.parse_cf().squeeze()

    #if(psfc['lon'].values[0] != t['lon'].values[0]):
    mask1 = (
            (psfc['lon']>=t['lon'].values.min())&
            (psfc['lon']<=t['lon'].values.max())&
            (psfc['lat']>=t['lat'].values.min())&
            (psfc['lat']<=t['lat'].values.max())
            )

    t2,psfc_bdcst=xr.broadcast(t['data'],psfc['data'].where(mask1, drop=True))
    mask2=(psfc_bdcst > -10000)
    psfc_bdcst=psfc_bdcst.where(mask2, drop=True)
    #else:
    #    psfc_bdcst=psfc['data'].copy

    resolution=u['lon'][1]-u['lon'][0]
    x,y=np.meshgrid(u['lon'], u['lat'])

    dx,dy=mpcalc.lat_lon_grid_deltas(u['lon'],u['lat'])

    cross = cross_section(rh, st_point, ed_point)
    cross_rh=cross.set_coords(('lat', 'lon'))
    cross = cross_section(u, st_point, ed_point)
    cross_u=cross.set_coords(('lat', 'lon'))
    cross = cross_section(v, st_point, ed_point)
    cross_v=cross.set_coords(('lat', 'lon'))
    
    cross_psfc = cross_section(psfc_bdcst, st_point, ed_point)

    cross_u['data'].attrs['units']=units.meter/units.second
    cross_v['data'].attrs['units']=units.meter/units.second
    cross_u['t_wind'], cross_v['n_wind'] = mpcalc.cross_section_components(cross_u['data'],cross_v['data'])
    
    cross = cross_section(t, st_point, ed_point)
    cross_Temp=cross.set_coords(('lat', 'lon'))

    cross_Td = mpcalc.dewpoint_rh(cross_Temp['data'].values*units.celsius,
                cross_rh['data'].values* units.percent)

    rh,pressure = xr.broadcast(cross_rh['data'],cross_Temp['level'])
    cross_terrain=pressure-cross_psfc

    crossection_graphics.draw_Crosssection_Wind_Temp_RH(
                    cross_rh=cross_rh, cross_Temp=cross_Temp, cross_u=cross_u,
                    cross_v=cross_v,cross_terrain=cross_terrain,gh=gh,
                    h_pos=h_pos,st_point=st_point,ed_point=ed_point,lw_ratio=lw_ratio,
                    levels=levels,map_extent=map_extent,model=model,
                    output_dir=output_dir)
Example #31
0
def Station_Synthetical_Forecast_From_Cassandra(
        model='ECMWF',
        output_dir=None,
        t_range=[0,84],
        t_gap=3,
        points={'lon':[116.3833], 'lat':[39.9]},
        initTime=None,
        draw_VIS=True,drw_thr=False,
        extra_info={
            'output_head_name':' ',
            'output_tail_name':' ',
            'point_name':' '}
            ):

    # get all the directories needed
    try:
        dir_rqd=[ 
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_3_HOURS/",
                "ECMWF_HR/10_METRE_WIND_GUST_IN_THE_LAST_6_HOURS/",
                "ECMWF_HR/TCDC/",
                "ECMWF_HR/LCDC/",
                "ECMWF_HR/UGRD_100M/",
                "ECMWF_HR/VGRD_100M/",
                "NWFD_SCMOC/VIS/",

                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='RAIN03'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='RAIN06'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='T2m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='u10m'),
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='v10m'),
                ]
    except KeyError:
        raise ValueError('Can not find all required directories needed')
    
    try:
        dir_opt=[ 
                utl.Cassandra_dir(
                    data_type='surface',data_source=model,var_name='Td2m')
                ]
        name_opt=['Td2m']
    except KeyError:
        dir_opt=[
                utl.Cassandra_dir(data_type='surface',data_source=model,var_name='rh2m')
                ]
        name_opt=['rh2m']
          

    if initTime is None:
        last_file={model:get_latest_initTime(dir_rqd[0]),
                    'SCMOC':get_latest_initTime(dir_rqd[6]),
                    }
    else:
        last_file={model:initTime[0],
                    'SCMOC':initTime[1],
                    }        

    y_s={model:int('20'+last_file[model][0:2]),
        'SCMOC':int('20'+last_file['SCMOC'][0:2])}
    m_s={model:int(last_file[model][2:4]),
        'SCMOC':int(last_file['SCMOC'][2:4])}
    d_s={model:int(last_file[model][4:6]),
        'SCMOC':int(last_file['SCMOC'][4:6])}
    h_s={model:int(last_file[model][6:8]),
        'SCMOC':int(last_file['SCMOC'][6:8])}

    fhours = np.arange(t_range[0], t_range[1], t_gap)

    init_dt = datetime(y_s['SCMOC'], m_s['SCMOC'], d_s['SCMOC'], h_s['SCMOC'])
    time_all = np.array([init_dt + timedelta(hours=int(ifhour)) for ifhour in fhours])

    filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
    t2m=utl.get_model_points_gy(dir_rqd[9], filenames, points,allExists=False)
    
    if(name_opt[0] == 'rh2m'):
        rh2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)
        Td2m=mpcalc.dewpoint_rh(t2m['data'].values*units('degC'),rh2m['data'].values/100.)
        # water vapour pressure (hPa) from a Magnus-type saturation formula
        p_vapor=(rh2m['data'].values/100.)*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))

    if(name_opt[0] == 'Td2m'):
        Td2m=utl.get_model_points_gy(dir_opt[0], filenames, points,allExists=False)        
        rh2m=mpcalc.relative_humidity_from_dewpoint(t2m['data'].values* units('degC'),
                Td2m['data'].values* units('degC'))
        p_vapor=(np.array(rh2m))*6.105*(math.e**((17.27*t2m['data'].values/(237.7+t2m['data'].values))))
        Td2m=np.array(Td2m['data'].values)* units('degC')

    u10m=utl.get_model_points_gy(dir_rqd[10], filenames, points,allExists=False)
    v10m=utl.get_model_points_gy(dir_rqd[11], filenames, points,allExists=False)
    wsp10m=(u10m['data']**2+v10m['data']**2)**0.5
    # apparent temperature: empirical blend of temperature, vapour pressure and wind
    AT=1.07*t2m['data'].values+0.2*p_vapor-0.65*wsp10m-2.7
    if((t_range[1]) > 72):
        fhours = np.arange(6, t_range[1], 6)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        r03=utl.get_model_points_gy(dir_rqd[8], filenames, points,allExists=False)
    else:
        r03=utl.get_model_points_gy(dir_rqd[7], filenames, points,allExists=False)

    fhours = np.arange(t_range[0], t_range[1], t_gap)
    filenames = [last_file['SCMOC']+'.'+str(fhour).zfill(3) for fhour in fhours]
    VIS=utl.get_model_points_gy(dir_rqd[6], filenames, points,allExists=False,fill_null=True,Null_value=-0.001)     

    if(last_file['SCMOC'] == last_file[model] and t_range[1] > 72):
        fhours = np.append(np.arange(3,72,3),np.arange(72, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]            

    if(last_file['SCMOC'] != last_file[model] and t_range[1] > 60):
        fhours = np.append(np.arange(3,60,3),np.arange(60, (t_range[1]), 6))
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] != last_file[model] and t_range[1] <= 60):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    if(last_file['SCMOC'] == last_file[model] and t_range[1] <= 72):
        fhours = np.arange(t_range[0], t_range[1], t_gap)
        filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        filenames2 = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]

    TCDC=utl.get_model_points_gy(dir_rqd[2], filenames2, points,allExists=False)
    LCDC=utl.get_model_points_gy(dir_rqd[3], filenames2, points,allExists=False)
    u100m=utl.get_model_points_gy(dir_rqd[4], filenames2, points,allExists=False)
    v100m=utl.get_model_points_gy(dir_rqd[5], filenames2, points,allExists=False)
    wsp100m=(u100m['data']**2+v100m['data']**2)**0.5

    if(fhours[-1] <= 120):
        gust10m=utl.get_model_points_gy(dir_rqd[0], filenames, points,allExists=False)
    else:
        if(last_file['SCMOC'] == last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour).zfill(3) for fhour in fhours]
        if(last_file['SCMOC'] != last_file[model]):
            fhours = np.arange(0, t_range[1], 6)
            filenames = [last_file[model]+'.'+str(fhour+12).zfill(3) for fhour in fhours]
        gust10m=utl.get_model_points_gy(dir_rqd[1], filenames, points,allExists=False)
        
    sta_graphics.draw_Station_Synthetical_Forecast_From_Cassandra(
            t2m=t2m,Td2m=Td2m,AT=AT,u10m=u10m,v10m=v10m,u100m=u100m,v100m=v100m,
            gust10m=gust10m,wsp10m=wsp10m,wsp100m=wsp100m,r03=r03,TCDC=TCDC,LCDC=LCDC,
            draw_VIS=draw_VIS,VIS=VIS,drw_thr=drw_thr,
            time_all=time_all,
            model=model,points=points,
            output_dir=output_dir,extra_info=extra_info)
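
# For orientation, a hypothetical call of the station-forecast routine above,
# restating its own defaults (it needs the same Cassandra data access as the
# directories listed at the top of the function):
Station_Synthetical_Forecast_From_Cassandra(
    model='ECMWF', t_range=[0, 84], t_gap=3,
    points={'lon': [116.3833], 'lat': [39.9]})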
        minutes_meso_long = mesoData[0, :iMesoTime + 1]
        dtmeso_long = [
            datetime(timeTakeoff.year, timeTakeoff.month, timeTakeoff.day) +
            timedelta(minutes=iminutes) for iminutes in minutes_meso_long
        ]
        tlongmeso = [mpdates.date2num(itime) for itime in dtmeso_long]

        RHmeso = mesoData[1, iMesoTime]
        T2meso = mesoData[2, iMesoTime]
        T9meso = mesoData[3, iMesoTime]
        umeso = mesoData[4, iMesoTime]
        vmeso = mesoData[5, iMesoTime]
        pmeso = mesoData[6, iMesoTime]
        sradmeso = mesoData[7, :iMesoTime + 1]
        Td2meso = np.array(
            mcalc.dewpoint_rh(T2meso * units.degC, RHmeso / 100.))
else:
    tmeso = np.nan
    RHmeso = np.nan
    T2meso = np.nan
    T9meso = np.nan
    umeso = np.nan
    vmeso = np.nan
    pmeso = np.nan
    Td2meso = np.nan
    sradmeso = np.nan

#################################################
## Find indices for coptersonde and dgps files ##
#################################################
def calculate_dewpoint(temperature, rh):
    """Return the dewpoint (degF, rounded to 1 decimal) for a temperature in degF and RH in percent."""
    temperature = temperature * units.degF
    rh = rh * units.percent
    return round(mpcalc.dewpoint_rh(temperature, rh).to('degF'), 1)
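
# A quick usage sketch of the helper above; the 75 degF / 55 % inputs are
# illustrative values, not from the original source:
print(calculate_dewpoint(75.0, 55.0))  # roughly 58 degF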
def readRadiometerData(fnme):
              
     def plot_fsi(parm,fsi_data,_date): 
         
             import matplotlib.pyplot as plt
             from matplotlib import cm
             from pylab import savefig
             C=fsi_data         
             fig, ax1= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             ax2 = ax1.twinx()

               
             ax1 = C.iloc[:,2].plot.line(ax=ax1,marker='o',grid=False, legend=True, style=None, rot=45,linewidth=3,color='k',)              

             ax2 = C.iloc[:,1].plot.line(ax=ax2,marker='o', grid=False, legend=True, style=None,  rot=45,linewidth=3,color='r')              
                                      
             leg1=ax1.legend(loc='upper left',fontsize=18)
             leg2=ax2.legend(loc='upper right',fontsize=18)
             
             ax1.axhline(y=25,linewidth=3, color='g')
             ax1.axhline(y=35,linewidth=3, color='b')
             ax2.axhline(y=1000,linewidth=3, color='r',linestyle='--')

             ax1.set_yticks(np.linspace(-15, ax1.get_ybound()[1]+1, 15))          
             ax2.set_yticks(np.linspace(0, ax2.get_ybound()[1]+1, 15))          
             ax1.tick_params(axis='y', colors='k',labelsize=18) ; ax1.set_ylabel('Fog Threat Index',color='k',fontsize=18) ;                                    
             ax2.tick_params(axis='y', colors='r',labelsize=18) ; ax2.set_ylabel('Visibility',color='r',fontsize=18) ;            
                                    
             plt.title('Fog Threat & Visibility',color='black',fontsize=18,y=1.05)

             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])
                 
             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+'Fog_threat_visibility'+_date[0:8]+'.png'
             
             savefig(outFile);
             plt.close(fig)
             fig.clf()  # Figure.clf takes no figure argument
     def plot_fsi_catg(parm,fsi_data,_date,catg): 
         
             import matplotlib.pyplot as plt
             from matplotlib import cm
             from pylab import savefig
             C=fsi_data         
             fig, ax1= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             ax2 = ax1.twinx()

               
             ax1 = C.iloc[:,2].plot.line(ax=ax1,marker='o',grid=False, legend=True, style=None, rot=45,linewidth=3,color='k',)              

             ax2 = C.iloc[:,1].plot.line(ax=ax2,marker='o', grid=False, legend=True, style=None,  rot=45,linewidth=3,color='r')              
                                      
             leg1=ax1.legend(loc='upper left',fontsize=18)
             leg2=ax2.legend(loc='upper right',fontsize=18)
             
             ax1.axhline(y=25,linewidth=3, color='g')
             ax1.axhline(y=35,linewidth=3, color='b')
             ax2.axhline(y=1000,linewidth=3, color='r',linestyle='--')

             ax1.set_yticks(np.linspace(-15, ax1.get_ybound()[1]+1, 15))          
             ax2.set_yticks(np.linspace(0, ax2.get_ybound()[1]+1, 15))          
             ax1.tick_params(axis='y', colors='k',labelsize=18) ; ax1.set_ylabel('Fog Threat Index',color='k',fontsize=18) ;                                    
             ax2.tick_params(axis='y', colors='r',labelsize=18) ; ax2.set_ylabel('Visibility',color='r',fontsize=18) ;            
                                    
             plt.title('Fog Threat & Visibility',color='black',fontsize=18,y=1.05)

             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])
                 
             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+'_'+catg+'_Fog_threat_visibility'+_date[0:8]+'.png'
             
             savefig(outFile);
             plt.close(fig)
             fig.clf()
              
     def plot_contour_data(parm,vert_prof,catg,xtk):
             import matplotlib.pyplot as plt
             import matplotlib.colors as mcolors
             from matplotlib import cm
             from pylab import savefig
         
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:41].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:41].T.shape[0])
             X, Y = np.meshgrid(x, y)

             clevs=[260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]
             #colors1 = plt.cm.binary(np.linspace(0., 1, 128))
             #colors1 = plt.cm.gist_heat_r(np.linspace(0, 1, 128))

             colors2 = plt.cm.Blues(np.linspace(0., 1, 128))
             colors3 = plt.cm.Reds(np.linspace(0, 1, 128))


             # combine them and build a new colormap
             colors = np.vstack((colors2,colors3))
             mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)



             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:41].T,levels=clevs,cmap=mymap)
             #cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:45].T,levels=clevs,colors='K',linewidths=0.3) 
             #plt.clabel(cs1, inline=1, fontsize=16)
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)  # pass the level list directly, not nested
             cbar.ax.invert_yaxis()

             ax.set_xticks(x[::xtk]) ; 
             xTickMarks=vert_prof['Date'][::xtk]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:41][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)                             
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:6]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:6])

             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:6]+'/'+parm+'_'+catg+'_hours_vertical_profile'+_date[0:6]+'_1km.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()
################################################################       
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:].T.shape[0])
             X, Y = np.meshgrid(x, y)
             clevs=[190,200,210,220,225,230,235,240,245,250,255,260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]

             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,cmap=cm.gist_rainbow_r )
             cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,colors='K',linewidths=0.3) 
             plt.clabel(cs1, inline=1, fontsize=16)
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)
             cbar.ax.invert_yaxis()

             ax.set_xticks(x[::xtk]) ; 
             xTickMarks=vert_prof['Date'][::xtk]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)

             plt.xticks(size=18)
    
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:6]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:6])

             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:6]+'/'+parm+'_'+catg+'_hours_vertical_profile'+_date[0:6]+'.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()

     def plot_contour_dailydata(parm,vert_prof,_date):
             import matplotlib.pyplot as plt
             import matplotlib.colors as mcolors
             from matplotlib import cm
             from matplotlib.colors import from_levels_and_colors
             from pylab import savefig
         
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:].T.shape[0])
             X, Y = np.meshgrid(x, y)
             clevs=[190,200,210,220,225,230,235,240,245,250,255,260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]


#             mymap = mcolors.ListedColormap(['peachpuff','navajowhite','mistyrose','steelblue','cornflowerblue','slateblue','royalblue','blue','dodgerblue','deepskyblue','skyblue','mediumturquoise',\
#                                             'mediumaquamarine','lightseagreen','seagreen','greenyellow','indianred','forestgreen','yellow','gold','orange','darkorange',\
#                                             'sandybrown','limegreen','coral','orangered','red','hotpink','darkorchid','blueviolet','purple'])
#             nice_cmap= plt.get_cmap(mymap)
#             colors = nice_cmap([0,1, 2, 3, 4, 5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30])
#             colors =nice_cmap([30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0])
#             #cmap, norm = from_levels_and_colors(clevs, colors, extend='both')
#             #norml = mcolors.BoundaryNorm(clevs, ncolors=cmap.N, clip=True)

             colors2 = plt.cm.Blues(np.linspace(0., 1, 128))
             colors3 = plt.cm.Reds(np.linspace(0, 1, 128))


             # combine them and build a new colormap
             colors = np.vstack((colors2,colors3))
             mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)

             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,cmap=cm.gist_rainbow_r )  #cm.gist_rainbow_r
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)
             cbar.ax.invert_yaxis()

             cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,colors='K',linewidths=0.3) 
             plt.clabel(cs1, inline=1, fontsize=13)

             ax.set_xticks(x[::5]) ; 
             xTickMarks=vert_prof['Date'][::5]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)

             plt.xticks(size=18)
    
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])
                 
             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+'_vertical_profile'+_date[0:8]+'.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()
##############################################################             
             
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:41].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:41].T.shape[0])
             X, Y = np.meshgrid(x, y)

             
             clevs=[260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]

             colors2 = plt.cm.Blues(np.linspace(0., 1, 128))
             colors3 = plt.cm.Reds(np.linspace(0, 1, 128))


             # combine them and build a new colormap
             colors = np.vstack((colors2,colors3))
             mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)

             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:41].T,levels=clevs,cmap=mymap )  #cm.gist_rainbow_r
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)
             cbar.ax.invert_yaxis()

             cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:41].T,levels=clevs,colors='K',linewidths=0.3) 
             plt.clabel(cs1, inline=1, fontsize=13)

             ax.set_xticks(x[::5]) ; 
             xTickMarks=vert_prof['Date'][::5]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:41][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)

             plt.xticks(size=18)
    
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])

             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+'_vertical_profile'+_date[0:8]+'_1km.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()
#############################################################################################################################################            
     def plot_contour_prefog(parm,vert_prof,_date,catg):
             import matplotlib.pyplot as plt
             import matplotlib.colors as mcolors
             from matplotlib import cm
             from pylab import savefig
         
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:].T.shape[0])
             X, Y = np.meshgrid(x, y)
            
             clevs=[190,200,210,220,225,230,235,240,245,250,255,260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]

             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,cmap=cm.gist_rainbow_r )
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)
             cbar.ax.invert_yaxis()

             cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:].T,levels=clevs,colors='K',linewidths=0.3) 
             plt.clabel(cs1, inline=1, fontsize=13)

             ax.set_xticks(x[::5]) ; 
             xTickMarks=vert_prof['Date'][::5]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)

             plt.xticks(size=18)
    
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])

             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+catg+'_vertical_profile'+_date[0:8]+'.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()
##############################################################
             fig, ax= plt.subplots(1, sharex=True, sharey=False,figsize=(12,12),dpi=50)
             #ax4 = ax1.twinx()
             x =np.arange(0,vert_prof.iloc[:,4:41].T.shape[1],1) ; 
             y = np.arange(0,vert_prof.iloc[:,4:41].T.shape[0])
             X, Y = np.meshgrid(x, y)
             clevs=[260,262,264,266,268,270,272,274,276,278,280,282,284,285,286,287,288,289,290,291,292]

             colors2 = plt.cm.Blues(np.linspace(0., 1, 128))
             colors3 = plt.cm.Reds(np.linspace(0, 1, 128))


             # combine them and build a new colormap
             colors = np.vstack((colors2,colors3))
             mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)

             cs =plt.contourf(X,Y,vert_prof.iloc[:,4:41].T,levels=clevs,cmap=mymap )  #cm.gist_rainbow_r
             cbar = plt.colorbar(cs, shrink=0.8, extend='both')
             cbar.set_ticks(clevs)
             cbar.ax.invert_yaxis()

             cs1 =plt.contour(X,Y,vert_prof.iloc[:,4:41].T,levels=clevs,colors='K',linewidths=0.3) 
             plt.clabel(cs1, inline=1, fontsize=13)

             ax.set_xticks(x[::5]) ; 
             xTickMarks=vert_prof['Date'][::5]
             xtickNames = ax.set_xticklabels(xTickMarks)            
             plt.setp(xtickNames, rotation=90, fontsize=10,family='sans-serif')
            
             ax.set_yticks(y[::5]) ; 
             yTickMarks=vert_prof.columns[4:41][::5]
             ytickNames = ax.set_yticklabels(yTickMarks,fontsize=18)
    
             ax.tick_params(axis='x', colors='blue') ; ax.tick_params(axis='y', colors='blue')                             
             ax.set_ylabel('Height Levels',color='blue',fontsize=18) ;  titl='Temperature Profile:'+_date
             plt.title(titl,color='black',fontsize=18,y=1.05)

             plt.xticks(size=18)
    
             plt.tight_layout(h_pad=3) ; 
         
             if not os.path.exists(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]):
                 os.makedirs(outpath+'/FogAnalysis/'+parm+'/'+_date[0:8])

             outFile=outpath+'/FogAnalysis/'+parm+'/'+_date[0:8]+'/'+parm+catg+'_vertical_profile'+_date[0:8]+'_1km.png'
             savefig(outFile);
             plt.close(fig)
             fig.clf()
                          
###########################################################################################################################################

     hpc_file=main+'data/Files/hpc_201803.csv'
     rhp_file=main+'data/Files/rhp_201803.csv'
     tpc_file=main+'data/Files/tpc_201803.csv'
     tpb_file=main+'data/Files/tpb_201803.csv'

     met_file=main+'data/Files/met_minute_201803.csv'
     vis_file=main+'data/Files/vis_201803.csv'
     cbh_file=main+'data/Files/cbh_201803.csv'

####################################################################################################################################          
     #tpb_data=pd.read_csv(tpb_file).as_matrix()
     tpb_data=np.genfromtxt(tpb_file,delimiter=',',dtype='S')  ; 
     columns=np.empty((1,94)).astype(str) ; columns[0,0]='Date' ;  columns[0,1:]=tpb_data[0,7:] ;   
     date_cols=tpb_data[1:,0:6].astype(int)
     date_ary=np.vstack([(dt.datetime(*x).replace(year=2000+dt.datetime(*x).year)).strftime('%Y%m%d%H%M%S') for x in date_cols[:]])        
     tpb_data_1=np.concatenate([columns,np.concatenate([date_ary,tpb_data[1:,7:]],axis=1)],axis=0)
     
     tpb_data_1=pd.DataFrame(data=tpb_data_1[1:,:],columns=tpb_data_1[0,:])  
     tpb_data_1['Date']=tpb_data_1['Date'].apply(pd.to_datetime, errors='ignore') ; 
     tpb_data_1.iloc[:,1:]=tpb_data_1.iloc[:,1:].apply(pd.to_numeric,errors='coerce')
     tpb_data_1.index = pd.to_datetime(tpb_data_1.Date) ;          
     tpb_data_1.index =tpb_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     tpb_data_1['Date']=tpb_data_1.index
     tpb_data_1=tpb_data_1.iloc[:,:].resample('1Min').mean() ; tpb_data_1.insert(0,'Date', tpb_data_1.index)
     #tpb_data_hour=tpb_data_1.iloc[:,:].resample('1H').mean()  ; tpb_data_hour.insert(0,'Date', tpb_data_hour.index) ; 
     idx = pd.date_range('2018-03-01 00:00:00', '2018-03-31 23:59:00',freq='T').tz_localize(pytz.timezone('Asia/Dubai')) #Missing data filled with Nan
     tpb_data_2=tpb_data_1.reindex(idx, fill_value=np.nan)
     tpb_data_2['Date']=tpb_data_2.index

##########################################################################################################################################
     #tpc_data=pd.read_csv(tpc_file).as_matrix()
     tpc_data=np.genfromtxt(tpc_file,delimiter=',',dtype='S')  ; 
     columns=np.empty((1,94)).astype(str) ; columns[0,0]='Date' ;  columns[0,1:]=tpc_data[0,7:] ;   
     date_cols=tpc_data[1:,0:6].astype(int)
     date_ary=np.vstack([(dt.datetime(*x).replace(year=2000+dt.datetime(*x).year)).strftime('%Y%m%d%H%M%S') for x in date_cols[:]])        
     tpc_data_1=np.concatenate([columns,np.concatenate([date_ary,tpc_data[1:,7:]],axis=1)],axis=0)
     
     tpc_data_1=pd.DataFrame(data=tpc_data_1[1:,:],columns=tpc_data_1[0,:])  
     tpc_data_1['Date']=tpc_data_1['Date'].apply(pd.to_datetime, errors='ignore') ; 
     tpc_data_1.iloc[:,1:]=tpc_data_1.iloc[:,1:].apply(pd.to_numeric,errors='coerce')
     tpc_data_1.index = pd.to_datetime(tpc_data_1.Date) ; 
     tpc_data_1.index =tpc_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     tpc_data_1['Date']=tpc_data_1.index
         
     tpc_data_1=tpc_data_1.iloc[:,:].resample('1Min').mean() ; tpc_data_1.insert(0,'Date', tpc_data_1.index)
     #tpc_data_hour=tpc_data_1.iloc[:,:].resample('1H').mean()  ; tpc_data_hour.insert(0,'Date', tpc_data_hour.index) ; 
     idx = pd.date_range('2018-03-01 00:00:00', '2018-03-31 23:59:00',freq='T').tz_localize(pytz.timezone('Asia/Dubai')) #Missing data filled with Nan
     tpc_data_2=tpc_data_1.reindex(idx, fill_value=np.nan)
     tpc_data_2['Date']=tpc_data_2.index
       
##################################################################################################################################################        
     #hpc_data=pd.read_csv(hpc_file).as_matrix()
     hpc_data=np.genfromtxt(hpc_file,delimiter=',',dtype='S')  ; 
     columns=np.empty((1,94)).astype(str) ; columns[0,0]='Date' ;  columns[0,1:]=hpc_data[0,7:] ;   
     date_cols=hpc_data[1:,0:6].astype(int)
     date_ary=np.vstack([(dt.datetime(*x).replace(year=2000+dt.datetime(*x).year)).strftime('%Y%m%d%H%M%S') for x in date_cols[:]])        
     hpc_data_1=np.concatenate([columns,np.concatenate([date_ary,hpc_data[1:,7:]],axis=1)],axis=0)
     
     hpc_data_1=pd.DataFrame(data=hpc_data_1[1:,:],columns=hpc_data_1[0,:])  
     hpc_data_1['Date']=hpc_data_1['Date'].apply(pd.to_datetime, errors='ignore') ; 
     hpc_data_1.iloc[:,1:]=hpc_data_1.iloc[:,1:].apply(pd.to_numeric,errors='coerce')
     hpc_data_1.index = pd.to_datetime(hpc_data_1.Date) ; 
     hpc_data_1.index =hpc_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     hpc_data_1['Date']=hpc_data_1.index
         
     hpc_data_1=hpc_data_1.iloc[:,:].resample('1Min').mean() ; hpc_data_1.insert(0,'Date', hpc_data_1.index)
     #hpc_data_hour=hpc_data_1.iloc[:,:].resample('1H').mean()  ; hpc_data_hour.insert(0,'Date', hpc_data_hour.index) ; 
     
     hpc_data_2=hpc_data_1.reindex(idx, fill_value=np.nan)
     hpc_data_2['Date']=hpc_data_2.index     

     # relative-humidity profiler (RHP): same pattern again
     rhp_data=np.genfromtxt(rhp_file,delimiter=',',dtype='S')
     columns=np.empty((1,94)).astype(str)
     columns[0,0]='Date'
     columns[0,1:]=rhp_data[0,7:]
     date_cols=rhp_data[1:,0:6].astype(int)
     date_ary=np.vstack([(dt.datetime(*x).replace(year=2000+dt.datetime(*x).year)).strftime('%Y%m%d%H%M%S') for x in date_cols[:]])
     rhp_data_1=np.concatenate([columns,np.concatenate([date_ary,rhp_data[1:,7:]],axis=1)],axis=0)

     rhp_data_1=pd.DataFrame(data=rhp_data_1[1:,:],columns=rhp_data_1[0,:])
     rhp_data_1['Date']=rhp_data_1['Date'].apply(pd.to_datetime, errors='ignore')
     rhp_data_1.iloc[:,1:]=rhp_data_1.iloc[:,1:].apply(pd.to_numeric,errors='coerce')
     rhp_data_1.index = pd.to_datetime(rhp_data_1.Date)
     rhp_data_1.index =rhp_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     rhp_data_1['Date']=rhp_data_1.index

     rhp_data_1=rhp_data_1.resample('1Min').mean()
     rhp_data_1.insert(0,'Date', rhp_data_1.index)
     rhp_data_2=rhp_data_1.reindex(idx, fill_value=np.nan)
     rhp_data_2['Date']=rhp_data_2.index
     
     spc_data_2=calculateSPH(tpc_data_2,hpc_data_2)   # SPH (specific-humidity) profile; calculateSPH is defined elsewhere in this module
################################################################################################################################

     met_data=pd.read_csv(met_file)

     met_data_1=met_data.drop(['Date.1'],axis=1)   # drop the duplicated date column

     met_data_1['Date']=met_data_1['Date'].apply(pd.to_datetime, errors='ignore')
     met_data_1[['pressure','Temperature','RH','WS','WD','RainRate']]=met_data_1[['pressure','Temperature','RH','WS','WD','RainRate']].apply(pd.to_numeric,errors='coerce')
     #met_data_1['WS']=met_data_1['WS']*0.2777   # uncomment if WS is in km/h (5/18 converts km/h to m/s)
     met_data_1.index = met_data_1.Date
     met_data_1.index =met_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     met_data_1['Date']=met_data_1.index

     met_data_2=met_data_1.reindex(idx, fill_value=np.nan)
     met_data_2['Date']=met_data_2.index
###############################################################################################################################
     vis_data=pd.read_csv(vis_file)
     vis_data['Date']=vis_data['Date'].apply(pd.to_datetime, errors='ignore',format='%Y-%m-%d-%H-%M')
     vis_data[['Ex.Coeff','Visibility(m)','Lux(KM)','DN_Flag','ErrorCode','ctrlrelay']]=\
     vis_data[['Ex.Coeff','Visibility(m)','Lux(KM)','DN_Flag','ErrorCode','ctrlrelay']].apply(pd.to_numeric,errors='coerce')
     vis_data.index = vis_data.Date
     vis_data.index =vis_data.index.tz_localize(pytz.timezone('Asia/Dubai'))   # visibility logger already reports local time
     vis_data['Date']=vis_data.index

     vis_data_2=vis_data.reindex(idx, fill_value=np.nan)
     vis_data_2['Date']=vis_data_2.index

##################################################################################################################################

     cbh_data=pd.read_csv(cbh_file).to_numpy()   # cloud-base height; .as_matrix() no longer exists in pandas
     columns=np.empty((1,2)).astype(str)
     columns[0,0]='Date'
     columns[0,-1]='cbh'
     date_cols=cbh_data[1:,0:6].astype(int)
     date_ary=np.vstack([(dt.datetime(*x).replace(year=2000+dt.datetime(*x).year)).strftime('%Y%m%d%H%M%S') for x in date_cols[:]])
     cbh_data_1=np.concatenate([columns,np.concatenate([date_ary,cbh_data[1:,7:10]],axis=1)],axis=0)

     cbh_data_1=pd.DataFrame(data=cbh_data_1[1:,:],columns=cbh_data_1[0,:])
     cbh_data_1['Date']=cbh_data_1['Date'].apply(pd.to_datetime, errors='ignore')
     cbh_data_1[['cbh']]=cbh_data_1[['cbh']].apply(pd.to_numeric,errors='coerce')
     cbh_data_1.index = cbh_data_1.Date
     cbh_data_1=cbh_data_1.resample('1Min').mean()
     cbh_data_1.insert(0,'Date', cbh_data_1.index)
     cbh_data_1.index =cbh_data_1.index.tz_localize(pytz.utc).tz_convert(pytz.timezone('Asia/Dubai'))
     cbh_data_1['Date']=cbh_data_1.index

     cbh_data_2=cbh_data_1.reindex(idx, fill_value=np.nan)
     cbh_data_2['Date']=cbh_data_2.index
     
#################################################################################################################
     ## dewpoint calculated from RH
     # 30-min means of every stream (tpb_data_2 is built earlier in this routine)
     tpb_hour=tpb_data_2.resample('30Min').mean()
     tpb_hour.insert(0,'Date', tpb_hour.index)
     rhp_hour=rhp_data_2.resample('30Min').mean()
     rhp_hour.insert(0,'Date', rhp_hour.index)
     tpc_hour=tpc_data_2.resample('30Min').mean()
     tpc_hour.insert(0,'Date', tpc_hour.index)
     met_hour=met_data_2.resample('30Min').mean()
     met_hour.insert(0,'Date', met_hour.index)
     vis_hour=vis_data_2.resample('30Min').mean()
     vis_hour.insert(0,'Date', vis_hour.index)
     cbh_hour=cbh_data_2.resample('30Min').mean()
     cbh_hour.insert(0,'Date', cbh_hour.index)

     # dewpoint profile from the TPC temperature (K) and RHP relative humidity (fraction)
     dpt_data=dewpoint_rh(np.array(tpc_hour.iloc[:,1:])*units('K'),(np.array(rhp_hour.iloc[:,1:])/100.)).to(units('K'))
     dpt_data_1=pd.DataFrame(dpt_data.m,columns=rhp_hour.columns[1:])
     dpt_data_1.index=rhp_hour.Date
     dpt_data_1.insert(0,'Date',rhp_hour.Date)

     # same dewpoint calculation against the TPB temperature profile
     dpt_data_tpb=dewpoint_rh(np.array(tpb_hour.iloc[:,1:])*units('K'),(np.array(rhp_hour.iloc[:,1:])/100.)).to(units('K'))
     dpt_data_1_tpb=pd.DataFrame(dpt_data_tpb.m,columns=rhp_hour.columns[1:])
     dpt_data_1_tpb.index=rhp_hour.Date
     dpt_data_1_tpb.insert(0,'Date',rhp_hour.Date)
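
     # Hedged sketch of the dewpoint_rh pattern above with illustrative values
     # (not from this dataset): 30 degC air at 50% RH has a dewpoint near
     # 18.5 degC, and .to(units('K')) converts the degC result to kelvin (~291.6 K).
     _td_demo = dewpoint_rh(np.array([303.15]) * units('K'), np.array([0.5])).to(units('K'))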


#########################################################################################################################
     ## Wet bulb Potential Temperature
     import aoslib   # AWIPS aoslib supplies the wet-bulb (calctw), mixing-ratio (mixrat) and theta-w (thetawa) routines used below
     hght=np.array(rhp_hour.columns[1:].astype(int))
     # standard-atmosphere pressure (hPa) for each profiler height (m)
     h_to_ps=(list(np.round(height_to_pressure_std(np.array(rhp_hour.columns[1:].astype(int))*units('meter')).m)))
     h_to_pss=pd.concat([pd.DataFrame(h_to_ps).transpose()]*tpb_hour.shape[0])
     tpb_hour_wet=aoslib.calctw(h_to_pss,tpb_hour.iloc[:,1:],rhp_hour.iloc[:,1:])   # wet-bulb temperature profile
     tpb_hour_wet_1=pd.DataFrame(tpb_hour_wet,columns=h_to_ps)    #tpb_hour.columns[1:]
     tpb_hour_wet_1.index=tpb_hour.Date
     tpb_hour_wet_1.insert(0,'Date',tpb_hour.Date)

     mix_ratio_1=aoslib.mixrat(h_to_pss,tpb_hour.iloc[:,1:],rhp_hour.iloc[:,1:])   # mixing-ratio profile
     mix_ratio_2=pd.DataFrame(mix_ratio_1,columns=h_to_ps)
     mix_ratio_2.index=tpb_hour.Date
     mix_ratio_2.insert(0,'Date',tpb_hour.Date)

     #tpb_hour_wet_2=aoslib.awips.thetawa(tpb_hour.iloc[0:2,1:],dpt_data_1_tpb.iloc[0:2,1:],h_to_pss.iloc[0:2,:],mix_ratio_1[0:2,:])

     # wet-bulb potential temperature at the ' 1440' m level (~852 hPa)
     A_w=pd.concat([tpb_hour['Date'],tpb_hour[' 1440'],dpt_data_1_tpb[' 1440'] ,mix_ratio_2[852.0]],axis=1).dropna(axis=0, how='any')
     A_w.columns=['Date','T','Td','Mr']
     tpb_hour_wet_2=pd.DataFrame([aoslib.awips.thetawa(np.round(A_w['T'][ii],1),np.round(A_w['Td'][ii],1) ,850,A_w['Mr'][ii]) for ii in range(0,A_w.shape[0])])
     tpb_hour_wet_2.index=A_w.Date
     tpb_hour_wet_2.insert(0,'Date',A_w.Date)
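
     # Sanity check (hedged, standard atmosphere) for the height->pressure labels:
     # height_to_pressure_std(1440 * units.meter) gives roughly 852 hPa, which is
     # why the ' 1440' height column is paired with the 852.0 pressure column and
     # why 850 hPa is passed to thetawa as the nearest round level.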

     

     #mix_ratio_1=metpy.calc.mixing_ratio_from_relative_humidity(np.array(rhp_hour.iloc[:,1:])/100.,np.array(tpc_hour.iloc[:,1:])*units('K'),h_to_pss.as_matrix()*units.hectopascal)


     ###### Fog Stability Index ##############################
     h_to_ps=(list(np.round(height_to_pressure_std(np.array(rhp_hour.columns[1:].astype(int))*units('meter')).m)))
     h_to_ps.insert(0,'Date')
     dpt_data_2=dpt_data_1   # same object: the relabel below applies to dpt_data_1 too
     dpt_data_2.columns=h_to_ps

     A=pd.concat([dpt_data_2['Date'],met_hour['Temperature'],tpc_hour['0'],dpt_data_2[1013.0],tpc_hour[' 10'],met_hour['WS']],axis=1)
     A.columns=['Date','met_tmp','TPC_0','Dew_0','tpc_10','met_ws']

     # FSI from surface values: 4*T - 2*(T + Td) + wind speed in knots
     fsi_index=(4*A['met_tmp'])-2*(A['met_tmp'] + A['Dew_0'])+ (A['met_ws']*1.94384)

     fsi_index_1=pd.concat([dpt_data_2['Date'],vis_hour['Visibility(m)'],fsi_index],axis=1)
     fsi_index_1.columns=['Date','Visibility(m)','fsi']

     fsi_hig=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] >5000)]                              #.between_time('22:00','06:00')
     fsi_mod=fsi_index_1.iloc[np.where((fsi_index_1['Visibility(m)'] >1000)&(fsi_index_1['Visibility(m)'] <5000) )] #.between_time('22:00','06:00')
     fsi_fog=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] <=1000)]

     # observed FSI range in each visibility class: [fog, moderate, high]
     [[fsi_fog['fsi'].min(), fsi_fog['fsi'].max()],[fsi_mod['fsi'].min(), fsi_mod['fsi'].max()],[fsi_hig['fsi'].min(), fsi_hig['fsi'].max()]]

     fog_point = (0.044 * A['met_tmp']) + (0.844 * A['Dew_0']) - 0.55   # empirical fog-point temperature
     fog_threat= tpb_hour_wet_1[852.0]-fog_point                        # fog threat = theta-w(~850 hPa) - fog point
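
     # Interpretation (hedged; common forecasting guidance, not stated in this
     # script): FSI below ~31 is usually read as high radiation-fog potential,
     # 31-55 as moderate, and above 55 as low. Likewise a fog threat (theta-w at
     # 850 hPa minus the fog point) near or below 0 degC suggests high fog risk,
     # ~0-3 degC moderate, and above ~3 degC low.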


###########################################################################################################################
     h_to_ps=(list(np.round(height_to_pressure_std(np.array(rhp_hour.columns[1:].astype(int))*units('meter')).m)))
     h_to_ps.insert(0,'Date')

     dpt_data_2_tpb=dpt_data_1_tpb   # same object: the relabel below applies to dpt_data_1_tpb too
     dpt_data_2_tpb.columns=h_to_ps
     



##############################  TPB
     A=pd.concat([dpt_data_2_tpb['Date'],met_hour['Temperature'],tpb_hour['0'],tpb_hour[' 1000'],dpt_data_2_tpb[1013.0],dpt_data_2_tpb[852.0],met_hour['WS'],\
                   met_hour['RH'],vis_hour['Ex.Coeff'],cbh_hour['cbh']],axis=1)

     # FSI variant on the TPB profile: 1000 m temperature, surface dewpoint,
     # wind speed in knots, plus a cloud-base-height term
     fsi_index_tpb_1=np.round((4*(A['Temperature']))-2*((A[' 1000']) + (A[1013.0]))+ (A['WS']*1.94384)+4*(A['cbh']/1000))
     #######################

#     A=pd.concat([dpt_data_2_tpb['Date'],met_hour['Temperature'],tpb_hour['0'],tpb_hour[' 460'],dpt_data_2_tpb[1013.0],dpt_data_2_tpb[959.0],met_hour['WS'],met_hour['RH']],axis=1)
#
#     fsi_index_tpb_2=(A['0']-A[' 460']) + (A[1013.0]-A[959.0]) + (A['WS']*1.94384) #+A['RH']
#
#     fsi_index_tpb_3=(A['0']-A[1013.0]) + (A[' 460']-A[959.0]) + (A['WS']*1.94384) #+A['RH']
#
#     #fsi_index_tpb=(A['0']-A[1013.0]) + (A['0']-A[' 1440']) + (A['WS']*1.94384) #+A['RH']

     fsi_index_1=pd.concat([dpt_data_2_tpb['Date'],vis_hour['Visibility(m)'],fsi_index_tpb_1,met_hour['RH'],met_hour['WS']],axis=1)
     fsi_index_1.columns=['Date','Visibility(m)','fsi','RH','WS']
     #fsi_index_1=fsi_index_1.between_time('23:00','06:00')   # optional nighttime filter
     fsi_hig=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] >3000)]
     fsi_mod=fsi_index_1.iloc[np.where((fsi_index_1['Visibility(m)'] >1000)&(fsi_index_1['Visibility(m)'] <=3000) )]
     fsi_fog=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] <=1000)]

     # observed FSI range per visibility class: [fog, moderate, high]
     [[fsi_fog['fsi'].min(), fsi_fog['fsi'].max()],[fsi_mod['fsi'].min(), fsi_mod['fsi'].max()],[fsi_hig['fsi'].min(), fsi_hig['fsi'].max()]]
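
     # The visibility-class binning above is repeated for each FSI variant; a
     # small helper sketch of the same logic (hypothetical, not in the original):
     def _fsi_by_vis(df, lo=1000, hi=3000):
         # return (fog, moderate, high-visibility) subsets of an FSI frame
         fog = df[df['Visibility(m)'] <= lo]
         mod = df[(df['Visibility(m)'] > lo) & (df['Visibility(m)'] <= hi)]
         hig = df[df['Visibility(m)'] > hi]
         return fog, mod, hig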


#################### TPC

     A=pd.concat([dpt_data_1['Date'],met_hour['Temperature'],tpc_hour['0'],tpc_hour[' 1000'],dpt_data_1[1013.0],dpt_data_1[852.0],met_hour['WS'],\
                                       met_hour['RH'],vis_hour['Ex.Coeff'],cbh_hour['cbh']],axis=1)

     # same FSI variant built on the TPC profile
     fsi_index_tpc_1=np.round((4*(A['Temperature']))-2*((A[' 1000']) + (A[1013.0]))+ (A['WS']*1.94384)+(A['cbh']/1000))
     #######################

     fsi_index_1=pd.concat([dpt_data_2['Date'],vis_hour['Visibility(m)'],fsi_index_tpc_1,met_hour['RH'],met_hour['WS']],axis=1)
     fsi_index_1.columns=['Date','Visibility(m)','fsi','RH','WS']
     #fsi_index_1=fsi_index_1.between_time('23:00','06:00')   # optional nighttime filter
     fsi_hig=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] >3000)]
     fsi_mod=fsi_index_1.iloc[np.where((fsi_index_1['Visibility(m)'] >1000)&(fsi_index_1['Visibility(m)'] <=3000) )]
     fsi_fog=fsi_index_1.iloc[np.where(fsi_index_1['Visibility(m)'] <=1000)]

     [[fsi_fog['fsi'].min(), fsi_fog['fsi'].max()],[fsi_mod['fsi'].min(), fsi_mod['fsi'].max()],[fsi_hig['fsi'].min(), fsi_hig['fsi'].max()]]

     
#############################################################################################################################################################
     parm='DPT'
     _date='201803'

     A1=pd.concat([vis_data_2['Date'],vis_data_2['Visibility(m)'], met_data_2['RH'],met_data_2['WS']], axis=1)
     A_hour=A1.resample('30Min').mean()
     A_hour.insert(0,'Date', A_hour.index)
     B_hour=pd.concat([A_hour,dpt_data_1_tpb.iloc[:,1:]],axis=1)

     for indx in np.unique([x.strftime('%Y%m%d') for x in B_hour.index.date]) :
         B1_hour=B_hour.loc[indx]   # partial-string indexing selects one day

#################         contour plots
         plot_contour_dailydata(parm,B1_hour,indx)

         # fog onset: low visibility, near-saturation, light wind, 00-10 local time
         fog_st_time=(B1_hour.iloc[np.where((B1_hour['Visibility(m)'] <=1300) & (B1_hour['RH'] >=88) & (B1_hour['WS'] <=3.0))]).between_time('00:00','10:00')

         if not fog_st_time.empty :
             # three hours leading up to fog onset
             pre_fg_st_time=(fog_st_time.index[0]-dt.timedelta(hours=3)).strftime('%Y-%m-%d %H:%M:%S')
             pre_fg_ed_time=fog_st_time.index[0].strftime('%Y-%m-%d %H:%M:%S')
             pre_fg_data=(B_hour.loc[pre_fg_st_time:pre_fg_ed_time]).dropna(axis=0, how='any')
             if pre_fg_data.shape[0] >1 :
                 plot_contour_prefog(parm,pre_fg_data,indx,'pre_fog')

             # the fog episode itself
             fg_stt_time=fog_st_time.index[0].strftime('%Y-%m-%d %H:%M:%S')
             fg_ed_time=fog_st_time.index[-1].strftime('%Y-%m-%d %H:%M:%S')
             fg_data=(B1_hour.loc[fg_stt_time:fg_ed_time]).dropna(axis=0, how='any')
             if fg_data.shape[0] >1 :
                 plot_contour_prefog(parm,fg_data,indx,'fog')

             # three hours after the fog clears
             post_fg_stt_time=(fog_st_time.index[-1]+dt.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S')
             post_fg_ed_time=(fog_st_time.index[-1]+dt.timedelta(hours=3)).strftime('%Y-%m-%d %H:%M:%S')
             post_fog_data=(B1_hour.loc[post_fg_stt_time:post_fg_ed_time]).dropna(axis=0, how='any')
             if not post_fog_data.empty :
                 plot_contour_prefog(parm,post_fog_data,indx,'post_fog')

#############################################################################################



     B_data_hig=B_hour.iloc[np.where(B_hour['Visibility(m)'] >1000)]                              #.between_time('22:00','06:00')
     #B_data_low=B_hour.iloc[np.where((B_hour['Visibility(m)'] >1000)&(B_hour['Visibility(m)'] <5000) )] #.between_time('22:00','06:00')
     B_data_fog=B_hour.iloc[np.where(B_hour['Visibility(m)'] <=1000)]

     B_data_fog_1=B_data_fog.iloc[np.where((B_data_fog['RH'] >88) & (B_data_fog['WS'] <3.0))] #.between_time('22:00','06:00')
     B_hour_fog=B_data_fog_1.resample('30Min').mean()
     B_hour_fog.insert(0,'Date', B_hour_fog.index)
     B1_hour_fog=B_hour_fog.dropna(axis=0, how='any')

     plot_contour_data(parm,B1_hour_fog,'fog',5)

     B_hour_hig=B_data_hig.resample('30Min').mean()
     B_hour_hig.insert(0,'Date', B_hour_hig.index)
     B1_hour_hig=B_hour_hig.dropna(axis=0, how='any')

     plot_contour_data(parm,B1_hour_hig,'high',30)

#     B_hour_low=B_data_low.iloc[:,:].resample('30Min').mean() ; B_hour_low.insert(0,'Date', B_hour_low.index) ;
#     B1_hour_low=B_hour_low.dropna(axis=0, how='any')  
#     
#     plot_contour_data(parm,B1_hour_low,'low')
#     plot_contour_data_1km(parm,B1_hour_low,'low')

###########################################################################
     parm='DPT'
     _date='201803'
     fsi_index_1=fsi_index_1.dropna(axis=0,how='any')
     for indx in np.unique([x.strftime('%Y%m%d') for x in fsi_index_1.index.date]) :
         B1_hour=fsi_index_1.loc[indx]
         if not B1_hour.empty :
             plot_fsi(parm,B1_hour,indx)

         fog_st_time=(B1_hour.iloc[np.where((B1_hour['Visibility(m)'] <=1000) & (B1_hour['RH'] >=88) & (B1_hour['WS'] <=3.0))]).between_time('00:00','10:00')

         if not fog_st_time.empty :
             pre_fg_st_time=(fog_st_time.index[0]-dt.timedelta(hours=3)).strftime('%Y-%m-%d %H:%M:%S')
             pre_fg_ed_time=fog_st_time.index[0].strftime('%Y-%m-%d %H:%M:%S')
             pre_fg_data=(fsi_index_1.loc[pre_fg_st_time:pre_fg_ed_time]).dropna(axis=0, how='any')

             if pre_fg_data.shape[0] >1 :
                 plot_fsi_catg(parm,pre_fg_data,indx,'pre_fog')

             fg_stt_time=fog_st_time.index[0].strftime('%Y-%m-%d %H:%M:%S')
             fg_ed_time=fog_st_time.index[-1].strftime('%Y-%m-%d %H:%M:%S')
             fg_data=(fsi_index_1.loc[fg_stt_time:fg_ed_time]).dropna(axis=0, how='any')
             if fg_data.shape[0] >1 :
                 plot_fsi_catg(parm,fg_data,indx,'fog')

             post_fg_stt_time=(fog_st_time.index[-1]+dt.timedelta(minutes=1)).strftime('%Y-%m-%d %H:%M:%S')
             post_fg_ed_time=(fog_st_time.index[-1]+dt.timedelta(hours=3)).strftime('%Y-%m-%d %H:%M:%S')
             post_fog_data=(fsi_index_1.loc[post_fg_stt_time:post_fg_ed_time]).dropna(axis=0, how='any')
             if not post_fog_data.empty :
                 plot_fsi_catg(parm,post_fog_data,indx,'post_fog')
Beispiel #35
0
    T = (InFile[TempVname][...] * units('kelvin')).to(
        units('degC'))  # input temp is K, convert to deg C
    # 'radian' serves as the initial unit because it is dimensionless;
    # .to('') then strips it, leaving a plain fraction
    RH = (InFile[RhVname][...] / 100.0 * units('radian')).to('')
    Z = InFile[Zname][...] * units('meter')
    Nz = len(Z)

    InFile.close()

    # Convert the heights to pressure values
    P = metc.height_to_pressure_std(Z)  # P comes back in millibars

    # Calculate the dew point temperature
    Td = metc.dewpoint_rh(T, RH)  # Td comes back in deg C

    # Make the plot
    Ptitle = "{0:s} (averaged over final 20 days)".format(LabelScheme[Sim])
    Fig = plt.figure(figsize=(9, 9))

    skewt = metp.SkewT(Fig, rotation=30)

    skewt.plot(P, T, 'r')
    skewt.plot(P, Td, 'g')
    skewt.ax.set_xlim(-80, 30)
    skewt.ax.set_ylim(1000, 50)
    skewt.ax.title.set_text(Ptitle)

    skewt.plot_dry_adiabats()
    skewt.plot_moist_adiabats()
Beispiel #36
0
def test_percent_dewpoint_rh():
    """Test dewpoint_rh with rh in percent."""
    td = dewpoint_rh(10.6 * units.degC, 37 * units.percent)
    assert_almost_equal(td, 26. * units.degF, 0)
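
def test_fraction_dewpoint_rh():
    """Sketch (not from the original suite): fraction and percent RH should agree."""
    # hedged example: a dimensionless 0.37 should match 37 * units.percent
    td_frac = dewpoint_rh(10.6 * units.degC, 0.37)
    td_pct = dewpoint_rh(10.6 * units.degC, 37 * units.percent)
    assert_almost_equal(td_frac, td_pct, 7)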