Example #1
    def __modelProf(self):
        if self.model == "GFS":
            d = BufkitFile('ftp://ftp.meteo.psu.edu/pub/bufkit/' + self.model + '/' + self.runtime[:-1] + '/'
                + self.model.lower() + '3_' + self.loc.lower() + '.buf')
        else:
            d = BufkitFile('ftp://ftp.meteo.psu.edu/pub/bufkit/' + self.model + '/' + self.runtime[:-1] + '/'
                + self.model.lower() + '_' + self.loc.lower() + '.buf')
        self.d = d

        if self.model == "SREF":
            for i in self.prof_idx:
                profs = []
                for j in range(len(d.wdir)):
                    ##print "MAKING PROFILE OBJECT: " + datetime.strftime(d.dates[i], '%Y%m%d/%H%M')
                    if j == 0:
                        profs.append(profile.create_profile(profile='convective', omeg = d.omeg[j][i], hght = d.hght[j][i],
                        tmpc = d.tmpc[j][i], dwpc = d.dwpc[j][i], pres = d.pres[j][i], wspd=d.wspd[j][i], wdir=d.wdir[j][i]))
                        self.progress.emit()
                    else:
                        profs.append(profile.create_profile(profile='default', omeg = d.omeg[j][i], hght = d.hght[j][i],
                        tmpc = d.tmpc[j][i], dwpc = d.dwpc[j][i], pres = d.pres[j][i], wspd=d.wspd[j][i], wdir=d.wdir[j][i]))
                self.profs.append(profs)

        else:
            for i in self.prof_idx:
                ##print "MAKING PROFILE OBJECT: " + datetime.strftime(d.dates[i], '%Y%m%d/%H%M')
                self.profs.append(profile.create_profile(profile='convective', omeg = d.omeg[0][i], hght = d.hght[0][i],
                    tmpc = d.tmpc[0][i], dwpc = d.dwpc[0][i], pres = d.pres[0][i], wspd=d.wspd[0][i], wdir=d.wdir[0][i]))
                self.progress.emit()
Example #2
    def __archiveProf(self):
        """
        Get the archive sounding based on the user's selections.
        """
        ## open the archive file from the user's selection
        arch_file = open(self.link, 'r')

        ## read in the file
        data = np.array(arch_file.read().split('\n'))
        ## take care of possible whitespace issues
        for i in range(len(data)):
            data[i] = data[i].strip()
        arch_file.close()

        ## necessary index points
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0] + 1
        finish_idx = np.where( data == '%END%')[0]

        ## create the plot title
        plot_title = data[title_idx + 1].upper() + ' (User Selected)'

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        sound_data = StringIO( full_data )

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )

        ## construct the Profile object
        prof = profile.create_profile( profile='convective', pres=p, hght=h, tmpc=T, dwpc=Td,
                                wdir=wdir, wspd=wspd, location=self.loc)
        return prof, plot_title
Example #3
    def _parse(self):
        file_data = self._downloadFile()
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        title_idx = np.where(data == '%TITLE%')[0][0]
        start_idx = np.where(data == '%RAW%')[0] + 1
        finish_idx = np.where(data == '%END%')[0]

        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0]
        time = datetime.strptime(data_header[1][:11], '%y%m%d/%H%M')

        if time > datetime.utcnow():
            # If the strptime accidentally puts the sounding in the future (like with the SARS archive),
            # i.e. a 1957 sounding becomes a 2057 sounding, ensure that it's part of the 20th century.
            time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx:finish_idx][:])
        sound_data = StringIO(full_data)

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt(sound_data,
                                                delimiter=',',
                                                comments="%",
                                                unpack=True)
        #       idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p  #[idx]
        hght = h  #[idx]
        tmpc = T  #[idx]
        dwpc = Td  #[idx]
        wspd = wspd  #[idx]
        wdir = wdir  #[idx]

        # Force latitude to be 35 N. Figure out a way to fix this later.
        prof = profile.create_profile(profile='raw',
                                      pres=pres,
                                      hght=hght,
                                      tmpc=tmpc,
                                      dwpc=dwpc,
                                      wdir=wdir,
                                      wspd=wspd,
                                      location=location,
                                      date=time,
                                      latitude=35.)

        prof_coll = prof_collection.ProfCollection(
            {'': [prof]},
            [time],
        )

        prof_coll.setMeta('loc', location)
        prof_coll.setMeta('observed', True)
        prof_coll.setMeta('base_time', time)
        return prof_coll
Example #4
    def __observedProf(self):
        """
        Get the observed sounding based on the user's selections
        """
        ## if the profile is the latest, pull the latest profile
        if self.prof_time == "Latest":
            timestr = self.prof_time.upper()
        ## otherwise, convert the menu string to the URL format
        else:
            timestr = self.prof_time[2:4] + self.prof_time[5:7] + self.prof_time[8:10] + self.prof_time[11:-1]
            timestr += "_OBS"
        ## construct the URL
        url = urllib.urlopen('http://www.spc.noaa.gov/exper/soundings/' + timestr + '/' + self.loc.upper() + '.txt')
        ## read in the file
        data = np.array(url.read().split('\n'))
        ## necessary index points
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0] + 1
        finish_idx = np.where( data == '%END%')[0]

        ## create the plot title
        plot_title = data[title_idx + 1] + ' (Observed)'

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        sound_data = StringIO( full_data )

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )

        ## construct the Profile object
        prof = profile.create_profile( profile='convective', pres=p, hght=h, tmpc=T, dwpc=Td,
                                wdir=wdir, wspd=wspd, location=self.loc)
        return prof, plot_title
Example #5
    def _parse(self, file_name):
        file_data = self._downloadFile(file_name)           
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0] + 1
        finish_idx = np.where( data == '%END%')[0]

        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0]
        time = data_header[1][:11]

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        sound_data = StringIO( full_data )

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )
#       idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p #[idx]
        hght = h #[idx]
        tmpc = T #[idx]
        dwpc = Td #[idx]
        wspd = wspd #[idx]
        wdir = wdir #[idx]

        prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc,
            wdir=wdir, wspd=wspd, location=location)

        return {'':[ prof ]}, [ datetime.strptime(time, '%y%m%d/%H%M') ]
Example #6
    def _parseSection(self, section):
        parts = section.split('\n')
        if ' F' in parts[1]:
            valid, fhr = parts[1].split(' F')
            fhr = int(fhr)
        else:
            valid = parts[1]
            fhr = 0
        dt_obj = datetime.strptime(valid, 'TIME = %y%m%d/%H%M')
        member = parts[0].split('=')[-1].strip()
        location = parts[2].split('SLAT')[0].split('=')[-1].strip()
        headers = [ h.lower() for h in parts[4].split(", ") ]
        data = '\n'.join(parts[5:])
    
        if not is_py3():
            sound_data = StringIO(data)
        else:
            sound_data = BytesIO(data.encode())

        prof_vars = np.genfromtxt( sound_data, delimiter=',', unpack=True)
        prof_var_dict = dict(zip(headers, prof_vars))
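        # Rename BUFKIT-style column headers (e.g. 'omga', 'temp', 'dewp',
        # 'uwin', 'vwin') to the keyword names profile.create_profile expects
        # ('omeg', 'tmpc', 'dwpc', 'u', 'v').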
        def maybe_replace(old_var, new_var):
            if old_var in prof_var_dict:
                prof_var_dict[new_var] = prof_var_dict[old_var]
                del prof_var_dict[old_var]
        maybe_replace('omga', 'omeg')
        maybe_replace('temp', 'tmpc')
        maybe_replace('dewp', 'dwpc')
        maybe_replace('uwin', 'u')
        maybe_replace('vwin', 'v')
        
        prof = profile.create_profile(profile='raw', location=location, date=dt_obj, missing=-999.0, **prof_var_dict)
        
        return prof, dt_obj, dt_obj - timedelta(hours=fhr), member
Example #7
def make_plot(index):
    #index = (r,c)
    r,c = index
    print(r,c)
    xlat = xlats[r,c]
    xlon = xlons[r,c]

    mean_hgt = np.nanmean(z_agl[:,:,r,c], axis=0)
    mean_t = np.nanmean(t[:,:,r,c], axis=0)
    mean_q = np.nanmean(q[:,:,r,c], axis=0)
    mean_p = np.nanmean(p[:,:,r,c], axis=0)
    mean_td = thermo.temp_at_mixrat(mean_q*1000., mean_p)
    mean_u = np.nanmean(u[:,:,r,c], axis=0)
    mean_v = np.nanmean(v[:,:,r,c], axis=0)    
    mean_omega = np.nanmean(omega[:,:,r,c], axis=0)
    print("start prof")
    prof = profile.create_profile(profile='convective', pres=mean_p, hght=mean_hgt, tmpc=mean_t, \
                    dwpc=mean_td, u=mean_u, v=mean_v, omeg=mean_omega, missing=-9999, strictQC=False, latitude=xlat,date=vdateobj)

    print("end prof")
    member_profs = np.empty(len(z_agl[:,1]), dtype=object)
    print("start member profs")
    # Loop over all of the members and create profile objects for each of them.
    for m in range(len(z_agl[:,1,r,c])): 
        member_profs[m] =  profile.create_profile(profile='convective', pres=p[m,:,r,c], hght=z_agl[m,:,r,c], tmpc=t[m,:,r,c], \
                        dwpc=td[m,:,r,c], u=u[m,:,r,c], v=v[m,:,r,c], missing=-9999, strictQC=False, latitude=xlat,date=vdateobj)
        members = {'hght': z_agl[:,:,r,c], 'pres': p[:,:,r,c], 'tmpc': t[:,:,r,c], 'dwpc': td[:,:,r,c], 'u': u[:,:,r,c], 'v': v[:,:,r,c], 'member_profs': member_profs}

    print("end member profs")

    #find summary file lat/lons
    wherelatlon = np.where((sumlats == xlat)&(sumlons == xlon))
    latwhere = wherelatlon[0][0]
    lonwhere = wherelatlon[1][0]
    cape_ml = cape_ml_0[:,latwhere-3:latwhere+4,lonwhere-3:lonwhere+4]
    srh_0to1 = srh_0to1_0[:,latwhere-3:latwhere+4,lonwhere-3:lonwhere+4]



    figname = os.path.join(outdir,'wofs_snd_{:02d}_{:02d}_f{:03d}.png'.format(r+1,c+1,leadminute))
    # pass the data to the plotting script.
    print('rounded lat/lon',round(xlat,2), round(xlon,2))
    plot_wof(prof, members, figname, str(round(xlat,2)), str(round(xlon,2)), idateobj, vdateobj, x_pts=cape_ml, y_pts=srh_0to1)
Example #8
    def _parse(self):
        file_data = self._downloadFile()
        snfile = [l for l in file_data.split('\n')]

        bgn = -1
        end = -1
        ttl = -1
        stl = -1

        for i in range(len(snfile)):
            if snfile[i] == "<PRE>": 
                bgn = i+5
            if snfile[i][:10] == "</PRE><H3>": 
                end = i-1
            if snfile[i][:4] == "<H2>" and snfile[i][-5:] == "</H2>": 
                ttl = i
            if 'Station latitude' in snfile[i]:
                stl = i

        if bgn == -1 or end == -1 or ttl == -1:
            raise IOError("Looks like the server had difficulty handling the request.  Try again in a few minutes.")

        snd_data = []
        for i in range(bgn, end+1):
            vals = []
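            # Each UWYO <PRE> data row is a set of 7-character fixed-width
            # fields; columns 0, 1, 2, 3, 6 and 7 hold pres, hght, tmpc, dwpc,
            # wdir and wspd (matching col_names below).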
            for j in [ 0, 1, 2, 3, 6, 7 ]:
                val = snfile[i][(7 * j):(7 * (j + 1))].strip()

                if val == "":
                    vals.append(UWYODecoder.MISSING)
                else:
                    vals.append(float(val))
            snd_data.append(vals)

        col_names = ['pres', 'hght', 'tmpc', 'dwpc', 'wdir', 'wspd']
        snd_dict = dict((v, p) for v, p in zip(col_names, list(zip(*snd_data))))

        snd_date = datetime.strptime(snfile[ttl][-20:-5], "%HZ %d %b %Y")

        loc = snfile[ttl][10:14]
        if stl == -1:
            lat = 35.
        else:
            lat = float(snfile[stl].split(':')[-1].strip())

        prof = profile.create_profile(profile='raw', location=loc, date=snd_date, latitude=lat, missing=UWYODecoder.MISSING, **snd_dict)

        prof_coll = prof_collection.ProfCollection(
            {'':[ prof ]},
            [ snd_date ],
        )

        prof_coll.setMeta('loc', loc)
        prof_coll.setMeta('observed', True)
        return prof_coll
Example #9
def getProf(fname):
    dec = spc_decoder.SPCDecoder(fname)
    profs = dec.getProfiles()
    stn_id = dec.getStnId()

    all_profs = profs._profs
    prof = all_profs[''][0]
    dates = profs._dates
    prof = profile.create_profile(pres=prof.pres, hght=prof.hght, tmpc=prof.tmpc, dwpc=prof.dwpc, wspd=prof.wspd, \
                                      wdir=prof.wdir, strictQC=False, profile='convective', date=dates[0])
    return prof
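
# A hypothetical usage sketch for getProf() above; the sample path is borrowed
# from Examples #14 and #26 and is an assumption, not part of this snippet:
#     prof = getProf('examples/data/14061619.OAX')
#     print(prof.mupcl.bplus)  # most-unstable CAPE of the convective profile (J/kg)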
Example #10
    def _parseSection(self, section):
        parts = section.split('\n')
        dt_obj = datetime.strptime(parts[1], 'TIME = %y%m%d/%H%M')
        member = parts[0].split('=')[-1].strip()
        location = parts[2].split('SLAT')[0].split('=')[-1].strip()
        data = '\n'.join(parts[5:])
        sound_data = StringIO( data )
        p, h, t, td, wdir, wspd, omeg = np.genfromtxt( sound_data, delimiter=',', unpack=True)

        prof = profile.create_profile(profile='raw', pres=p[1:], hght=h[1:], tmpc=t[1:], dwpc=td[1:], wspd=wspd[1:],\
                                      wdir=wdir[1:], omeg=omeg[1:], location=location, missing=-999.0)
        return prof, dt_obj, member
Example #11
    def _parseSection(self, section):
        parts = section.split('\n')
        dt_obj = datetime.strptime(parts[1], 'TIME = %y%m%d/%H%M')
        member = parts[0].split('=')[-1].strip()
        location = parts[2].split('SLAT')[0].split('=')[-1].strip()
        data = '\n'.join(parts[5:])
        sound_data = StringIO(data)
        p, h, t, td, wdir, wspd, omeg = np.genfromtxt(sound_data,
                                                      delimiter=',',
                                                      unpack=True)

        prof = profile.create_profile(profile='raw', pres=p[1:], hght=h[1:], tmpc=t[1:], dwpc=td[1:], wspd=wspd[1:],\
                                      wdir=wdir[1:], omeg=omeg[1:], location=location, date=dt_obj, missing=-999.0)
        return prof, dt_obj, member
Example #12
    def _parse(self):
        file_data = self._downloadFile()
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0] + 1
        finish_idx = np.where( data == '%END%')[0]

        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0]
        time = datetime.strptime(data_header[1][:11], '%y%m%d/%H%M')
        
        if time > datetime.utcnow():
            # If the strptime accidentally puts the sounding in the future (like with the SARS archive),
            # i.e. a 1957 sounding becomes a 2057 sounding, ensure that it's part of the 20th century.
            time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        sound_data = StringIO( full_data )

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )
#       idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p #[idx]
        hght = h #[idx]
        tmpc = T #[idx]
        dwpc = Td #[idx]
        wspd = wspd #[idx]
        wdir = wdir #[idx]

        # Force latitude to be 35 N. Figure out a way to fix this later.
        prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc,
            wdir=wdir, wspd=wspd, location=location, date=time, latitude=35.)

        prof_coll = prof_collection.ProfCollection(
            {'':[ prof ]}, 
            [ time ],
        )

        prof_coll.setMeta('loc', location)
        prof_coll.setMeta('observed', True)
        prof_coll.setMeta('base_time', time)
        return prof_coll
Example #13
    def _parse(self):

        # time = datetime.now()

        file_data = self._downloadFile()
        modInit, modName, fHour, coords = file_data.split(' ')
        # if len(modInit) == 2:
        #     import time
        #     modInit = time.strftime('%Y%m%d')+modInit

        locationStr = coords.strip('\n')

        textFile = '/home/apache/climate/data/forecast/sndgs/'+modInit+'_'+modName+'_'+fHour+'_'+coords.strip('\n')+'_raw.txt'
        #writeTimes(textFile, 'Begin')

        data_header = 'Location: '

        time = datetime.strptime(modInit, '%Y%m%d%H') + timedelta(hours=int(fHour))

        # Determine if it's a site ID:
        if ',' not in coords:
            import numpy as np
            sites, siteCoords = np.genfromtxt('/home/apache/climate/hanis/model/fsound/text/sid.txt', dtype=str, unpack=True, delimiter=' ')
            i = np.where(sites == 'KORD')
            coords = siteCoords[i[0][0]]

        variables = fsonde_decoder.decode(modInit, modName, fHour, coords)
        #writeTimes(textFile, 'After Decode')

        pres = variables['pres']
        hght = variables['hght']
        tmpc = variables['temp']
        dwpc = variables['dewp']
        u = variables['ugrd']
        v = variables['vgrd']
        omeg = variables['omeg']
        
        wdir, wspd = utils.comp2vec(u, v)
        # wspd = [s*1.94384 for s in wspd]

        # Force latitude to be 35 N. Figure out a way to fix this later.
        prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc, wdir=wdir, wspd=wspd, omeg=omeg, location=locationStr, date=time, latitude=35.)
        prof_coll = prof_collection.ProfCollection({'':[ prof ]},[ time ],)
        prof_coll.setMeta('loc', locationStr)
        #writeTimes(textFile, 'End')
        return prof_coll
Example #14
def test_url():
    path = 'examples/data/14061619.OAX'
    # sys.argv[1] could be used to pass the URL/path of the file being tested; a sample file is hard-coded here.
    profs, stn_id = decode(path)
    print((profs._profs))

    for k in profs._profs.keys():
        all_prof = profs._profs[k]
        dates = profs._dates
        for i in range(len(all_prof)):
            prof = all_prof[i]
            new_prof = profile.create_profile(pres=prof.pres, hght=prof.hght, tmpc=prof.tmpc, dwpc=prof.dwpc, wspd=prof.wspd, \
                                              wdir=prof.wdir, strictQC=False, profile='convective', date=dates[i])
            #for key in dir(new_prof):
            #    print((key, getattr(new_prof,key)))

    print(new_prof.mupcl.bplus)
Example #15
def decode(filename):

    dec = SPCDecoder(filename)

    if dec is None:
        raise IOError("Could not figure out the format of '%s'!" % filename)

    # Returns the set of profiles from the file that are from the "Profile" class.
    profs = dec.getProfiles()
    stn_id = dec.getStnId()

    for k in list(profs._profs.keys()):
        all_prof = profs._profs[k]
        dates = profs._dates
        for i in range(len(all_prof)):
            prof = all_prof[i]
            new_prof = profile.create_profile(pres=prof.pres, hght=prof.hght, tmpc=prof.tmpc, dwpc=prof.dwpc, wspd=prof.wspd, \
                                              wdir=prof.wdir, strictQC=False, profile='convective', date=dates[i])
            return new_prof, dates[i], stn_id
Example #16
    def make_profile(self, i):
        d = self.d
        prof = profile.create_profile(profile='convective', hght = d.hght[0][i],
                tmpc = d.tmpc[0][i], dwpc = d.dwpc[0][i], pres = d.pres[0][i],
                wspd=d.wspd[0][i], wdir=d.wdir[0][i])
        return prof
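    # Hedged usage sketch: assuming self.d holds a BufkitFile-like object with
    # per-time arrays (as in Example #1), a convective profile for the first
    # forecast time could be built with:
    #     prof = self.make_profile(0)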
Example #17
p_std = data_std[:, 0]
T = data_all[:, 1]
Td = data_all[:, 2]
RH = data_all[:, 3]
#Td = 243.04*(np.log(RH/100)+((17.625*T)/(243.04+T)))/(17.625-np.log(RH/100)-((17.625*T)/(243.04+T)))
h = data_all[:, 4]
h = [i - np.min(h) for i in h]  #reduce to ground level
spd = data_all[:, 5]
spd_std = data_std[:, 5]
direc = data_all[:, 6]
direc_std = data_std[:, 6]

prof = profile.create_profile(profile='default',
                              pres=p,
                              hght=h,
                              tmpc=T,
                              dwpc=Td,
                              wspd=spd,
                              wdir=direc,
                              strictQC=False)

#interpolate pressure to important height levels
h_new = [1000] + [3000, 6000, 9000, 12000, 15000]
for i in range(len(h_new)):
    if np.max(h) > h_new[i]:
        index = i
h_new_labels = ['1 km', '3 km', '6 km', '9 km', '12 km', '15 km']
h_new_labels = h_new_labels[0:index + 1]
p_interped_func = interpolate.interp1d(h, p)
p_interped = p_interped_func(h_new[0:index + 1])

# Add units to the data arrays
Example #18
    if fhr % args.interval != 0:
        print "fhr not multiple of", args.interval, "skipping", sfile
        continue
    if len(station) > 3 and station not in no_ignore_station:
        print "skipping", sfile
        continue
    skew.ax.set_title(title, horizontalalignment="left", x=0, fontsize=12) 
    print "reading", sfile
    data = open(sfile).read()
    pres, hght, tmpc, dwpc, wdir, wspd, latitude, longitude = parseGEMPAK(data)

    if wdir.size == 0:
        print "no good data lines. empty profile"
        continue

    prof = profile.create_profile(profile='default', pres=pres, hght=hght, tmpc=tmpc, 
                                    dwpc=dwpc, wspd=wspd, wdir=wdir, latitude=latitude, longitude=longitude, missing=-999., strictQC=True)

    #### Adding a Parcel Trace
    sfcpcl = params.parcelx( prof, flag=1 ) # Surface Parcel
    #fcstpcl = params.parcelx( prof, flag=2 ) # Forecast Parcel
    mupcl = params.parcelx( prof, flag=3 ) # Most-Unstable Parcel
    mlpcl = params.parcelx( prof, flag=4 ) # 100 mb Mean Layer Parcel
    # Set the parcel trace to be plotted as the Most-Unstable parcel.
    pcl = mupcl

    # Temperature, dewpoint, virtual temperature, wetbulb, parcel profiles
    temperature_trace, = skew.plot(prof.pres, prof.tmpc, 'r', linewidth=2) # temperature profile 
    # annotate temperature in F at bottom of T profile
    temperatureF = skew.ax.text(prof.tmpc[0], prof.pres[0]+10, utils.INT2STR(thermo.ctof(prof.tmpc[0])), 
            verticalalignment='top', horizontalalignment='center', size=7, color=temperature_trace.get_color())
    skew.plot(prof.pres, prof.vtmp, 'r', linewidth=0.5)                    # Virtual temperature profile
Example #19
hghts = ds.z.values / G
uwnd = ds.u.values
vwnd = ds.v.values
wdirs = wind_direction(uwnd, vwnd)
wspds = wind_speed(uwnd, vwnd)

rh = ds.r.values / 100.
tmpc = ds.t.values
dwpc = dewpoint_from_rh(tmpc, rh)
tmpc = tmpc - ZEROCNK

t = 0
for j in range(tmpc.shape[2]):
    for i in range(tmpc.shape[3]):
        prof = SHARPPY_PROFILE.create_profile(pres=pres,
                                              tmpc=tmpc[t, :, j, i],
                                              hght=hghts[t, :, j, i],
                                              dwpc=dwpc[t, :, j, i],
                                              wspd=wspds[t, :, j, i],
                                              wdir=wdirs[t, :, j, i])
        eff_inflow = params.effective_inflow_layer(prof)
        ebot_hght = interp.to_agl(prof, interp.hght(prof, eff_inflow[0]))
        etop_hght = interp.to_agl(prof, interp.hght(prof, eff_inflow[1]))

        srwind = params.bunkers_storm_motion(prof)
        effective_srh = winds.helicity(prof,
                                       ebot_hght,
                                       etop_hght,
                                       stu=srwind[0],
                                       stv=srwind[1])
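        # Sketch (not in the original excerpt): the combined effective-layer SRH
        # for this grid point could be stored before moving on, e.g.
        #     esrh_grid[j, i] = effective_srh[0]
        # where index 0 of the tuple returned by winds.helicity is the total
        # (positive plus negative) helicity.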
Example #20
    print('... reading file:  ' + filename)
    url = open(filename)
    data = np.array(url.read().split('\n'))
    title_idx = np.where(data == '%TITLE%')[0][0]
    start_idx = np.where(data == '%RAW%')[0]
    finish_idx = np.where(data == '%END%')[0]
    plot_title = data[title_idx + 1]
    full_data = '\n'.join(data[start_idx:finish_idx][:])
    sound_data = StringIO(full_data)
    P, h, T, Td, wdir, wspd = np.genfromtxt(sound_data,
                                            delimiter=',',
                                            comments="%",
                                            unpack=True)
    prof = profile.create_profile(pres=P,
                                  hght=h,
                                  tmpc=T,
                                  dwpc=Td,
                                  wdir=wdir,
                                  wspd=wspd)
    pcl = parcelx(prof)
    sfcpcl = tab.params.parcelx(prof, flag=1)  #Surface Parcel
    fcstpcl = tab.params.parcelx(prof, flag=2)  #Forecast Parcel
    mupcl = tab.params.parcelx(prof, flag=3)  #Most-Unstable Parcel
    mlpcl = tab.params.parcelx(prof, flag=4)  #100 mb Mean Layer Parcel

    csvfile = open(('Lift_Parcel_' + filename[:-4] + '.txt'), 'wb')
    writer = csv.writer(csvfile, delimiter=' ')
    writer.writerow([plot_title])
    writer.writerow('\r')
    loop_idx = range(1)
    for idx in loop_idx:
        a = [('Surface CAPE = ' + str(sfcpcl.bplus))]  #J/Kg
Example #21
               sys.argv[2])
    data = np.array(url.read().split('\n'))
    title_idx = np.where(data == '%TITLE%')[0][0]
    start_idx = np.where(data == '%RAW%')[0] + 1
    finish_idx = np.where(data == '%END%')[0]
    plot_title = data[title_idx + 1] + ' (Observed)'
    full_data = '\n'.join(data[start_idx:finish_idx][:])
    sound_data = StringIO(full_data)
    p, h, T, Td, wdir, wspd = np.genfromtxt(sound_data,
                                            delimiter=',',
                                            comments="%",
                                            unpack=True)
    prof = profile.create_profile(profile='convective',
                                  pres=p,
                                  hght=h,
                                  tmpc=T,
                                  dwpc=Td,
                                  wdir=wdir,
                                  wspd=wspd,
                                  location=sys.argv[1])

elif len(sys.argv) > 1 and sys.argv[1] != "test":
    gmtime = datetime.datetime.utcnow()
    t_str = str(gmtime)
    year = t_str[2:4]
    month = t_str[5:7]
    day = t_str[8:10]
    hour = t_str[11:13]
    if int(hour) > 12:
        current_ob = '12'
    else:
        current_ob = '00'
Example #22
    def _parseMember(self, text):
        data = np.array(text.split('\r\n'))
        data_idxs = []
        new_record = False
        begin_idx = 0
        member_name = data[0]
        dates = []
        # Figure out the indices for the data chunks
        for i in range(len(data)):
            if "STID" in data[i]:
                # Here is information about the record
                spl = data[i].split()
                station = spl[2]
                wmo_id = spl[5]
                dates.append(datetime.strptime(spl[8], '%y%m%d/%H%M'))
                slat = float(data[i+1].split()[2])
                slon = float(data[i+1].split()[5])
                selv = float(data[i+1].split()[8])
                stim = float(data[i+2].split()[2])

            if data[i].find('HGHT') >= 0 and new_record == False:
                # we've found a new data chunk
                new_record = True
                begin_idx = i+1
            elif 'STID' in data[i] and new_record == True:
                # We've found the end of the data chunk
                new_record = False
                data_idxs.append((begin_idx, i-1))
            elif 'STN' in data[i] and new_record == True:
                # We've found the end of the last data chunk of the file
                new_record = False
                data_idxs.append((begin_idx, i))
            elif new_record == True:
                continue
                    ##print data[i]
        
        data_idxs = data_idxs[1:]
        # Make arrays to store the data
        profiles = []        

        # Parse out the profiles
        for i in range(len(data_idxs)):
            data_stuff = data[data_idxs[i][0]: data_idxs[i][1]]
            profile_length = len(data[data_idxs[i][0]: data_idxs[i][1]]) // 2
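            # Each vertical level occupies two consecutive text lines: a
            # pres/tmpc/dwpc/wind/omega line followed by a height line, hence
            # the halved length and the j // 2 indexing below.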

            hght = np.zeros((profile_length,), dtype=float)
            pres = np.zeros((profile_length,), dtype=float)
            tmpc = np.zeros((profile_length,), dtype=float)
            dwpc = np.zeros((profile_length,), dtype=float)
            wdir = np.zeros((profile_length,), dtype=float)
            wspd = np.zeros((profile_length,), dtype=float)
            omeg = np.zeros((profile_length,), dtype=float)

            for j in np.arange(0, profile_length * 2, 2):
                if len(data_stuff[j+1].split()) == 1:
                    hght[j // 2] = float(data_stuff[j+1].split()[0])
                else:
                    hght[j // 2] = float(data_stuff[j+1].split()[1])
                tmpc[j // 2] = float(data_stuff[j].split()[1])
                dwpc[j // 2] = float(data_stuff[j].split()[3])
                pres[j // 2] = float(data_stuff[j].split()[0])
                wspd[j // 2] = float(data_stuff[j].split()[6])
                wdir[j // 2] = float(data_stuff[j].split()[5])
                omeg[j // 2] = float(data_stuff[j].split()[7])

            prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc, 
                wdir=wdir, wspd=wspd, omeg=omeg, location=station, date=dates[i], latitude=slat)

            profiles.append(prof)

        return member_name, profiles, dates
Example #23
def do_sharppy(spc_file):
    """
    Based on the tutorial which can be found here: http://nbviewer.ipython.org/github/sharppy/SHARPpy/blob/master/tutorials/SHARPpy_basics.ipynb
    SHARPpy can be found here: https://github.com/sharppy/SHARPpy
    Credit goes to:
    Patrick Marsh (SPC)
    Kelton Halbert (OU School of Meteorology)
    Greg Blumberg (OU/CIMMS)
    Tim Supinie (OU School of Meteorology)
    
    """
    import sharppy
    import sharppy.sharptab.profile as profile
    import sharppy.sharptab.interp as interp
    import sharppy.sharptab.winds as winds
    import sharppy.sharptab.utils as utils
    import sharppy.sharptab.params as params
    import sharppy.sharptab.thermo as thermo
    import matplotlib.pyplot as plt
    from StringIO import StringIO
    from matplotlib.axes import Axes
    import matplotlib.transforms as transforms
    import matplotlib.axis as maxis
    import matplotlib.spines as mspines
    import matplotlib.path as mpath
    from matplotlib.projections import register_projection

    spc_file = open('skewt_data', 'r').read()

    def parseSPC(spc_file):
        ## read in the file
        data = np.array([l.strip() for l in spc_file.split('\n')])

        ## necessary index points
        title_idx = np.where(data == '%TITLE%')[0][0]
        start_idx = np.where(data == '%RAW%')[0] + 1
        finish_idx = np.where(data == '%END%')[0]

        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0] + ' ' + data_header[1]
        time = data_header[2]
        title = location + ' ' + time
        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx:finish_idx][:])
        sound_data = StringIO(full_data)

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt(sound_data,
                                                delimiter=',',
                                                comments="%",
                                                unpack=True)

        return p, h, T, Td, wdir, wspd, title

    pres, hght, tmpc, dwpc, wdir, wspd, title = parseSPC(spc_file)
    prof = profile.create_profile(profile='default', pres=pres, hght=hght, tmpc=tmpc, \
    dwpc=dwpc, wspd=wspd, wdir=wdir, missing=-9999, strictQC=True)

    sfcpcl = params.parcelx(prof, flag=1)  # Surface Parcel
    fcstpcl = params.parcelx(prof, flag=2)  # Forecast Parcel
    mupcl = params.parcelx(prof, flag=3)  # Most-Unstable Parcel
    mlpcl = params.parcelx(prof, flag=4)  # 100 mb Mean Layer Parcel

    msl_hght = prof.hght[prof.sfc]  # Grab the surface height value
    print "SURFACE HEIGHT (m MSL):", msl_hght
    agl_hght = interp.to_agl(prof, msl_hght)  # Converts to AGL
    print "SURFACE HEIGHT (m AGL):", agl_hght
    msl_hght = interp.to_msl(prof, agl_hght)  # Converts to MSL
    print "SURFACE HEIGHT (m MSL):", msl_hght
    print "Most-Unstable CAPE:", mupcl.bplus  # J/kg
    print "Most-Unstable CIN:", mupcl.bminus  # J/kg
    print "Most-Unstable LCL:", mupcl.lclhght  # meters AGL
    print "Most-Unstable LFC:", mupcl.lfchght  # meters AGL
    print "Most-Unstable EL:", mupcl.elhght  # meters AGL
    print "Most-Unstable LI:", mupcl.li5  # C

    class SkewXTick(maxis.XTick):
        def draw(self, renderer):
            if not self.get_visible(): return
            renderer.open_group(self.__name__)

            lower_interval = self.axes.xaxis.lower_interval
            upper_interval = self.axes.xaxis.upper_interval

            if self.gridOn and transforms.interval_contains(
                    self.axes.xaxis.get_view_interval(), self.get_loc()):
                self.gridline.draw(renderer)

            if transforms.interval_contains(lower_interval, self.get_loc()):
                if self.tick1On:
                    self.tick1line.draw(renderer)
                if self.label1On:
                    self.label1.draw(renderer)

            if transforms.interval_contains(upper_interval, self.get_loc()):
                if self.tick2On:
                    self.tick2line.draw(renderer)
                if self.label2On:
                    self.label2.draw(renderer)

            renderer.close_group(self.__name__)

    # This class exists to provide two separate sets of intervals to the tick,
    # as well as create instances of the custom tick
    class SkewXAxis(maxis.XAxis):
        def __init__(self, *args, **kwargs):
            maxis.XAxis.__init__(self, *args, **kwargs)
            self.upper_interval = 0.0, 1.0

        def _get_tick(self, major):
            return SkewXTick(self.axes, 0, '', major=major)

        @property
        def lower_interval(self):
            return self.axes.viewLim.intervalx

        def get_view_interval(self):
            return self.upper_interval[0], self.axes.viewLim.intervalx[1]

    # This class exists to calculate the separate data range of the
    # upper X-axis and draw the spine there. It also provides this range
    # to the X-axis artist for ticking and gridlines
    class SkewSpine(mspines.Spine):
        def _adjust_location(self):
            trans = self.axes.transDataToAxes.inverted()
            if self.spine_type == 'top':
                yloc = 1.0
            else:
                yloc = 0.0
            left = trans.transform_point((0.0, yloc))[0]
            right = trans.transform_point((1.0, yloc))[0]

            pts = self._path.vertices
            pts[0, 0] = left
            pts[1, 0] = right
            self.axis.upper_interval = (left, right)

    # This class handles registration of the skew-xaxes as a projection as well
    # as setting up the appropriate transformations. It also overrides standard
    # spines and axes instances as appropriate.
    class SkewXAxes(Axes):
        # The projection must specify a name.  This will be used by the
        # user to select the projection, i.e. ``subplot(111,
        # projection='skewx')``.
        name = 'skewx'

        def _init_axis(self):
            #Taken from Axes and modified to use our modified X-axis
            self.xaxis = SkewXAxis(self)
            self.spines['top'].register_axis(self.xaxis)
            self.spines['bottom'].register_axis(self.xaxis)
            self.yaxis = maxis.YAxis(self)
            self.spines['left'].register_axis(self.yaxis)
            self.spines['right'].register_axis(self.yaxis)

        def _gen_axes_spines(self):
            spines = {
                'top': SkewSpine.linear_spine(self, 'top'),
                'bottom': mspines.Spine.linear_spine(self, 'bottom'),
                'left': mspines.Spine.linear_spine(self, 'left'),
                'right': mspines.Spine.linear_spine(self, 'right')
            }
            return spines

        def _set_lim_and_transforms(self):
            """
            This is called once when the plot is created to set up all the
            transforms for the data, text and grids.
            """
            rot = 30

            #Get the standard transform setup from the Axes base class
            Axes._set_lim_and_transforms(self)

            # Need to put the skew in the middle, after the scale and limits,
            # but before the transAxes. This way, the skew is done in Axes
            # coordinates thus performing the transform around the proper origin
            # We keep the pre-transAxes transform around for other users, like the
            # spines for finding bounds
            self.transDataToAxes = self.transScale + (
                self.transLimits + transforms.Affine2D().skew_deg(rot, 0))

            # Create the full transform from Data to Pixels
            self.transData = self.transDataToAxes + self.transAxes

            # Blended transforms like this need to have the skewing applied using
            # both axes, in axes coords like before.
            self._xaxis_transform = (
                transforms.blended_transform_factory(
                    self.transScale + self.transLimits,
                    transforms.IdentityTransform()) +
                transforms.Affine2D().skew_deg(rot, 0)) + self.transAxes

    # Now register the projection with matplotlib so the user can select
    # it.
    register_projection(SkewXAxes)

    pcl = mupcl
    # Create a new figure. The dimensions here give a good aspect ratio
    fig = plt.figure(figsize=(6.5875, 6.2125))
    ax = fig.add_subplot(111, projection='skewx')
    ax.grid(True)

    pmax = 1000
    pmin = 10
    dp = -10
    presvals = np.arange(int(pmax), int(pmin) + dp, dp)

    # plot the moist-adiabats
    for t in np.arange(-10, 45, 5):
        tw = []
        for p in presvals:
            tw.append(thermo.wetlift(1000., t, p))
        ax.semilogy(tw, presvals, 'k-', alpha=.2)

    def thetas(theta, presvals):
        return ((theta + thermo.ZEROCNK) / (np.power(
            (1000. / presvals), thermo.ROCP))) - thermo.ZEROCNK

    # plot the dry adiabats
    for t in np.arange(-50, 110, 10):
        ax.semilogy(thetas(t, presvals), presvals, 'r-', alpha=.2)

    plt.title(title, fontsize=14, loc='left')
    # Plot the data using normal plotting functions, in this case using
    # log scaling in Y, as dictated by the typical meteorological plot
    ax.semilogy(prof.tmpc, prof.pres, 'r', lw=2)
    ax.semilogy(prof.dwpc, prof.pres, 'g', lw=2)
    ax.semilogy(pcl.ttrace, pcl.ptrace, 'k-.', lw=2)

    # An example of a slanted line at constant X
    l = ax.axvline(0, color='b', linestyle='--')
    l = ax.axvline(-20, color='b', linestyle='--')

    # Disables the log-formatting that comes with semilogy
    ax.yaxis.set_major_formatter(plt.ScalarFormatter())
    ax.set_yticks(np.linspace(100, 1000, 10))
    ax.set_ylim(1050, 100)

    ax.xaxis.set_major_locator(plt.MultipleLocator(10))
    ax.set_xlim(-50, 50)
    plt.show()

    ##PLOTS SKEWT OK ABOVE HERE ##
    """
Example #24
        continue
    skew.ax.set_title(title, horizontalalignment="left", x=0, fontsize=12)
    print("reading " + sfile)
    data = open(sfile).read()
    pres, hght, tmpc, dwpc, wdir, wspd, latitude, longitude = parseGEMPAK(data)
    print("finished reading " + sfile)

    if wdir.size == 0:
        print("no good data lines. empty profile")
        continue

    prof = profile.create_profile(profile='default',
                                  pres=pres,
                                  hght=hght,
                                  tmpc=tmpc,
                                  dwpc=dwpc,
                                  wspd=wspd,
                                  wdir=wdir,
                                  latitude=latitude,
                                  longitude=longitude,
                                  strictQC=True)

    #### Adding a Parcel Trace
    sfcpcl = params.parcelx(prof, flag=1)  # Surface Parcel
    #fcstpcl = params.parcelx( prof, flag=2 ) # Forecast Parcel
    mupcl = params.parcelx(prof, flag=3)  # Most-Unstable Parcel
    mlpcl = params.parcelx(prof, flag=4)  # 100 mb Mean Layer Parcel
    # Set the parcel trace to be plotted as the Most-Unstable parcel.
    pcl = mupcl

    # Temperature, dewpoint, virtual temperature, wetbulb, parcel profiles
    temperature_trace, = skew.plot(prof.pres, prof.tmpc, 'r',
Example #25
    def _parse(self):
        global dyn_inset
        file_data = self._downloadFile()
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        #title_idx = np.where( 'Station:' in data )
        date_idx = data[0]
        title_idx = data[1]
        #print date_idx
        #print title_idx
        start_idx = (np.where(np.char.find(data, 'OMEGASTART') > -1)[0][0]) + 1
        #print data[start_idx]
        finish_idx = np.where(np.char.find(data, 'OMEGAEND') > -1)[0][0]

        ## create the plot title
        location = title_idx
        time = datetime.strptime(date_idx, '%a %d %b %Y | %H%M UTC')
        #print time.strftime('%Y %M %d %H')

        # data_header = 'Location: ' + location + ' ' + data[date_idx]
        # if 'analysis' in data[date_idx]:
        #     #print "analysis"
        #     timeStr = str(data[date_idx].split('for ')[1]).upper()
        #     time = datetime.strptime(timeStr, '%H%MZ %d %b %y')
        # else:
        #     #print "forecast"
        #     timeStr = str(data[date_idx].split('valid ')[1]).upper()
        #     time = datetime.strptime(timeStr, '%HZ %a %d %b %y')

        # if time > datetime.utcnow(): #If the strptime accidently makes the sounding the future:
        #     # If the strptime accidently makes the sounding in the future (like with SARS archive)
        #     # i.e. a 1957 sounding becomes 2057 sounding...ensure that it's a part of the 20th century
        #     time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx:finish_idx][:])
        sound_data = StringIO(full_data)

        #print datetime.strftime('%Y %m %d %H',time)

        #full_data = np.array(full_data)
        #print sound_data

        ## read the data into arrays
        p, T, Td, h, wspd, wdir, omeg = np.genfromtxt(sound_data,
                                                      unpack=True,
                                                      usecols=(0, 1, 2, 3, 4,
                                                               5, 6))
        #idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p  #[idx]
        hght = h  #[idx]
        tmpc = T  #[idx]
        dwpc = Td  #[idx]
        wspd = wspd  #[idx]
        wdir = wdir  #[idx]
        omeg = omeg  #[idx]
        print omeg
        if dwpc[0] < 40:
            dyn_inset = 'winter'
        else:
            dyn_inset = 'severe'

        # Force latitude to be 35 N. Figure out a way to fix this later.
        prof = profile.create_profile(profile='raw',
                                      pres=pres,
                                      hght=hght,
                                      tmpc=tmpc,
                                      dwpc=dwpc,
                                      wdir=wdir,
                                      wspd=wspd,
                                      omeg=omeg,
                                      location=location,
                                      date=time,
                                      latitude=35.)
        prof_coll = prof_collection.ProfCollection(
            {'': [prof]},
            [time],
        )
        prof_coll.setMeta('loc', location)
        print "Using Omega Decoder."
        return prof_coll
Example #26
import sharppy.io.spc_decoder as spc_decoder
import sharppy.sharptab.profile as profile
import sharppy.sharptab.watch_type as watch
import numpy.testing as npt
import numpy as np

files = ['examples/data/14061619.OAX']
dec = spc_decoder.SPCDecoder(files[0])
profs = dec.getProfiles()
stn_id = dec.getStnId()

all_profs = profs._profs
prof = all_profs[''][0]
dates = profs._dates
prof = profile.create_profile(pres=prof.pres, hght=prof.hght, tmpc=prof.tmpc, dwpc=prof.dwpc, wspd=prof.wspd, \
                                  wdir=prof.wdir, strictQC=False, profile='convective', date=dates[0])


def test_heat_index():
    temps = np.array([104, 100, 92, 92, 86, 80, 80, 60, 30])
    rh = np.array([55, 65, 60, 90, 90, 75, 40, 90, 50])
    correct_hi = np.array([
        137.361, 135.868, 104.684, 131.256, 105.294, 83.5751, 79.79, 59.965,
        30.00
    ])
    returned_hi = []
    for i in range(len(temps)):
        returned_hi.append(watch.heat_index(temps[i], rh[i]))
    returned_hi = np.asarray(returned_hi)
    npt.assert_almost_equal(returned_hi, correct_hi, 0)
Example #27
def append_wbz():
    #Load each ERA-Interim netcdf file, and append wbz

    start_lat = -44.525
    end_lat = -9.975
    start_lon = 111.975
    end_lon = 156.275
    domain = [start_lat, end_lat, start_lon, end_lon]
    model = "erai"
    region = "aus"
    dates = []
    for y in np.arange(1979, 2019):
        for m in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]:
            if (m != 12):
                dates.append([dt.datetime(y,m,1,0,0,0),\
                 dt.datetime(y,m+1,1,0,0,0)-dt.timedelta(hours = 6)])
            else:
                dates.append([dt.datetime(y,m,1,0,0,0),\
                 dt.datetime(y+1,1,1,0,0,0)-dt.timedelta(hours = 6)])
    for t in np.arange(0, len(dates)):
        print(str(dates[t][0]) + " - " + str(dates[t][1]))

        fname = "/g/data/eg3/ab4502/ExtremeWind/"+region+"/"+model+"/"+model+"_"+\
         dt.datetime.strftime(dates[t][0],"%Y%m%d")+"_"+\
         dt.datetime.strftime(dates[t][-1],"%Y%m%d")+".nc"

        ta,dp,hur,hgt,terrain,p,ps,wap,ua,va,uas,vas,tas,ta2d,cp,wg10,cape,lon,lat,date_list = \
         read_erai(domain,dates[t])

        dp = get_dp(ta, hur, dp_mask=False)

        agl_idx = (p <= ps)

        #Replace masked dp values
        dp = replace_dp(dp)
        try:
            prof = profile.create_profile(pres = np.insert(p[agl_idx],0,ps), \
             hght = np.insert(hgt[agl_idx],0,terrain), \
             tmpc = np.insert(ta[agl_idx],0,tas), \
             dwpc = np.insert(dp[agl_idx],0,ta2d), \
             u = np.insert(ua[agl_idx],0,uas), \
             v = np.insert(va[agl_idx],0,vas), \
             strictqc=False, omeg=np.insert(wap[agl_idx],0,wap[agl_idx][0]) )
        except:
            p = p[agl_idx]
            ua = ua[agl_idx]
            va = va[agl_idx]
            hgt = hgt[agl_idx]
            ta = ta[agl_idx]
            dp = dp[agl_idx]
            p[0] = ps
            ua[0] = uas
            va[0] = vas
            hgt[0] = terrain
            ta[0] = tas
            dp[0] = ta2d
            prof = profile.create_profile(pres = p, \
             hght = hgt, \
             tmpc = ta, \
             dwpc = dp, \
             u = ua, \
             v = va, \
             strictqc=False, omeg=wap[agl_idx])

        pwb0 = params.temp_lvl(prof, 0, wetbulb=True)
        hwb0 = interp.to_agl(prof, interp.hght(prof, pwb0))

        param_file = nc.Dataset(fname, "a")
        wbz_var = param_file.createVariable("wbz",float,\
        ("time","lat","lon"))
        wbz_var.units = "m"
        wbz_var.long_name = "wet_bulb_zero_height"
        wbz_var[:] = hwb0

        T1 = abs(
            thermo.wetlift(prof.pres[0], prof.tmpc[0], 600) -
            interp.temp(prof, 600))
        T2 = abs(
            thermo.wetlift(pwb0, interp.temp(prof, pwb0), sfc) - prof.tmpc[0])
        Vprime = utils.KTS2MS(13 * np.sqrt((T1 + T2) / 2) + (1 / 3 *
                                                             (Umean01)))

        Vprime_var = param_file.createVariable("Vprime",float,\
        ("time","lat","lon"))
        Vprime_var.units = "m/s"
        Vprime_var.long_name = "miller_1972_wind_speed"
        Vprime_var[:] = Vprime

        param_file.close()
Example #28
def do_sharppy(spc_file):
    """
    Based on the tutorial which can be found here: http://nbviewer.ipython.org/github/sharppy/SHARPpy/blob/master/tutorials/SHARPpy_basics.ipynb
    SHARPpy can be found here: https://github.com/sharppy/SHARPpy
    Credit goes to:
    Patrick Marsh (SPC)
    Kelton Halbert (OU School of Meteorology)
    Greg Blumberg (OU/CIMMS)
    Tim Supinie (OU School of Meteorology)
    
    """
    import sharppy
    import sharppy.sharptab.profile as profile
    import sharppy.sharptab.interp as interp
    import sharppy.sharptab.winds as winds
    import sharppy.sharptab.utils as utils
    import sharppy.sharptab.params as params
    import sharppy.sharptab.thermo as thermo
    import matplotlib.pyplot as plt
    from StringIO import StringIO
    from matplotlib.axes import Axes
    import matplotlib.transforms as transforms
    import matplotlib.axis as maxis
    import matplotlib.spines as mspines
    import matplotlib.path as mpath
    from matplotlib.projections import register_projection
    
    spc_file = open('skewt_data', 'r').read()


    def parseSPC(spc_file):
        ## read in the file
        data = np.array([l.strip() for l in spc_file.split('\n')])

        ## necessary index points
        title_idx = np.where( data == '%TITLE%')[0][0]
        start_idx = np.where( data == '%RAW%' )[0] + 1
        finish_idx = np.where( data == '%END%')[0]
    
        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0]+' '+data_header[1]
        time = data_header[2]
        title = location+' '+time
        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx : finish_idx][:])
        sound_data = StringIO( full_data )
    
        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )
    
        return p, h, T, Td, wdir, wspd, title
        
    pres, hght, tmpc, dwpc, wdir, wspd, title = parseSPC(spc_file)
    prof = profile.create_profile(profile='default', pres=pres, hght=hght, tmpc=tmpc, \
    dwpc=dwpc, wspd=wspd, wdir=wdir, missing=-9999, strictQC=True)
    
    sfcpcl = params.parcelx( prof, flag=1 ) # Surface Parcel
    fcstpcl = params.parcelx( prof, flag=2 ) # Forecast Parcel
    mupcl = params.parcelx( prof, flag=3 ) # Most-Unstable Parcel
    mlpcl = params.parcelx( prof, flag=4 ) # 100 mb Mean Layer Parcel
         
    msl_hght = prof.hght[prof.sfc] # Grab the surface height value
    print "SURFACE HEIGHT (m MSL):",msl_hght
    agl_hght = interp.to_agl(prof, msl_hght) # Converts to AGL
    print "SURFACE HEIGHT (m AGL):", agl_hght
    msl_hght = interp.to_msl(prof, agl_hght) # Converts to MSL
    print "SURFACE HEIGHT (m MSL):",msl_hght
    print "Most-Unstable CAPE:", mupcl.bplus # J/kg
    print "Most-Unstable CIN:", mupcl.bminus # J/kg
    print "Most-Unstable LCL:", mupcl.lclhght # meters AGL
    print "Most-Unstable LFC:", mupcl.lfchght # meters AGL
    print "Most-Unstable EL:", mupcl.elhght # meters AGL
    print "Most-Unstable LI:", mupcl.li5 # C
    
    class SkewXTick(maxis.XTick):
        def draw(self, renderer):
            if not self.get_visible(): return
            renderer.open_group(self.__name__)
    
            lower_interval = self.axes.xaxis.lower_interval
            upper_interval = self.axes.xaxis.upper_interval
    
            if self.gridOn and transforms.interval_contains(
                    self.axes.xaxis.get_view_interval(), self.get_loc()):
                self.gridline.draw(renderer)
    
            if transforms.interval_contains(lower_interval, self.get_loc()):
                if self.tick1On:
                    self.tick1line.draw(renderer)
                if self.label1On:
                    self.label1.draw(renderer)
    
            if transforms.interval_contains(upper_interval, self.get_loc()):
                if self.tick2On:
                    self.tick2line.draw(renderer)
                if self.label2On:
                    self.label2.draw(renderer)
    
            renderer.close_group(self.__name__)
    
    
    # This class exists to provide two separate sets of intervals to the tick,
    # as well as create instances of the custom tick
    class SkewXAxis(maxis.XAxis):
        def __init__(self, *args, **kwargs):
            maxis.XAxis.__init__(self, *args, **kwargs)
            self.upper_interval = 0.0, 1.0
    
        def _get_tick(self, major):
            return SkewXTick(self.axes, 0, '', major=major)
    
        @property
        def lower_interval(self):
            return self.axes.viewLim.intervalx
    
        def get_view_interval(self):
            return self.upper_interval[0], self.axes.viewLim.intervalx[1]
    
    
    # This class exists to calculate the separate data range of the
    # upper X-axis and draw the spine there. It also provides this range
    # to the X-axis artist for ticking and gridlines
    class SkewSpine(mspines.Spine):
        def _adjust_location(self):
            trans = self.axes.transDataToAxes.inverted()
            if self.spine_type == 'top':
                yloc = 1.0
            else:
                yloc = 0.0
            left = trans.transform_point((0.0, yloc))[0]
            right = trans.transform_point((1.0, yloc))[0]
    
            pts  = self._path.vertices
            pts[0, 0] = left
            pts[1, 0] = right
            self.axis.upper_interval = (left, right)
    
    
    # This class handles registration of the skew-xaxes as a projection as well
    # as setting up the appropriate transformations. It also overrides standard
    # spines and axes instances as appropriate.
    class SkewXAxes(Axes):
        # The projection must specify a name.  This will be used by the
        # user to select the projection, i.e. ``subplot(111,
        # projection='skewx')``.
        name = 'skewx'
    
        def _init_axis(self):
            #Taken from Axes and modified to use our modified X-axis
            self.xaxis = SkewXAxis(self)
            self.spines['top'].register_axis(self.xaxis)
            self.spines['bottom'].register_axis(self.xaxis)
            self.yaxis = maxis.YAxis(self)
            self.spines['left'].register_axis(self.yaxis)
            self.spines['right'].register_axis(self.yaxis)
    
        def _gen_axes_spines(self):
            spines = {'top':SkewSpine.linear_spine(self, 'top'),
                      'bottom':mspines.Spine.linear_spine(self, 'bottom'),
                      'left':mspines.Spine.linear_spine(self, 'left'),
                      'right':mspines.Spine.linear_spine(self, 'right')}
            return spines
    
        def _set_lim_and_transforms(self):
            """
            This is called once when the plot is created to set up all the
            transforms for the data, text and grids.
            """
            rot = 30
    
            #Get the standard transform setup from the Axes base class
            Axes._set_lim_and_transforms(self)
    
            # Need to put the skew in the middle, after the scale and limits,
            # but before the transAxes. This way, the skew is done in Axes
            # coordinates thus performing the transform around the proper origin
            # We keep the pre-transAxes transform around for other users, like the
            # spines for finding bounds
            self.transDataToAxes = self.transScale + (self.transLimits +
                    transforms.Affine2D().skew_deg(rot, 0))
    
            # Create the full transform from Data to Pixels
            self.transData = self.transDataToAxes + self.transAxes
    
            # Blended transforms like this need to have the skewing applied using
            # both axes, in axes coords like before.
            self._xaxis_transform = (transforms.blended_transform_factory(
                        self.transScale + self.transLimits,
                        transforms.IdentityTransform()) +
                    transforms.Affine2D().skew_deg(rot, 0)) + self.transAxes
    
    # Now register the projection with matplotlib so the user can select
    # it.
    register_projection(SkewXAxes)
    
    pcl = mupcl
    # Create a new figure. The dimensions here give a good aspect ratio
    fig = plt.figure(figsize=(6.5875, 6.2125))
    ax = fig.add_subplot(111, projection='skewx')
    ax.grid(True)
    
    pmax = 1000
    pmin = 10
    dp = -10
    presvals = np.arange(int(pmax), int(pmin)+dp, dp)
    
    # plot the moist-adiabats
    for t in np.arange(-10,45,5):
        tw = []
        for p in presvals:
            tw.append(thermo.wetlift(1000., t, p))
        ax.semilogy(tw, presvals, 'k-', alpha=.2)
    
    def thetas(theta, presvals):
        return ((theta + thermo.ZEROCNK) / (np.power((1000. / presvals),thermo.ROCP))) - thermo.ZEROCNK
    
    # plot the dry adiabats
    for t in np.arange(-50,110,10):
        ax.semilogy(thetas(t, presvals), presvals, 'r-', alpha=.2)
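    # Quick sanity check on the dry-adiabat relation above (a rough sketch; values assume
    # SHARPpy's thermo.ROCP ~ 0.2857 and thermo.ZEROCNK = 273.15):
    #   thetas(30., 1000.) -> 30.0 (temperature equals theta at 1000 hPa)
    #   thetas(30., 500.)  -> about -24.5 C (the parcel cools along the dry adiabat)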
    
    plt.title(title, fontsize=14, loc='left')
    # Plot the data using normal plotting functions, in this case using
    # log scaling in Y, as dictated by the typical meteorological plot
    ax.semilogy(prof.tmpc, prof.pres, 'r', lw=2)
    ax.semilogy(prof.dwpc, prof.pres, 'g', lw=2)
    ax.semilogy(pcl.ttrace, pcl.ptrace, 'k-.', lw=2)
    
    # An example of a slanted line at constant X
    l = ax.axvline(0, color='b', linestyle='--')
    l = ax.axvline(-20, color='b', linestyle='--')
    
    # Disables the log-formatting that comes with semilogy
    ax.yaxis.set_major_formatter(plt.ScalarFormatter())
    ax.set_yticks(np.linspace(100,1000,10))
    ax.set_ylim(1050,100)
    
    ax.xaxis.set_major_locator(plt.MultipleLocator(10))
    ax.set_xlim(-50,50)
    plt.show()
    
    ##PLOTS SKEWT OK ABOVE HERE ##
    """
Example #29
0
    def _parse(self):
        file_data = self._downloadFile()
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        start_idx = np.where(data == '<PRE>')[0]
        finish_idx = np.where(np.char.find(data, '</H3>') > -1)[0]
        time_idx = np.where(np.char.find(data, 'time') > -1)[0][0]
        latitude_idx = np.where(np.char.find(data, 'latitude') > -1)[0][0]

        ## create the plot title and time
        location = data[4].split()[1]
        time = datetime.strptime(data[time_idx].strip().split()[2],
                                 '%y%m%d/%H%M')
        latitude = data[latitude_idx].strip().split()[2]
        if time > datetime.utcnow():
            # If strptime accidentally places the sounding in the future (as with the SARS
            # archive, where a 1957 sounding becomes 2057), re-parse it as 20th century.
            time = datetime.strptime('19' + data[time_idx].strip().split()[2],
                                     '%Y%m%d/%H%M')
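            # Background: Python's %y maps two-digit years 00-68 to 2000-2068 and
            # 69-99 to 1969-1999, so e.g. '57' first parses as 2057.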

        ## put it all together for StringIO
        data = data[10:finish_idx][:]
        data_final = []
        max_hght = 0
        for m in data:
            ## collapse runs of whitespace so each row splits cleanly
            while '  ' in m:
                m = m.replace('  ', ' ')
            ## keep only complete rows (11 fields) whose height strictly increases
            if len(m.split(' ')) != 11:
                continue
            if int(float(m.split(' ')[1])) <= max_hght:
                continue
            data_final.append(m)
            max_hght = int(float(m.split(' ')[1]))
        full_data = '\n'.join(data_final)
        while '  ' in full_data:
            full_data = full_data.replace('  ', ' ')
        sound_data = StringIO(full_data.strip())
        ## read the data into arrays
        p, h, T, Td, rh, mr, wdir, wspd, ta, te, tv = np.genfromtxt(
            sound_data, delimiter=' ', comments="%", unpack=True)
        #idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p  #[idx]
        hght = h  #[idx]
        tmpc = T  #[idx]
        dwpc = Td  #[idx]
        wspd = wspd  #[idx]
        wdir = wdir  #[idx]
        ## treat wind directions of 360 or greater as 0
        wdir_final = []
        for m in wdir:
            wdir_final.append(m if int(m) < 360 else 0.)

        # Build the profile from the parsed location, time, and latitude.
        prof = profile.create_profile(profile='raw',
                                      pres=pres,
                                      hght=hght,
                                      tmpc=tmpc,
                                      dwpc=dwpc,
                                      wdir=wdir_final,
                                      wspd=wspd,
                                      location=location,
                                      date=time,
                                      latitude=float(latitude))

        prof_coll = prof_collection.ProfCollection(
            {'': [prof]},
            [time],
        )

        prof_coll.setMeta('loc', location)
        return prof_coll
Example #30
0
def test_plotting():

    dec = spc_decoder.SPCDecoder(files[0])
    profs = dec.getProfiles()
    stn_id = dec.getStnId()

    print(profs)
    all_profs = profs._profs
    print(all_profs)
    prof = all_profs[''][0]
    dates = profs._dates
    print(dates)
    prof = profile.create_profile(pres=prof.pres, hght=prof.hght, tmpc=prof.tmpc, dwpc=prof.dwpc, wspd=prof.wspd, \
                                      wdir=prof.wdir, strictQC=False, profile='convective', date=dates[0])
    time = dates[0]
    location = "OAX"
    pb_plot = 1050
    pt_plot = 100
    dp_plot = 10
    plevs_plot = np.arange(pb_plot, pt_plot - 1, -dp_plot)
    # Open up the text file with the data in columns (e.g. the sample OAX file distributed with SHARPpy)
    title = time.strftime('%Y%m%d/%H%M') + ' ' + location + '   (Observed)'

    # Set up the figure in matplotlib.
    fig = plt.figure(figsize=(9, 8))
    gs = gridspec.GridSpec(4, 4, width_ratios=[1, 5, 1, 1])
    ax = plt.subplot(gs[0:3, 0:2], projection='skewx')
    skew.draw_title(ax, title)
    skew.draw_dry_adiabats(ax)
    skew.draw_mixing_ratio_lines(ax)
    skew.draw_moist_adiabats(ax)
    skew.draw_heights(ax, prof)
    skew.draw_effective_inflow_layer(ax, prof)

    ax.grid(True)
    plt.grid(True)

    # Plot the background variables
    presvals = np.arange(1000, 0, -10)

    ax.semilogy(prof.tmpc[~prof.tmpc.mask],
                prof.pres[~prof.tmpc.mask],
                'r',
                lw=2)
    ax.semilogy(prof.dwpc[~prof.dwpc.mask],
                prof.pres[~prof.dwpc.mask],
                'g',
                lw=2)
    ax.semilogy(prof.vtmp[~prof.dwpc.mask], prof.pres[~prof.dwpc.mask], 'r--')
    ax.semilogy(prof.wetbulb[~prof.dwpc.mask], prof.pres[~prof.dwpc.mask],
                'c-')

    # Plot the parcel trace, but this may fail.  If it does so, inform the user.
    try:
        ax.semilogy(prof.mupcl.ttrace, prof.mupcl.ptrace, 'k--')
    except Exception:
        print("Couldn't plot parcel traces...")

    skew.plot_sig_levels(ax, prof)

    # Highlight the 0 C and -20 C isotherms.
    l = ax.axvline(0, color='b', ls='--')
    l = ax.axvline(-20, color='b', ls='--')

    # Disables the log-formatting that comes with semilogy
    ax.yaxis.set_major_formatter(ScalarFormatter())
    ax.set_yticks(np.linspace(100, 1000, 10))
    ax.set_ylim(1050, 100)

    # Plot the hodograph data.
    inset_axes = skew.draw_hodo_inset(ax, prof)
    skew.plotHodo(inset_axes, prof.hght, prof.u, prof.v, color='r')
    #inset_axes.text(srwind[0], srwind[1], 'RM', color='r', fontsize=8)
    #inset_axes.text(srwind[2], srwind[3], 'LM', color='b', fontsize=8)

    # Draw the wind barbs axis and everything that comes with it.
    ax.xaxis.set_major_locator(MultipleLocator(10))
    ax.set_xlim(-50, 50)
    ax2 = plt.subplot(gs[0:3, 2])
    ax3 = plt.subplot(gs[3, 0:3])
    skew.plot_wind_axes(ax2)
    skew.plot_wind_barbs(ax2, prof.pres, prof.u, prof.v)
    gs.update(left=0.05, bottom=0.05, top=0.95, right=1, wspace=0.025)
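    # Optionally render the composed figure to disk so the test output can be inspected
    # (the file name here is a placeholder, not part of the original test).
    plt.savefig('test_skewt_output.png', dpi=150)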
Example #31
0
    def _parseMember(self, text):
        data = np.array(text.split('\r\n'))
        data_idxs = []
        new_record = False
        begin_idx = 0
        member_name = data[0]
        dates = []
        # Figure out the indices for the data chunks
        for i in range(len(data)):
            if "STID" in data[i]:
                # Here is information about the record
                spl = data[i].split()

                if spl[2].strip() == "STNM":
                    station = ""  # The bufkit file has a blank space for the station name
                    wmo_id = spl[4]
                    dates.append(datetime.strptime(spl[7], '%y%m%d/%H%M'))
                else:
                    station = spl[2]
                    wmo_id = spl[5]
                    dates.append(datetime.strptime(spl[8], '%y%m%d/%H%M'))

                slat = float(data[i + 1].split()[2])
                slon = float(data[i + 1].split()[5])
                selv = float(data[i + 1].split()[8])
                stim = float(data[i + 2].split()[2])

            if data[i].find('HGHT') >= 0 and new_record == False:
                # we've found a new data chunk
                new_record = True
                begin_idx = i + 1
            elif 'STID' in data[i] and new_record == True:
                # We've found the end of the data chunk
                new_record = False
                data_idxs.append((begin_idx, i - 1))
            elif 'STN' in data[i] and new_record == True:
                # We've found the end of the last data chunk of the file
                new_record = False
                data_idxs.append((begin_idx, i))
            elif new_record == True:
                continue
                ##print data[i]

        data_idxs = data_idxs[1:]
        # Make arrays to store the data
        profiles = []

        # Parse out the profiles
        for i in range(len(data_idxs)):
            data_stuff = data[data_idxs[i][0]:data_idxs[i][1]]
            profile_length = len(data_stuff) // 2

            hght = np.zeros((profile_length, ), dtype=float)
            pres = np.zeros((profile_length, ), dtype=float)
            tmpc = np.zeros((profile_length, ), dtype=float)
            dwpc = np.zeros((profile_length, ), dtype=float)
            wdir = np.zeros((profile_length, ), dtype=float)
            wspd = np.zeros((profile_length, ), dtype=float)
            omeg = np.zeros((profile_length, ), dtype=float)

            for j in np.arange(0, profile_length * 2, 2):
                ## each level spans two lines; the second line carries the height
                if len(data_stuff[j + 1].split()) == 1:
                    hght[j // 2] = float(data_stuff[j + 1].split()[0])
                else:
                    hght[j // 2] = float(data_stuff[j + 1].split()[1])
                tmpc[j // 2] = float(data_stuff[j].split()[1])
                dwpc[j // 2] = float(data_stuff[j].split()[3])
                pres[j // 2] = float(data_stuff[j].split()[0])
                wspd[j // 2] = float(data_stuff[j].split()[6])
                wdir[j // 2] = float(data_stuff[j].split()[5])
                omeg[j // 2] = float(data_stuff[j].split()[7])

            prof = profile.create_profile(profile='raw',
                                          pres=pres,
                                          hght=hght,
                                          tmpc=tmpc,
                                          dwpc=dwpc,
                                          wdir=wdir,
                                          wspd=wspd,
                                          omeg=omeg,
                                          location=station,
                                          date=dates[i],
                                          latitude=slat)

            profiles.append(prof)

        return member_name, profiles, dates
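A minimal wiring sketch (hypothetical names: 'decoder' and 'member_text', where member_text stands in for one ensemble member's chunk of BUFKIT text); the returned values can be collected into a ProfCollection, as the other decoders above do:

    member_name, profiles, dates = decoder._parseMember(member_text)
    prof_coll = prof_collection.ProfCollection({member_name: profiles}, dates)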
Example #32
0
    def _parse(self):
        """
        Parse the netCDF file according to the variable naming and
        dimensional conventions of the WRF-ARW.
        """
        ## open the file and also store the lat/lon of the selected point
        file_data = self._downloadFile()
        gridx = self._file_name[1]
        gridy = self._file_name[2]

        ## calculate the nearest grid point to the map point
        idx = self._find_nearest_point(file_data, gridx, gridy)

        ## check to see if this is a 4D netCDF4 that includes all available times.
        ## If it does, open and compute the variables as 4D variables
        if len(file_data.variables["T"][:].shape) == 4:
            ## read in the data from the WRF file and conduct necessary processing
            theta = file_data.variables["T"][:, :, idx[0], idx[1]] + 300.0
            qvapr = file_data.variables["QVAPOR"][:, :, idx[0],
                                                  idx[1]] * 10**3  #g/kg
            mpres = (file_data.variables["P"][:, :, idx[0], idx[1]] +
                     file_data.variables["PB"][:, :, idx[0], idx[1]]) * .01
            ## geopotential height = (PH + PHB) / g
            mhght = (file_data.variables["PH"][:, :, idx[0], idx[1]] +
                     file_data.variables["PHB"][:, :, idx[0], idx[1]]) / G
            ## unstagger the height grid in the vertical
            mhght = (mhght[:, :-1] + mhght[:, 1:]) / 2.

            muwin = file_data.variables["U"][:, :, idx[0], idx[1]]
            mvwin = file_data.variables["V"][:, :, idx[0], idx[1]]

            ## convert the potential temperature to air temperature
            mtmpc = thermo.theta(1000.0, theta - 273.15, p2=mpres)
            ## convert the mixing ratio to dewpoint
            mdwpc = thermo.temp_at_mixrat(qvapr, mpres)
            ## convert the grid relative wind to earth relative
            U = muwin * file_data.variables['COSALPHA'][
                0, idx[0], idx[1]] - mvwin * file_data.variables['SINALPHA'][
                    0, idx[0], idx[1]]
            V = mvwin * file_data.variables['COSALPHA'][
                0, idx[0], idx[1]] + muwin * file_data.variables['SINALPHA'][
                    0, idx[0], idx[1]]
            ## convert from m/s to kts
            muwin = utils.MS2KTS(U)
            mvwin = utils.MS2KTS(V)

        ## if the data is not 4D, then it must be assumed that this is a file containing only a single time
        else:
            ## read in the data from the WRF file and conduct necessary processing
            theta = file_data.variables["T"][:, idx[0], idx[1]] + 300.0
            qvapr = file_data.variables["QVAPOR"][:, idx[0],
                                                  idx[1]] * 10**3  #g/kg
            mpres = (file_data.variables["P"][:, idx[0], idx[1]] +
                     file_data.variables["PB"][:, idx[0], idx[1]]) * .01
            mhght = file_data.variables["PH"][:, idx[0],
                                              idx[1]] + file_data.variables[
                                                  "PHB"][:, idx[0], idx[1]] / G
            ## unstagger the height grid
            mhght = (mhght[:-1, :, :] + mhght[1:, :, :]) / 2.

            muwin = file_data.variables["U"][:, idx[0], idx[1]]
            mvwin = file_data.variables["V"][:, idx[0], idx[1]]

            ## convert the potential temperature to air temperature
            mtmpc = thermo.theta(1000.0, theta - 273.15, p2=mpres)
            ## convert the mixing ratio to dewpoint
            mdwpc = thermo.temp_at_mixrat(qvapr, mpres)
            ## convert the grid relative wind to earth relative
            U = muwin * file_data.variables['COSALPHA'][
                0, idx[0], idx[1]] - mvwin * file_data.variables['SINALPHA'][
                    0, idx[0], idx[1]]
            V = mvwin * file_data.variables['COSALPHA'][
                0, idx[0], idx[1]] + muwin * file_data.variables['SINALPHA'][
                    0, idx[0], idx[1]]
            ## convert from m/s to kts
            muwin = utils.MS2KTS(U)
            mvwin = utils.MS2KTS(V)

        ## get the model start time of the file
        inittime = dattim.datetime.strptime(str(file_data.START_DATE),
                                            '%Y-%m-%d_%H:%M:%S')

        profiles = []
        dates = []
        ## loop over the available times

        for i in range(file_data.variables["T"][:].shape[0]):
            ## make sure the arrays are 1D
            prof_pres = mpres[i].flatten()
            prof_hght = mhght[i].flatten()
            prof_tmpc = mtmpc[i].flatten()
            prof_dwpc = mdwpc[i].flatten()
            prof_uwin = muwin[i].flatten()
            prof_vwin = mvwin[i].flatten()
            ## compute the time of the profile
            try:
                delta = dattim.timedelta(
                    minutes=int(file_data.variables["XTIME"][i]))
                curtime = inittime + delta
            except KeyError:
                var = ''.join(
                    np.asarray(file_data.variables['Times'][i], dtype=str))
                curtime = dattim.datetime.strptime(var, '%Y-%m-%d_%H:%M:%S')
            date_obj = curtime

            ## construct the profile object
            prof = profile.create_profile(profile="raw",
                                          pres=prof_pres,
                                          hght=prof_hght,
                                          tmpc=prof_tmpc,
                                          dwpc=prof_dwpc,
                                          u=prof_uwin,
                                          v=prof_vwin,
                                          location=str(gridx) + "," +
                                          str(gridy),
                                          date=date_obj,
                                          missing=-999.0,
                                          latitude=gridy,
                                          strictQC=False)

            ## append the dates and profiles
            profiles.append(prof)
            dates.append(date_obj)

        ## create a profile collection - dictionary has no key since this
        ## is not an ensemble model
        prof_coll = prof_collection.ProfCollection({'': profiles}, dates)

        return prof_coll
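The COSALPHA/SINALPHA products above implement the standard rotation from grid-relative to earth-relative winds. A self-contained sketch of the same math (array names here are hypothetical stand-ins for the WRF fields):

import numpy as np

def grid_to_earth_winds(u_grid, v_grid, cosalpha, sinalpha):
    ## rotate grid-relative (u, v) to earth-relative using WRF's COSALPHA/SINALPHA
    u_earth = u_grid * cosalpha - v_grid * sinalpha
    v_earth = v_grid * cosalpha + u_grid * sinalpha
    return u_earth, v_earth

## toy check: a grid rotated 10 degrees, pure +u wind on the grid
alpha = np.deg2rad(10.0)
u_e, v_e = grid_to_earth_winds(10.0, 0.0, np.cos(alpha), np.sin(alpha))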
Example #33
0
    def _parse(self):
        file_data = self._downloadFile()
        ## read in the file
        data = np.array(
            [l.strip() for l in file_data.split('\n') if l.strip()])

        ## necessary index points
        title_idx = np.where(data == '%TITLE%')[0][0]
        start_idx = np.where(data == '%RAW%')[0][0] + 1
        finish_idx = np.where(data == '%END%')[0]
        # %END% is optional; if it is missing, fall back to the last complete data line
        if len(finish_idx) > 0:
            finish_idx = finish_idx[0]
        else:
            finish_idx = max(n for n, line in enumerate(data)
                             if len(line.split(',')) == 6) + 1
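            # i.e. when %END% is absent, the last line with six comma-separated fields
            # (pres, hght, tmpc, dwpc, wdir, wspd) marks the end of the raw data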

        ## create the plot title
        data_header = data[title_idx + 1].split()
        location = data_header[0]
        time = datetime.strptime(data_header[1][:11], '%y%m%d/%H%M')
        if len(data_header) > 2:
            lat, lon = data_header[2].split(',')
            lat = float(lat)
            lon = float(lon)
        else:
            lat = 35.
            lon = -97.

        if time > datetime.utcnow() + timedelta(hours=1):
            # If the strptime accidently makes the sounding in the future (like with SARS archive)
            # i.e. a 1957 sounding becomes 2057 sounding...ensure that it's a part of the 20th century
            time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')

        ## put it all together for StringIO
        full_data = '\n'.join(data[start_idx:finish_idx][:])

        if not is_py3():
            sound_data = StringIO(full_data)
        else:
            sound_data = BytesIO(full_data.encode())

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt(sound_data,
                                                delimiter=',',
                                                comments="%",
                                                unpack=True)
        #       idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        pres = p  #[idx]
        hght = h  #[idx]
        tmpc = T  #[idx]
        dwpc = Td  #[idx]
        wspd = wspd  #[idx]
        wdir = wdir  #[idx]

        # Crude hack: some files carry a bogus surface height; flag it as missing
        if hght[0] > 30000:
            hght[0] = -9999.00

        # Latitude falls back to 35 N when the file header does not provide one (see above).
        prof = profile.create_profile(profile='raw',
                                      pres=pres,
                                      hght=hght,
                                      tmpc=tmpc,
                                      dwpc=dwpc,
                                      wdir=wdir,
                                      wspd=wspd,
                                      location=location,
                                      date=time,
                                      latitude=lat,
                                      missing=-9999.00)

        prof_coll = prof_collection.ProfCollection(
            {'': [prof]},
            [time],
        )

        prof_coll.setMeta('loc', location)
        prof_coll.setMeta('observed', True)
        prof_coll.setMeta('base_time', time)
        return prof_coll
Example #34
0
def process_site(file):
    try:
        ## switch out the reading function for the anticipated data type
        data = read_wyoming_file(file)
        #data = read_sounding_file(file)

        ## depending on the data reader used, a different block of code is required;
        ## the second block below is disabled by wrapping it in a string literal
        #"""
        p = data[:, 0]
        Z = data[:, 1]
        T = data[:, 2]
        Td = data[:, 3]
        U = np.zeros(T.shape)
        V = np.zeros(T.shape)
        """
        
        p = data[:, 0]
        T = data[:, 1]
        Td = data[:, 2]
        Z = data[:, 5]
        U = np.zeros(T.shape)
        V = np.zeros(T.shape)
        """
        ## to use the second block instead, adjust the triple quotes above so it is the one left active

        dz = None
        p_og = None
        z_og = None

        p[p == 9999.0] = np.nan
        T[T == 999.0] = np.nan
        Td[Td == 999.0] = np.nan
        Z[Z == 9999.0] = np.nan

        p = np.ma.masked_invalid(p)
        T = np.ma.masked_invalid(T)
        Td = np.ma.masked_invalid(Td)
        Z = np.ma.masked_invalid(Z)

    except Exception:
        ## fall back to Kevin's data format
        data = read_kevins_file(file)
        Z = data[:, 0]
        p = data[:, 1]
        T = data[:, 2]
        U = np.zeros(T.shape)
        V = np.zeros(T.shape)
        Td = data[:, 3]

        dz = data[:, 4][::10]
        p_og = data[:, 5][::10]
        z_og = data[:, 6][::10]

        p = np.ma.masked_invalid(p)
        T = np.ma.masked_invalid(T)
        Td = np.ma.masked_invalid(Td)
        Z = np.ma.masked_invalid(Z)
    print(file)

    ## create profile object for processing
    prof = profile.create_profile(profile='default',
                                  pres=p,
                                  hght=Z,
                                  tmpc=T,
                                  dwpc=Td,
                                  u=U,
                                  v=V,
                                  strictQC=False)

    if dz is not None:
        pvals = interp.pres(prof, interp.to_msl(prof, np.arange(0, 5000, 100)))
        dz = interp.generic_interp_pres(np.log10(pvals),
                                        np.log10(p_og)[::-1], dz[::-1])
    print(dz)
    ## return the lifted parcel indices and the bore dz
    return lift_parcels(prof), dz
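A minimal driver sketch for the function above (the file path is a placeholder):

if __name__ == '__main__':
    ## lift parcels for a single sounding file and report the interpolated bore dz
    parcels, bore_dz = process_site('example_sounding.txt')
    print(parcels)
    print(bore_dz)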
Example #35
0
## get the current utc time and format it into
## a string that can be used for the SPC url.


if sys.argv[1] == "SARS":
	url = open("/Users/keltonhalbert/Downloads/snd/supercell/violent/" + sys.argv[2])
	data = np.array(url.read().split('\n'))
	title_idx = np.where( data == '%TITLE%')[0][0]
	start_idx = np.where( data == '%RAW%' )[0] + 1
	finish_idx = np.where( data == '%END%')[0]
	plot_title = data[title_idx + 1] + ' (Observed)'
	full_data = '\n'.join(data[start_idx : finish_idx][:])
	sound_data = StringIO( full_data )
	p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, delimiter=',', comments="%", unpack=True )
	prof = profile.create_profile( profile='convective', pres=p, hght=h, tmpc=T, dwpc=Td,
				wdir=wdir, wspd=wspd, location=sys.argv[1])

elif sys.argv[1] != "test":
    gmtime = datetime.datetime.utcnow()
    t_str = str( gmtime )
    year = t_str[2:4]
    month = t_str[5:7]
    day = t_str[8:10]
    hour = t_str[11:13]
    if int( hour ) > 12:
        current_ob = '12'
    else:
        current_ob = '00'
        
    obstring = year + month + day + current_ob
    obstime = datetime.datetime.strptime( obstring, '%y%m%d%H')
Example #36
0
    def _parse(self):
        global dyn_inset
        file_data = self._downloadFile()
        ## read in the file
        data = np.array([l.strip() for l in file_data.split('\n')])

        ## necessary index points
        #title_idx = np.where( 'Station:' in data )
        title_idx = np.where( np.char.find(data,'Station:') > -1)[0][0]
        date_idx = np.where( np.char.find(data,'Date: ') > -1 )[0][0]
        start_idx = np.where( np.char.find(data,'SFC') > -1 )[0][0]
        finish_idx = np.where( np.char.find(data,'TRP') > -1 )[0][0]

        ## create the plot title
        location = data[title_idx].split('Station: ')[1]
        data_header = 'Location: ' + location + ' ' + data[date_idx]
        timeStr = str(data[date_idx].split('Date: ')[1]).upper()
        time = datetime.strptime(timeStr, '%H%MZ %d %b %y')

        
        # if time > datetime.utcnow(): #If the strptime accidently makes the sounding the future:
        #     # If the strptime accidently makes the sounding in the future (like with SARS archive)
        #     # i.e. a 1957 sounding becomes 2057 sounding...ensure that it's a part of the 20th century
        #     time = datetime.strptime('19' + data_header[1][:11], '%Y%m%d/%H%M')

        # ---------------------------- Clean up the data ------------------------------#
        # Make sure we have the right number of fields.
        # Commonly at the end of the sounding, wind speed and direction are missing.
        # This check takes care of that, eliminating a row if it's undersized.

        dirtyData = data[start_idx : (finish_idx)][:]
        cleanData = []
        for line in dirtyData:
            numItems = 0
            items = line.split(' ')
            for item in items:
                if item != '':
                    numItems += 1
            if numItems == 15:
                cleanData.append(line)
        # -----------------------------------------------------------------------------#

        ## put it all together for StringIO
        full_data = '\n'.join(cleanData)
        sound_data = StringIO( full_data )

        ## read the data into arrays
        p, h, T, Td, wdir, wspd = np.genfromtxt( sound_data, unpack=True, usecols=(1,2,3,4,8,9) )
        #idx = np.argsort(p, kind='mergesort')[::-1] # sort by pressure in case the pressure array is off.

        # ----------------------- More Cleaning ----------------------- #
        # SHARPpy doesn't like directions of 360, convert those to 0:
        wdir = [0 if x==360 else x for x in wdir]

        # If there is a duplicate height entry (common),
        # Just add an extra meter, and that'll be our little secret.  ;)
        for key,height in enumerate(h):
            if key == 0:
                continue
            if height == h[key-1]:
                h[key] += 1
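        # e.g. consecutive heights [120., 250., 250., 400.] become [120., 250., 251., 400.]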
        # ------------------------------------------------------------- #

        pres = p #[idx]
        hght = h #[idx]
        tmpc = T #[idx]
        dwpc = Td #[idx]
        wspd = wspd #[idx]
        wdir = wdir #[idx]
        if dwpc[0] < 40:
            dyn_inset = 'winter'
        else:
            dyn_inset = 'severe'

        # Force latitude to be 35 N. Figure out a way to fix this later.
        prof = profile.create_profile(profile='raw', pres=pres, hght=hght, tmpc=tmpc, dwpc=dwpc, wdir=wdir, wspd=wspd, location=location, date=time, latitude=35.)
        prof_coll = prof_collection.ProfCollection({'':[ prof ]},[ time ],)
        prof_coll.setMeta('loc', location)
        return prof_coll