Example #1
def main():
    
    
    #-----------------------
    # Date Range of interest
    #-----------------------
    iyear          = 2017
    imnth          = 6
    iday           = 1
    fyear          = 2017
    fmnth          = 8
    fday           = 31
    
    #-----------------------
    # File name for log file
    #-----------------------
    logFname = '/data1/ancillary_data/ERAdata/ncLog4.log'

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear,imnth,iday,fyear,fmnth,fday) 
    
    #------------------------------
    # ERA Reanalysis data directory
    #------------------------------
    ERAdir = '/data1/ancillary_data/ERAdata/'
    #ERAdir = '/data1/ancillary_data/ERAdata/wind/'   # Use for wind in TAB

        
    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(ERAdir,exitFlg=True)
    
    #--------------------
    # Initialize log file
    #--------------------
    lFile = logging.getLogger('1')
    lFile.setLevel(logging.INFO)
    hdlr1 = logging.FileHandler(logFname,mode='w')
    fmt1  = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s','%a, %d %b %Y %H:%M:%S')
    hdlr1.setFormatter(fmt1)    
    lFile.addHandler(hdlr1)

    #------------------------------------
    # Loop through all days in date range
    #------------------------------------
    for sngDay in dRange.dateList:
        YYYY = "{0:04d}".format(sngDay.year)
        MM   = "{0:02d}".format(sngDay.month)
        DD   = "{0:02d}".format(sngDay.day)         
    
        #------------------------------
        # Name of ERA Interim GRIB file
        #------------------------------
        fName1 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pl.regn128sc.'+YYYY+MM+DD+'00'
        fName2 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pl.regn128sc.'+YYYY+MM+DD+'06'
        fName3 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pl.regn128sc.'+YYYY+MM+DD+'12'
        fName4 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pl.regn128sc.'+YYYY+MM+DD+'18'

        #------------------------------------
        #Use for Wind in TAB
        #------------------------------------
        #fName1 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pv.regn128sc.'+YYYY+MM+DD+'00'
        #fName2 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pv.regn128sc.'+YYYY+MM+DD+'06'
        #fName3 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pv.regn128sc.'+YYYY+MM+DD+'12'
        #fName4 = ERAdir + YYYY + MM +'/'+'ei.oper.an.pv.regn128sc.'+YYYY+MM+DD+'18'


        ckFile(fName1, exitFlg=True)
        ckFile(fName2, exitFlg=True)
        ckFile(fName3, exitFlg=True)
        ckFile(fName4, exitFlg=True)
    
        #------------------------------------------
        # Rename ERA Interim file (add .grb at end)
        # so ncl_convert2nc recognizes file
        #------------------------------------------
        shutil.move(fName1,fName1+'.grb')
        shutil.move(fName2,fName2+'.grb')
        shutil.move(fName3,fName3+'.grb')
        shutil.move(fName4,fName4+'.grb')
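        # NOTE (assumption): if a previous run already appended '.grb',
        # these moves will fail; guarding each one with
        # os.path.isfile(fName1) makes reruns safe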
        
        #-------------------------------------
        # Run NCL program to convert to netCDF
        #-------------------------------------
        subProcRun(fName1+'.grb',ERAdir + YYYY + MM +'/',logFlg=lFile)
        subProcRun(fName2+'.grb',ERAdir + YYYY + MM +'/',logFlg=lFile)
        subProcRun(fName3+'.grb',ERAdir + YYYY + MM +'/',logFlg=lFile)
        subProcRun(fName4+'.grb',ERAdir + YYYY + MM +'/',logFlg=lFile)
    
        #-----------------
        # Remove grib file
        #-----------------
        #os.remove(fName1+'.grb')
        #os.remove(fName2+'.grb')
        #os.remove(fName3+'.grb')
        #os.remove(fName4+'.grb')
    
        
        print 'Finished processing day: {}'.format(sngDay)
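
The helper subProcRun is defined outside this listing. A minimal sketch of such a wrapper, assuming NCL's ncl_convert2nc utility is on the PATH and that its -o option selects the output directory (the name subProcRun and the logFlg keyword are simply taken from the call sites above):

import subprocess

def subProcRun(fname, outDir, logFlg=False):
    # Convert a GRIB file to netCDF with NCL's ncl_convert2nc utility.
    # Assumes ncl_convert2nc is on the PATH and -o sets the output directory.
    proc = subprocess.Popen(['ncl_convert2nc', fname, '-o', outDir],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    if logFlg:
        if stdout: logFlg.info('ncl_convert2nc: ' + stdout.strip())
        if stderr: logFlg.error('ncl_convert2nc: ' + stderr.strip())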
Example #2
def main():
    
    #---------
    # Location
    #---------
    loc = 'nya'
    
    #-----------------------------------------
    # Interpolation flag for NCEP re-analysis: 
    # False = Nearest point. True = linear
    #-----------------------------------------
    interpFlg = False
    
    #---------------------
    # Interpolation things
    #---------------------
    nSkip      = 3            # Number of points to skip when merging WACCM and NCEP profiles
    intrpOrder = 1            # Order of interpolation
    logFlg     = True         # Flag to do interpolation of log of water
    
    #-----------------------
    # Date Range of interest
    #-----------------------
    yyyy           = 2015
    iyear          = yyyy
    imnth          = 1
    iday           = 1
    fyear          = yyyy
    fmnth          = 12
    fday           = 31
    
    #-------------------------------
    # NCEP Reanalysis data directory
    #-------------------------------
    #NCEPTrppdir = '/Volumes/data1/ebaumer/NCEP_trpp/'
    #NCEPhgtDir  = '/Volumes/data1/ebaumer/NCEP_hgt/'
    NCEPTrppdir = '/data1/ancillary_data/NCEPdata/NCEP_trpp/'
    NCEPhgtDir  = '/data1/ancillary_data/NCEPdata/NCEP_hgt/'

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear,imnth,iday,fyear,fmnth,fday) 
    yrList = dRange.yearList()

    #---------------------------------------
    # Coordinates for different stations
    #---------------------------------------
    if loc.lower() == 'tab':
        sLat = 76.52
        sLon = 291.23               # 68.77 W = (360.0 - 68.77) = 291.23 E
        
    elif loc.lower() == 'mlo':
        sLat = 19.4
        sLon = 204.43                # 155.57 W = (360 - 155.57) = 204.43 E
        
    elif loc.lower() == 'fl0':
        sLat = 40.4
        sLon = 254.76                # 105.24 W = (360 - 105.24) = 254.76 E

    elif loc.lower() == 'nya':
        sLat = 78.92
        sLon = 11.93                # 11.93 E     
      
    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(NCEPTrppdir,exitFlg=True)
    ckDir(NCEPhgtDir,exitFlg=True)
       
    #-------------------
    # Loop through years
    #-------------------
    for year in yrList:
        
        #-------------------
        # Yearly Output File
        #-------------------
        outFile  = '/data1/ancillary_data/NCEPdata/NCEP_trpp/TropHght_'+loc.lower()+'_'+str(year)+'.dat'

        #-------------------------------
        # Open and read year NetCDF file
        #-------------------------------
        trppFile  = NCEPTrppdir + 'pres.tropp.'+str(year)+'.nc'
        ghghtFile = NCEPhgtDir + 'hgt.' +str(year)+'.nc'

        #-------------------------
        # Tropopause Pressure File
        #-------------------------
        #TrppObj  = netcdf.netcdf_file(trppFile,'r',mmap=False)
        TrppObj  = nc.Dataset(trppFile,'r')
        Trpp     = TrppObj.variables['pres']                # Mean daily Pressure at Tropopause (Pascals)
        timeTrpp = TrppObj.variables['time']                # hours since 1-1-1 00:00:0.0
        latTrpp  = TrppObj.variables['lat']                 # degrees_north
        lonTrpp  = TrppObj.variables['lon']                 # degrees_east

        #-----------------------------------------------------------
        # 'Unpack' the data => (data[int])*scale_factor + add_offset
        #-----------------------------------------------------------
        #TrppData = Trpp[:,:,:] * Trpp.scale_factor + Trpp.add_offset    
        TrppData = Trpp[:]     # netCDF4 applies scale_factor/add_offset automatically by default
                        
        TrppData *= 0.01              # Convert [Pascals] => [mbars]
        
        #-----------------------------------------------
        # If not interpolating, find the closest
        # lat/lon indices in the NCEP grid
        #-----------------------------------------------
        if not interpFlg:
            latind = findCls(latTrpp[:],sLat)
            lonind = findCls(lonTrpp[:],sLon)
        
        #------------------------------------------------------
        # Convert hours since 1-1-1 00:00:00 to datetime objects.
        # NCEP reanalysis encodes time with udunits, whose
        # calendar handling of leap years differs from the
        # standard datetime convention, so absolute offsets from
        # 1-1-1 are unreliable. Instead, treat the first time in
        # the file as 1-1-YEAR and work with hour deltas.
        #------------------------------------------------------
        timeHrs = timeTrpp[:] - timeTrpp[0]                      
        timeAll = np.array([dt.datetime(year,1,1)+dt.timedelta(hours=int(h)) for h in timeHrs])    # This is a datetime object
        dateAll = np.array([dt.date(d.year,d.month,d.day) for d in timeAll])                       # Convert datetime to strictly date
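        # A more robust alternative (assuming the file carries valid 'units'
        # and 'calendar' attributes) is to let netCDF4 decode the times:
        #   timeAll = nc.num2date(timeTrpp[:], units=timeTrpp.units,
        #                         calendar=getattr(timeTrpp, 'calendar', 'standard'))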
        
        #-------------------------
        # Geopotential Height file
        #-------------------------
        ##with netcdf.netcdf_file(ghghtFile,'r',mmap=False) as gHghtF:      # Can only be done with scipy Ver > 0.12.0
        ##gHghtF   = netcdf.netcdf_file(ghghtFile,'r',mmap=False)
        gHghtF       = nc.Dataset(ghghtFile,'r')
        hgt          = gHghtF.variables['hgt'][:]                                # Height in [meters]. Dimensions: [time][vert][lat][lon]
        PlvlHghtData = gHghtF.variables['level'][:]
        gHghtF.close()
        PlvlHghtData = PlvlHghtData.astype(float)   # astype returns a copy; assign it
        
        #-------------------------------------
        # Create empty Tropopause height array
        #-------------------------------------
        Trph     = np.zeros(len(dateAll))     
        TrppSite = np.zeros(len(dateAll))

        #------------------------------------------
        # Loop through all folders in specific year
        #------------------------------------------
        for day in dRange.dateList:
            
            #------------------------------
            # Find corresponding date index
            #------------------------------
            i = np.where(dateAll == day)[0]
            
            #-------------------------
            # Get hgt for specific day
            #-------------------------
            dayHghtMat = np.squeeze(hgt[i,:,:,:])
            
            #-----------------------------------------------------------
            # 'Unpack' the data => (data[int])*scale_factor + add_offset
            #-----------------------------------------------------------
            #dayHghtMat = dayHghtMat * hgt.scale_factor + hgt.add_offset
            
            #-------------------------------------------
            # For each level interpolate height based on 
            # latitude and longitude of site
            #-------------------------------------------
            dayHgt  = np.zeros(np.shape(dayHghtMat)[0])
            
            for lvl in range(0,np.shape(dayHghtMat)[0]):      
                dayHgtLvl    = np.squeeze(dayHghtMat[lvl,:,:])
                if interpFlg: dayHgt[lvl] = interp2d(lonTrpp[:],latTrpp[:],dayHgtLvl,kind='linear', bounds_error=True)(sLon,sLat)
                else:         dayHgt[lvl] = dayHgtLvl[latind,lonind]  

               
            dayHgt = dayHgt.astype(float)           # astype returns a copy; assign it
            dayHgt  = dayHgt / 1000.0            # Convert height units [m] => [km]
            
            #-------------------------------------------------------------
            # Interpolate Tropopause pressure based on lat and lon of site
            #-------------------------------------------------------------
            TrppDay = np.squeeze(TrppData[i,:,:])
            
            if interpFlg: TrppSite[i] = interp2d(lonTrpp[:],latTrpp[:],TrppDay,kind='linear', bounds_error=True)(sLon,sLat)
            else: TrppSite[i] = TrppDay[latind,lonind] 

            #------------------------------------
            # Interpolate Tropopause pressure on 
            # height to find height of tropopause
            #------------------------------------     
            #Trph[i] = interp1d(PlvlHghtData,dayHgt, kind='linear')(TrppSite[i])

            # interp1d requires monotonically increasing x-values, so sort
            # the pressure levels (and matching heights) before interpolating
            srtInd = np.argsort(PlvlHghtData)
            Trph[i] = interp1d(PlvlHghtData[srtInd], dayHgt[srtInd], kind='linear')(TrppSite[i])

        #----------------------------------------
        # Write Tropopause heights to yearly file
        #----------------------------------------
        with open(outFile,'w') as fopen:
            hdr = 'Date                     Tropopause Height [km]   Tropopause Pressure [mbars]\n'
            fopen.write(hdr)
            strformat = ['{'+str(i)+':<25}' for i in range(0,3)]
            strformat = ''.join(strformat).lstrip() + '\n'
            for i,indDay in enumerate(timeAll):
                daystr = "{0:04d}{1:02d}{2:02d}".format(indDay.year,indDay.month,indDay.day)
                temp   = [daystr,Trph[i],TrppSite[i]]
                fopen.write(strformat.format(*temp))
                    
        TrppObj.close()
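
findCls is imported from a helper module not shown here. A minimal nearest-index implementation consistent with the call sites above (name and signature inferred from usage, not the original code):

import numpy as np

def findCls(dataArray, val):
    # Return the index of the element in dataArray closest to val
    return np.argmin(np.abs(np.asarray(dataArray) - val))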
Example #3
def main():

    #---------
    # Location
    #---------
    loc = 'tab'
    #loc = 'mlo'

    #------------------------------------
    # Version number to append water file
    #------------------------------------
    verW = 'v3'

    #-----------------------------------------
    # Interpolation flag for NCEP re-analysis:
    # False = Nearest point. True = linear
    #-----------------------------------------
    interpFlg = True

    #---------------------
    # Interpolation things
    #---------------------
    nSkip = 3  # Number of points to skip when merging WACCM and NCEP profiles
    intrpOrder = 1  # Order of interpolation
    logFlg = True  # Flag to do interpolation of log of water

    #-----------------------
    # Date Range of interest
    #-----------------------
    iyear = 2017
    imnth = 9
    iday = 1
    fyear = 2017
    fmnth = 12
    fday = 31

    #-------------------------------
    # NCEP Reanalysis data directory
    #-------------------------------
    NCEPdirShum = '/data1/ancillary_data/NCEPdata/NCEP_Shum/'
    NCEPdirHgt = '/data1/ancillary_data/NCEPdata/NCEP_hgt/'

    #---------------
    # Data Directory
    #---------------
    dataDir = '/data1/' + loc.lower() + '/'
    #dataDir  = '/data1/iortega/WYO/'

    #-------------------------
    # WACCM monthly means file
    #-------------------------
    WACCMfile = '/data/Campaign/' + loc.upper() + '/waccm/WACCM_pTW-meanV6.' + loc.upper()

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear, imnth, iday, fyear, fmnth, fday)

    #---------------------------------------
    # Altitude levels for different stations
    #---------------------------------------
    if loc.lower() == 'tab':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.0000, 0.2250
        ])
        sLat = 76.52
        sLon = 291.23  # 68.77 W = (360.0 - 68.77) = 291.23 E

    elif loc.lower() == 'mlo':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.3960
        ])

        sLat = 19.4
        sLon = 204.43  # 155.57 W = (360 - 155.57) = 204.43 E

    elif loc.lower() == 'fl0':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 94.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.6120
        ])

        sLat = 40.4
        sLon = 254.76  # 105.24 W = (360 - 105.24) = 254.76 E

        ###sLat = 42.73
        ###sLon = 253.68               #WYOBA

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(NCEPdirShum, exitFlg=True)
    ckDir(NCEPdirHgt, exitFlg=True)
    ckDir(dataDir, exitFlg=True)
    ckFile(WACCMfile, exitFlg=True)

    #--------------------------------------------------------------------
    # Read WACCM monthly mean data. WACCM monthly mean file is ascending,
    # adjust so that it is descending. Also units are in km.
    #--------------------------------------------------------------------
    with open(WACCMfile, 'r') as fopen:
        lines = fopen.readlines()

    nlyrs = int(lines[0].strip().split()[0])
    s_ind = 3
    Z = np.flipud(
        np.array([
            float(row.strip().split()[0]) for row in lines[s_ind:nlyrs + s_ind]
        ]))
    waccmT = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))
    s_ind = 3 + nlyrs + 2
    waccmP = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))
    s_ind = 3 + nlyrs + 2 + nlyrs + 2
    waccmW = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))

    #--------------------------------------------
    # Walk through first level of directories in
    # data directory and collect directory names
    #--------------------------------------------
    dirLst = []
    for drs in next(os.walk(dataDir))[1]:

        #-------------------------------------------
        # Test directory to make sure it is a number
        #-------------------------------------------
        try:
            int(drs[0:4])
        except ValueError:
            continue

        if dRange.inRange(int(drs[0:4]), int(drs[4:6]), int(drs[6:8])):
            dirLst.append(dataDir + drs + '/')

    dirLst.sort()

    #--------------------------------------------------------
    # Loop through folders by individual year. NCEP NetCDF
    # files are stored per year, so each yearly file needs to
    # be opened and read only once
    #--------------------------------------------------------
    yrList = dRange.yearList()

    for year in yrList:

        #-----------------------------
        # Find all folders within year
        #-----------------------------
        dirListYr = np.array(
            [d for d in dirLst if int(os.path.basename(d[:-1])[0:4]) == year])

        #-------------------------------
        # Open and read year NetCDF file
        #-------------------------------
        shumFile = NCEPdirShum + 'shum.' + str(year) + '.nc'
        ghghtFile = NCEPdirHgt + 'hgt.' + str(year) + '.nc'

        #-----------------------
        # Specific humidity file
        #-----------------------
        #with netcdf.netcdf_file(shumFile,'r',mmap=False) as shumF:      # Can only be done with scipy Ver > 0.12.0
        #shumF = netcdf.netcdf_file(shumFile,'r',mmap=False)
        shumObj = nc.Dataset(shumFile, 'r')
        PlvlShum = shumObj.variables['level']  # Starts at the surface
        timeShum = shumObj.variables['time']  # hours since 1-1-1 00:00:0.0
        latShum = shumObj.variables['lat']  # degrees_north
        lonShum = shumObj.variables['lon']  # degrees_east
        shum = shumObj.variables[
            'shum']  # Units: [kg/kg]. Dimensions: [time][vert][lat][lon] Only goes to 300mb!!

        PlvlShum = PlvlShum[:]
        timeShum = timeShum[:]
        latShum = latShum[:]
        lonShum = lonShum[:]
        shum = shum[:]

        shumObj.close()

        #------------------------------------------------
        # Convert specific humidity [kg/kg] to volume
        # mixing ratio by multiplying with M_air/M_H2O
        # (28.96/18.02 ~ 1.608)
        #------------------------------------------------
        shum = shum * 1.608

        #-----------------------------------------------
        # If not interpolating, find the closest
        # lat/lon indices in the NCEP grid
        #-----------------------------------------------
        if not interpFlg:
            latind = findCls(latShum, sLat)
            lonind = findCls(lonShum, sLon)

        #------------------------------------------------------
        # Convert hours since 1-1-1 00:00:00 to datetime objects.
        # NCEP reanalysis encodes time with udunits, whose
        # calendar handling of leap years differs from the
        # standard datetime convention, so absolute offsets from
        # 1-1-1 are unreliable. Instead, treat the first time in
        # the file as 1-1-YEAR and work with hour deltas.
        #------------------------------------------------------
        timeHrs = timeShum - timeShum[0]
        timeAll = np.array([
            dt.datetime(year, 1, 1) + dt.timedelta(hours=int(h))
            for h in timeHrs
        ])

        #-------------------------
        # Geopotential Height file
        #-------------------------
        #with netcdf.netcdf_file(ghghtFile,'r',mmap=False) as gHghtF:      # Can only be done with scipy Ver > 0.12.0
        #gHghtF = netcdf.netcdf_file(ghghtFile,'r',mmap=False)
        gHghtF = nc.Dataset(ghghtFile, 'r')
        hgt = gHghtF.variables[
            'hgt']  # Height in [meters]. Dimensions: [time][vert][lat][lon]
        PlvlHght = gHghtF.variables['level']

        hgt = hgt[:]
        PlvlHght = PlvlHght[:]

        gHghtF.close()
        #------------------------------------------
        # Loop through all folders in specific year
        #------------------------------------------
        for sngDir in dirListYr:

            #----------------------------
            # Get date in datetime format
            #----------------------------
            oneDay = dt.datetime(int(os.path.basename(sngDir[:-1])[0:4]),
                                 int(os.path.basename(sngDir[:-1])[4:6]),
                                 int(os.path.basename(sngDir[:-1])[6:8]))

            #--------------------------------------------------
            # Find month index for monthly WACCM water profiles
            #--------------------------------------------------
            mnthInd = oneDay.month - 1  # -1 because January is in the 0th column

            #--------------------------------------
            # Get hgt and specific humidity for day
            #--------------------------------------
            ind = np.where(timeAll == oneDay)[0]
            dayHghtMat = np.squeeze(hgt[ind, :, :, :])
            dayShumMat = np.squeeze(shum[ind, :, :, :])

            #-----------------------------------------------------------
            # 'Unpack' the data => (data[int])*scale_factor + add_offset
            #-----------------------------------------------------------
            #dayHghtMat = dayHghtMat * hgt.scale_factor + hgt.add_offset
            #dayShumMat = dayShumMat * shum.scale_factor + shum.add_offset

            #-----------------------------------------------------
            # For each level interpolate hgt and specific humidity
            # based on latitude and longitude of site
            #-----------------------------------------------------
            dayHgt = np.zeros(np.shape(dayShumMat)[0])
            dayShum = np.zeros(np.shape(dayShumMat)[0])
            for lvl in range(0, np.shape(dayShumMat)[0]):

                dayHgtLvl = np.squeeze(dayHghtMat[lvl, :, :])
                if interpFlg:
                    dayHgt[lvl] = interp2d(lonShum,
                                           latShum,
                                           dayHgtLvl,
                                           kind='linear',
                                           bounds_error=True)(sLon, sLat)
                else:
                    dayHgt[lvl] = dayHgtLvl[latind, lonind]

                dayShumLvl = np.squeeze(dayShumMat[lvl, :, :])
                if interpFlg:
                    dayShum[lvl] = interp2d(lonShum,
                                            latShum,
                                            dayShumLvl,
                                            kind='linear',
                                            bounds_error=True)(sLon, sLat)
                else:
                    dayShum[lvl] = dayShumLvl[latind, lonind]

            dayHgt = dayHgt.astype(float)    # astype returns a copy; assign it
            dayShum = dayShum.astype(float)
            dayHgt = dayHgt / 1000.0  # Convert height units [m] => [km]

            #---------------------------------------------------------
            # Construct specific humidity and height profiles for
            # interpolation on to the sfit input grid which is the
            # same as the WACCM height in monthly profile file.
            # NCEP reanalysis data only goes to 300mb therefore,
            # merge with monthly averaged WACCM water profiles > 300mb
            #---------------------------------------------------------
            NCEPtop = dayHgt[-1]
            topInd = np.argmin(
                abs(Z - NCEPtop)
            )  # Where top of NCEP reanalysis fits in WACCM grid height

            #Zin  = np.concatenate( ( Z[0:(topInd-nSkip)]             , np.flipud(dayHgt)) , axis=1 )
            #SHin = np.concatenate( ( waccmW[0:(topInd-nSkip),mnthInd], np.flipud(dayShum)), axis=1 )

            # axis=1 removed: the inputs are 1-D, so concatenate along the default axis
            Zin = np.concatenate((Z[0:(topInd - nSkip)], np.flipud(dayHgt)))
            SHin = np.concatenate((waccmW[0:(topInd - nSkip),
                                          mnthInd], np.flipud(dayShum)))

            #--------------------------------------------------------------
            # Interpolate to specific humidity on WACCM grid. X data must
            # be increasing => flip dimensions and then flip back
            #--------------------------------------------------------------
            if logFlg:
                SHout = np.exp(
                    np.flipud(
                        intrpUniSpl(np.flipud(Zin),
                                    np.log(np.flipud(SHin)),
                                    k=intrpOrder)(np.flipud(Z))))
            else:
                SHout = np.flipud(
                    intrpUniSpl(np.flipud(Zin), np.flipud(SHin),
                                k=intrpOrder)(np.flipud(Z)))
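            # intrpUniSpl is presumably a thin wrapper around
            # scipy.interpolate.InterpolatedUnivariateSpline, with k the
            # spline order (k=1 => linear); its x input must be increasing,
            # hence the flipud calls above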

            #---------------------
            # Write out water file
            #---------------------
            with open(sngDir + 'w-120.' + verW, 'w') as fopen:
                fopen.write(
                    '    1     H2O from NCEP reanalysis and WACCM V6 monthly mean \n'
                )

                for row in segmnt(SHout, 5):
                    strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                    fopen.write(strformat.format(*row))

            #--------------------
            # Create plots to pdf
            #--------------------
            pdfsav = PdfPages(sngDir + 'WaterProfile.pdf')

            fig1, ax1 = plt.subplots()
            ax1.plot(SHout, Z, 'rx-', label='Interpolated SH')
            ax1.plot(waccmW[:, mnthInd], Z, 'bx-', label='WACCM V6 SH')
            ax1.plot(dayShum, dayHgt, 'kx-', label='NCEP Reanalysis SH')
            ax1.grid(True, which='both')
            ax1.legend(prop={'size': 9})
            ax1.set_ylabel('Altitude [km]')
            ax1.set_xlabel('VMR [ppv]')
            ax1.tick_params(axis='x', which='both', labelsize=8)
            ax1.set_ylim((Z[-1], 60))
            ax1.set_xlim((0, np.max((waccmW[-1, mnthInd], dayShum[-1]))))
            ax1.set_title(oneDay)

            pdfsav.savefig(fig1, dpi=250)

            fig2, ax2 = plt.subplots()
            ax2.plot(SHout, Z, 'rx-', label='Interpolated SH')
            ax2.plot(waccmW[:, mnthInd], Z, 'bx-', label='WACCM V6 SH')
            ax2.plot(dayShum, dayHgt, 'kx-', label='NCEP Reanalysis SH')
            ax2.grid(True, which='both')
            ax2.legend(prop={'size': 9})
            ax2.set_ylabel('Altitude [km]')
            ax2.set_xlabel('log VMR [ppv]')
            ax2.tick_params(axis='x', which='both', labelsize=8)
            ax2.set_xscale('log')
            ax2.set_ylim((Z[-1], 60))
            ax2.set_xlim((0, np.max((waccmW[-1, mnthInd], dayShum[-1]))))
            ax2.set_title(oneDay)

            pdfsav.savefig(fig2, dpi=250)

            pdfsav.close()

            print 'Finished processing folder: {}'.format(sngDir)
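
The profile merge above relies on the external helper intrpUniSpl. A self-contained sketch of the same log-space interpolation idea using SciPy (assuming intrpUniSpl wraps InterpolatedUnivariateSpline; the sample profile values below are made up for illustration):

import numpy as np
from scipy.interpolate import InterpolatedUnivariateSpline

def interp_log_profile(zIn, wIn, zOut, order=1):
    # Interpolate the profile wIn(zIn) onto zOut in log space.
    # zIn must be strictly increasing; order is the spline order (1 = linear).
    spl = InterpolatedUnivariateSpline(zIn, np.log(wIn), k=order)
    return np.exp(spl(zOut))

# Hypothetical example: coarse 5-level water profile onto a finer grid
zIn  = np.array([0.0, 2.0, 5.0, 10.0, 20.0])        # altitude [km]
wIn  = np.array([1e-2, 5e-3, 1e-3, 1e-4, 1e-6])     # H2O VMR
zOut = np.linspace(0.0, 20.0, 11)
print interp_log_profile(zIn, wIn, zOut)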
Example #4
def main(argv):

    #------------------
    # Set default flags
    #------------------
    logFile = False
    lstFlg = False
    pauseFlg = False

    #--------------------------------
    # Retrieve command line arguments
    #--------------------------------
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:P:L:l?')

    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit()

    #-----------------------------
    # Parse command line arguments
    #-----------------------------
    for opt, arg in opts:
        # Check input file flag and path
        if opt == '-i':

            # Input file instance
            mainInF = sc.Layer1InputFile(arg)

        # Pause after skip option
        elif opt == '-P':
            if not arg or arg.startswith('-'):
                usage()
                sys.exit()
            pauseFlg = True
            try:
                nskips = int(arg) - 1
                if nskips < 0: raise ValueError
            except ValueError:
                print 'Argument for -P flag: %s, needs to be an integer > 0' % arg
                sys.exit()

        # Show all command line flags
        elif opt == '-?':
            usage()
            sys.exit()

        # Option for Log File
        elif opt == '-l':
            logFile = True

        # Option for List file
        elif opt == '-L':
            if not arg or arg.startswith('-'):
                usage()
                sys.exit()
            lstFlg = True
            lstFnameFlg = int(arg)

        else:
            print 'Unhandled option: ' + opt
            sys.exit()
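
    # Example invocation (hypothetical script/file names):
    #   python layer1.py -i input.py -P 5 -L 1 -l
    # NOTE (assumption): if '-i' is omitted, mainInF is never created and
    # the getInputs() call below raises a NameError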

    #----------------------------------------------
    # Initialize main input variables as dictionary
    #----------------------------------------------
    mainInF.getInputs()

    #--------------------
    # Initialize log file
    #--------------------
    # Write initial log data
    if logFile:
        log_fpath = mainInF.inputs['logDirOutput']

        # check if '/' is included at end of path
        if not (log_fpath.endswith('/')):
            log_fpath = log_fpath + '/'

        # check if path is valid
        ckDir(log_fpath)

        logFile = logging.getLogger('1')
        logFile.setLevel(logging.INFO)
        hdlr1 = logging.FileHandler(log_fpath +
                                    mainInF.inputs['ctlList'][0][2] + '.log',
                                    mode='w')
        fmt1 = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s',
                                 '%a, %d %b %Y %H:%M:%S')
        hdlr1.setFormatter(fmt1)
        logFile.addHandler(hdlr1)
        logFile.info(
            '**************** Starting Logging ***********************')
        logFile.info('Input data file:        ' + mainInF.fname)
        logFile.info('Log file path:          ' + log_fpath)
        logFile.info('Station location:       ' + mainInF.inputs['loc'])

    #---------------------
    # Initialize list file
    #---------------------
    if lstFlg:
        lst_fpath = mainInF.inputs['logDirOutput']

        # check if '/' is included at end of path
        if not (lst_fpath.endswith('/')):
            lst_fpath = lst_fpath + '/'

        # check if path is valid
        ckDir(lst_fpath)
        lstFile = logging.getLogger('2')
        lstFile.setLevel(logging.INFO)
        if lstFnameFlg:
            hdlr2 = logging.FileHandler(
                lst_fpath + mainInF.inputs['ctlList'][0][2] + '.lst', mode='w')
        else:
            hdlr2 = logging.FileHandler(lst_fpath + 'testing.lst', mode='w')
        fmt2 = logging.Formatter('')
        hdlr2.setFormatter(fmt2)
        lstFile.addHandler(hdlr2)

    #-----------------------------
    # Check the existence of files
    #-----------------------------
    # Spectral Database file
    ckFile(mainInF.inputs['spcdbFile'], logFlg=logFile, exit=True)

    # WACCM profile file
    #ckFile(mainInF.inputs['WACCMfile'],logFlg=logFile,exit=True)

    # ctl files
    for ctlFile in mainInF.inputs['ctlList']:
        ckFile(ctlFile[0], logFlg=logFile, exit=True)

    #--------------------------------------------
    # Program Looping structure. See Notes
    #      Level1 - LOC             (Input/Output)
    #       Level2 - ctl file         (Output)
    #        Level3 - Spectral db     (Output)
    #          --Check I/O directory structure
    #--------------------------------------------

    # Establish Date Range
    inDateRange = sc.DateRange(mainInF.inputs['iyear'],
                               mainInF.inputs['imnth'], mainInF.inputs['iday'],
                               mainInF.inputs['fyear'],
                               mainInF.inputs['fmnth'], mainInF.inputs['fday'])

    #--------------------
    # Level 1 -- LOC
    #--------------------
    if not (isinstance(mainInF.inputs['loc'], list)):
        mainInF.inputs['loc'] = [mainInF.inputs['loc']]

    for loc in mainInF.inputs['loc']:

        #-------------------------------------------
        # Check for existence of Input folder. Also,
        # check if '/' is included at end of path
        #-------------------------------------------
        if not (mainInF.inputs['BaseDirInput'].endswith('/')):
            wrkInputDir1 = mainInF.inputs['BaseDirInput'] + '/' + loc + '/'
        else:
            wrkInputDir1 = mainInF.inputs['BaseDirInput'] + loc + '/'

        ckDir(wrkInputDir1, logFlg=logFile, exit=True)

        #-----------------------------------------------------------
        # Check for the existence of Output folder and create if DNE
        # Also, check if '/' is included at end of path
        #-----------------------------------------------------------
        if not (mainInF.inputs['BaseDirOutput'].endswith('/')):
            wrkOutputDir1 = mainInF.inputs['BaseDirOutput'] + '/'
        else:
            wrkOutputDir1 = mainInF.inputs['BaseDirOutput']

        ckDirMk(wrkOutputDir1, logFile)

        #--------------------------------------
        # Find spectral db file and initialize
        # instance and get inputs
        #--------------------------------------
        dbData = sc.DbInputFile(mainInF.inputs['spcdbFile'], logFile)
        dbData.getInputs()

        #-----------------------------
        # Initial filter of data based
        # on input date range
        #-----------------------------
        dbFltData_1 = dbData.dbFilterDate(inDateRange)

        #---------------------------------
        # Initialize error control file
        # instance and get inputs (sb.ctl)
        #---------------------------------
        if mainInF.inputs['errFlg']:
            ckFile(mainInF.inputs['sbCtlFile'], logFlg=logFile, exit=True)
            SbctlFileVars = sc.CtlInputFile(mainInF.inputs['sbCtlFile'])
            SbctlFileVars.getInputs()

        #--------------------------------------
        # Level 2 -- Loop through control files
        #--------------------------------------
        for ctl_ind, ctlFileList in enumerate(mainInF.inputs['ctlList']):

            #-----------------------------
            # Initialize ctl file instance
            # and get inputs
            #-----------------------------
            ctlFile = ctlFileList[0]
            ctlFileGlb = sc.CtlInputFile(ctlFile, logFile)
            ctlFileGlb.getInputs()

            #-----------------------------
            # Write Meta-data to list file
            #-----------------------------
            if lstFlg:
                lstFile.info('# Begin List File Meta-Data')
                lstFile.info('Start Date     = ' +
                             str(inDateRange.dateList[0]))
                lstFile.info('End Date       = ' +
                             str(inDateRange.dateList[-1]))
                lstFile.info('WACCM_File     = ' + mainInF.inputs['WACCMfile'])
                lstFile.info('ctl_File       = ' +
                             mainInF.inputs['ctlList'][ctl_ind][0])
                lstFile.info('FilterID       = ' +
                             mainInF.inputs['ctlList'][ctl_ind][1])
                lstFile.info('VersionName    = ' +
                             mainInF.inputs['ctlList'][ctl_ind][2])
                lstFile.info('Site           = ' + mainInF.inputs['loc'][0])
                lstFile.info('statnLyrs_file = ' +
                             ctlFileGlb.inputs['file.in.stalayers'][0])
                lstFile.info('primGas        = ' + ctlFileGlb.primGas)
                lstFile.info('specDBfile     = ' + mainInF.inputs['spcdbFile'])
                lstFile.info('Coadd flag     = ' +
                             str(mainInF.inputs['coaddFlg']))
                lstFile.info('nBNRfiles      = ' +
                             str(mainInF.inputs['nBNRfiles']))
                lstFile.info('ilsFlg         = ' +
                             str(mainInF.inputs['ilsFlg']))
                lstFile.info('pspecFlg       = ' +
                             str(mainInF.inputs['pspecFlg']))
                lstFile.info('refmkrFlg      = ' +
                             str(mainInF.inputs['refmkrFlg']))
                lstFile.info('sfitFlg        = ' +
                             str(mainInF.inputs['sfitFlg']))
                lstFile.info('lstFlg         = ' +
                             str(mainInF.inputs['lstFlg']))
                lstFile.info('errFlg         = ' +
                             str(mainInF.inputs['errFlg']))
                lstFile.info('zptFlg         = ' +
                             str(mainInF.inputs['zptFlg']))
                lstFile.info('refMkrLvl      = ' +
                             str(mainInF.inputs['refMkrLvl']))
                lstFile.info('wVer           = ' + str(mainInF.inputs['wVer']))
                lstFile.info('# End List File Meta-Data')
                lstFile.info('')
                lstFile.info('Date         TimeStamp    Directory ')

            #-------------------------
            # Filter spectral db based
            # on wavenumber bounds in
            # ctl file
            #-------------------------
            # Find the upper and lower bands from the ctl file
            nu = []
            for band in ctlFileGlb.inputs['band']:
                bandstr = str(int(band))
                nu.append(ctlFileGlb.inputs['band.' + bandstr +
                                            '.nu_start'][0])
                nu.append(ctlFileGlb.inputs['band.' + bandstr + '.nu_stop'][0])

            nu.sort()  # Sort wavenumbers
            nuUpper = nu[-1]  # Get upper wavenumber
            nuLower = nu[0]  # Get lower wavenumber

            # Filter spectral DB based on wave number
            dbFltData_2 = dbData.dbFilterNu(nuUpper, nuLower, dbFltData_1)

            if not (dbFltData_2):
                continue  # Test for empty dictionary (i.e. no data)

            #------------------------------------------------------------------------------------------------
            # In addition to filtering db based on wavenumbers in ctl file one can filter spectral db based
            # on filter ID. Using this can help avoid the bug when pspec tries to apply a filter band outside
            # spectral region of a bnr file.
            #------------------------------------------------------------------------------------------------
            if mainInF.inputs['ctlList'][ctl_ind][1]:
                dbFltData_2 = dbData.dbFilterFltrID(
                    mainInF.inputs['ctlList'][ctl_ind][1], dbFltData_2)
                if not (dbFltData_2):
                    continue  # Test for empty dictionary (i.e. no data)

            #---------------------------------------------------------------------
            # Check for the existence of Output folder <Version> and create if DNE
            #---------------------------------------------------------------------
            if mainInF.inputs['ctlList'][ctl_ind][2]:
                wrkOutputDir2 = wrkOutputDir1 + mainInF.inputs['ctlList'][
                    ctl_ind][2] + '/'
                ckDirMk(wrkOutputDir2, logFile)
            else:
                wrkOutputDir2 = wrkOutputDir1

            #-----------------------------------------------
            # Create a folder within the output directory to
            # store various input files: ctl, hbin, isotope
            #-----------------------------------------------
            ctlPath, ctlFname = os.path.split(
                mainInF.inputs['ctlList'][ctl_ind][0])
            archDir = wrkOutputDir2 + 'inputFiles' + '/'

            if ckDirMk(archDir, logFile):
                for f in glob.glob(archDir + '*'):
                    os.remove(f)

            shutil.copy(mainInF.inputs['ctlList'][ctl_ind][0],
                        archDir)  # Copy ctl file

            for file in glob.glob(ctlPath + '/*hbin*'):  # Copy hbin files
                shutil.copy(file, archDir)

            for file in glob.glob(ctlPath + '/isotope*'):  # Copy isotope file
                shutil.copy(file, archDir)

            #------------------------------------------
            # Level 3 -- Loop through spectral db lines
            #------------------------------------------
            nobs = len(dbFltData_2['Date'])
            for spcDBind in range(0, nobs):

                #-----------------------------------------------------------
                # Grab spectral data base information for specific retrieval
                #-----------------------------------------------------------
                # Get current date and time of spectral database entry
                currntDayStr = str(int(dbFltData_2['Date'][spcDBind]))
                currntDay = dt.datetime(
                    int(currntDayStr[0:4]), int(currntDayStr[4:6]),
                    int(currntDayStr[6:]),
                    int(dbFltData_2['Time'][spcDBind][0:2]),
                    int(dbFltData_2['Time'][spcDBind][3:5]),
                    int(dbFltData_2['Time'][spcDBind][6:]))
                # Get dictionary with specific date
                specDBone = dbData.dbFindDate(currntDay, fltDict=dbFltData_2)

                brkFlg = True  # Flag to break out of while statement
                while True:  # While statement is for the repeat function
                    #-------------------------------------------------------------
                    # If pause after skip flag is initialized, do several things:
                    # 1) Check if number of skips exceeds total number of filtered
                    #    observations
                    # 2) Skip to specified starting point
                    # 3) Pause after first run
                    #-------------------------------------------------------------
                    if pauseFlg and (nskips > len(dbFltData_2['Date'])):
                        print 'Specified starting point in -P option (%d) is greater than number of observations in filtered database (%d)' % (
                            nskips, nobs)
                        if logFile:
                            logFile.critical(
                                'Specified starting point in -P option (%d) is greater than number of observations in filtered database (%d)'
                                % (nskips, nobs))
                        sys.exit()

                    if pauseFlg and (spcDBind < nskips): break

                    # Get date of observations
                    daystr = str(int(dbFltData_2['Date'][spcDBind]))
                    obsDay = dt.datetime(int(daystr[0:4]), int(daystr[4:6]),
                                         int(daystr[6:]))

                    #----------------------------------------
                    # Check the existence of input and output
                    # directory structure
                    #----------------------------------------
                    # Find year month and day strings
                    yrstr = "{0:02d}".format(obsDay.year)
                    mnthstr = "{0:02d}".format(obsDay.month)
                    daystr = "{0:02d}".format(obsDay.day)
                    datestr = yrstr + mnthstr + daystr

                    # Check for existence of YYYYMMDD Input folder
                    # If this folder does not exist => there is no
                    # Data for this day
                    wrkInputDir2 = wrkInputDir1 + yrstr + mnthstr + daystr + '/'
                    ckDir(wrkInputDir2, logFlg=logFile, exit=True)

                    #-----------------------------------------
                    # Check for the existence of Output folder
                    # <Date>.<TimeStamp> and create if DNE
                    #-----------------------------------------
                    wrkOutputDir3 = wrkOutputDir2 + datestr + '.' + "{0:06}".format(
                        int(dbFltData_2['TStamp'][spcDBind])) + '/'

                    if ckDirMk(wrkOutputDir3, logFile):
                        # Remove all files in Output directory if previously exists!!
                        for f in glob.glob(wrkOutputDir3 + '*'):
                            os.remove(f)

                    #-------------------------------
                    # Copy relevant files from input
                    # directory to output directory
                    #-------------------------------
                    #-----------------------------------
                    # Copy control file to Output folder
                    # First check if location to copy ctl is
                    # the same location as original ctl file
                    #-----------------------------------
                    try:
                        shutil.copyfile(mainInF.inputs['ctlList'][ctl_ind][0],
                                        wrkOutputDir3 + 'sfit4.ctl')
                    except IOError:
                        print 'Unable to copy template ctl file to working directory: %s' % wrkOutputDir3
                        if logFile:
                            logFile.critical(
                                'Unable to copy template ctl file to working directory: %s'
                                % wrkOutputDir3)
                        sys.exit()

                    #-------------------------------------
                    # Copy sb.ctl file to output directory
                    # if error analysis is chosen
                    #-------------------------------------
                    if mainInF.inputs['errFlg']:
                        try:
                            shutil.copyfile(mainInF.inputs['sbCtlFile'],
                                            wrkOutputDir3 + 'sb.ctl')
                        except IOError:
                            print 'Unable to copy template sb.ctl file to working directory: %s' % wrkOutputDir3
                            if logFile:
                                logFile.critical(
                                    'Unable to copy template sb.ctl file to working directory: %s'
                                    % wrkOutputDir3)
                            sys.exit()

                    #----------------------------------
                    # Copy hbin details to output folder
                    # ** Assuming that the hbin.dtl and
                    # hbin.input files are in the same
                    # location as the global ctl file
                    #----------------------------------
                    try:
                        shutil.copyfile(ctlPath + '/hbin.dtl', wrkOutputDir3 +
                                        '/hbin.dtl')  # Copy hbin.dtl file
                    except IOError as errmsg:
                        print 'Unable to copy file: %s' % (ctlPath +
                                                           '/hbin.dtl')
                        if logFile: logFile.error(errmsg)

                    try:
                        shutil.copyfile(ctlPath + '/hbin.input',
                                        wrkOutputDir3 +
                                        '/hbin.input')  # Copy hbin.input file
                    except IOError as errmsg:
                        print 'Unable to copy file: %s' % (ctlPath +
                                                           '/hbin.input')
                        if logFile: logFile.error(errmsg)

                    # Create instance of local control file (ctl file in working directory)
                    ctlFileLcl = sc.CtlInputFile(wrkOutputDir3 + 'sfit4.ctl',
                                                 logFile)

                    #-------------------------------------------------
                    # Determine whether to use ILS file. Empty string
                    # '' => no ILS file.
                    #-------------------------------------------------
                    if mainInF.inputs['ilsDir'] and mainInF.inputs['ilsFlg']:

                        #-------------------------------------------
                        # Determine if ilsDir is a file or directory
                        #-------------------------------------------
                        # If directory.....
                        if os.path.isdir(mainInF.inputs['ilsDir']):

                            # Determine which ILS file to use
                            ilsFileList = glob.glob(mainInF.inputs['ilsDir'] +
                                                    'ils*')

                            # Create a date list of ils files present
                            ilsYYYYMMDD = []
                            for ilsFile in ilsFileList:
                                ilsFileNpath = os.path.basename(ilsFile)
                                match = re.match(
                                    r'\s*ils(\d\d\d\d)(\d\d)(\d\d).*',
                                    ilsFileNpath)
                                ilsYYYYMMDD.append([
                                    int(match.group(1)),
                                    int(match.group(2)),
                                    int(match.group(3))
                                ])

                            ilsDateList = [
                                dt.date(ilsyear, ilsmonth, ilsday)
                                for ilsyear, ilsmonth, ilsday in ilsYYYYMMDD
                            ]

                            # Find the ils date nearest to the current day
                            nearstDay = sc.nearestDate(ilsDateList,
                                                       obsDay.year,
                                                       obsDay.month,
                                                       obsDay.day)
                            nearstDayMnth = "{0:02d}".format(nearstDay.month)
                            nearstDayYr = "{0:02d}".format(nearstDay.year)
                            nearstDayDay = "{0:02d}".format(nearstDay.day)
                            nearstDaystr = nearstDayYr + nearstDayMnth + nearstDayDay

                            # Get File path and name for nearest ils file
                            for ilsFile in ilsFileList:
                                if nearstDaystr in os.path.basename(ilsFile):
                                    ilsFname = ilsFile

                        # If file.....
                        elif os.path.isfile(mainInF.inputs['ilsDir']):
                            ilsFname = mainInF.inputs['ilsDir']

                        if logFile: logFile.info('Using ils file: ' + ilsFname)

                        # Replace ils file name in local ctl file (within working directory)
                        teststr = [
                            r'file.in.modulation_fcn', r'file.in.phase_fcn'
                        ]
                        repVal = [ilsFname, ilsFname]
                        ctlFileLcl.replVar(teststr, repVal)

                    # Write FOV from spectral database file to ctl file (within working directory)
                    ctlFileLcl.replVar([r'band\.\d+\.omega'],
                                       [str(specDBone['FOV'])])

                    #---------------------------
                    # Message strings for output
                    #---------------------------
                    msgstr1 = mainInF.inputs['ctlList'][ctl_ind][0]
                    msgstr2 = datestr + '.' + "{0:06}".format(
                        int(dbFltData_2['TStamp'][spcDBind]))

                    #----------------------------#
                    #                            #
                    #      --- Run pspec---      #
                    #                            #
                    #----------------------------#
                    if mainInF.inputs['pspecFlg']:
                        print '*****************************************************'
                        print 'Running PSPEC for ctl file: %s' % msgstr1
                        print 'Processing spectral observation date: %s' % msgstr2
                        print '*****************************************************'

                        rtn = t15ascPrep(dbFltData_2, wrkInputDir2,
                                         wrkOutputDir3, mainInF, spcDBind,
                                         ctl_ind, logFile)

                        if logFile:
                            logFile.info('Ran PSPEC for ctl file: %s' %
                                         msgstr1)
                            logFile.info(
                                'Processed spectral observation date: %s' %
                                msgstr2)

                    #----------------------------#
                    #                            #
                    #    --- Run Refmaker---     #
                    #                            #
                    #----------------------------#
                    if mainInF.inputs['refmkrFlg']:
                        #-------------
                        # Run Refmaker
                        #-------------
                        print '*****************************************************'
                        print 'Running REFMKRNCAR for ctl file: %s' % msgstr1
                        print 'Processing spectral observation date: %s' % msgstr2
                        print '*****************************************************'

                        rtn = refMkrNCAR(wrkInputDir2, mainInF.inputs['WACCMfile'], wrkOutputDir3, \
                                         mainInF.inputs['refMkrLvl'], mainInF.inputs['wVer'], mainInF.inputs['zptFlg'],\
                                         dbFltData_2, spcDBind, logFile)
                        if logFile:
                            logFile.info('Ran REFMKRNCAR for ctl file: %s' %
                                         msgstr1)
                            logFile.info(
                                'Processed spectral observation date: %s' %
                                msgstr2)

                    #----------------------------#
                    #                            #
                    #      --- Run sfit4---      #
                    #                            #
                    #----------------------------#

                    #--------------
                    # Call to sfit4
                    #--------------
                    if mainInF.inputs['sfitFlg']:
                        print '*****************************************************'
                        print 'Running SFIT4 for ctl file: %s' % msgstr1
                        print 'Processing spectral observation date: %s' % msgstr2
                        print 'Output Directory: %s' % wrkOutputDir3
                        print '*****************************************************'

                        if logFile:
                            logFile.info('Running SFIT4 for ctl file: %s' %
                                         msgstr1)
                            logFile.info(
                                'Processing spectral observation date: %s' %
                                msgstr2)

                        #------------------------------
                        # Change working directory to
                        # output directory to run sfit4
                        #------------------------------
                        try:
                            os.chdir(wrkOutputDir3)
                        except OSError as errmsg:
                            if logFile: logFile.error(errmsg)
                            sys.exit()

                        #---------------------
                        # Run sfit4 executable
                        #---------------------
                        sc.subProcRun([mainInF.inputs['binDir'] + 'sfit4'],
                                      logFile)

                        #if ( stderr is None or not stderr):
                        #if log_flg:
                        #logFile.info('Finished running sfit4\n' + stdout)
                        #else:
                        #print 'Error running sfit4!!!'
                        #if log_flg:
                        #logFile.error('Error running sfit4 \n' + stdout)
                        #sys.exit()

                        #-----------------------------------
                        # Change permissions of all files in
                        # working directory
                        #-----------------------------------
                        for f in glob.glob(wrkOutputDir3 + '*'):
                            os.chmod(f, 0777)
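                            # (0777 is Python 2 octal syntax for rwx by
                            #  all; Python 3 would require 0o777)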

                        #----------------------------------------------
                        # If successful run, write details to list file
                        #----------------------------------------------
                        if lstFlg:
                            fname = wrkOutputDir3 + 'sfit4.dtl'
                            cmpltFlg = False
                            with open(fname, 'r') as fopen:
                                for ind, line in enumerate(
                                        reversed(fopen.readlines())):
                                    if ind < 10:
                                        if r'RDRV: DONE.' in line:
                                            cmpltFlg = True
                                    else:
                                        break

                            if cmpltFlg and lstFile:
                                lstFile.info(
                                    "{0:<13}".format(
                                        int(dbFltData_2['Date'][spcDBind])) +
                                    "{0:06}".format(
                                        int(dbFltData_2['TStamp'][spcDBind])) +
                                    '       ' + wrkOutputDir3)

                        #----------------------------#
                        #                            #
                        #   --- Error Analysis ---   #
                        #                            #
                        #----------------------------#
                        if mainInF.inputs['errFlg']:
                            if logFile:
                                logFile.info(
                                    'Running error analysis for ctl file: %s'
                                    % msgstr1)

                            #-----------------------------------
                            # Enter into Error Analysis function
                            #-----------------------------------
                            rtn = errAnalysis(ctlFileGlb, SbctlFileVars,
                                              wrkOutputDir3, logFile)

                        #---------------------------
                        # Continuation for Pause flg
                        #---------------------------
                        if pauseFlg:
                            while True:
                                user_input = raw_input(
                                    'Paused processing....\n Enter: 0 to exit, -1 to repeat, 1 to continue to next, 2 to continue all\n >>> '
                                )
                                try:
                                    user_input = int(user_input)
                                    if user_input not in [-1, 0, 1, 2]:
                                        raise ValueError
                                    break
                                except ValueError:
                                    print 'Please enter -1, 0, 1, or 2'

                            if user_input == 0: sys.exit()  # Exit program
                            elif user_input == 1:
                                brkFlg = True  # Exit while loop (Do not repeat)
                            elif user_input == 2:  # Stop pause and exit while loop
                                pauseFlg = False
                                brkFlg = True
                            elif user_input == -1:  # Repeat loop
                                brkFlg = False
                                # Need to implement functionality to recopy ctl file, bnr file, etc

                        #-----------------------
                        # Exit out of while loop
                        #-----------------------
                        if brkFlg: break
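
A minimal standalone sketch of the completion test above (the helper name is hypothetical, not part of the original source): judging from the check in the loop, sfit4 writes an 'RDRV: DONE.' line near the end of sfit4.dtl on a successful run, so it suffices to scan the last few lines for that marker.

def sfit4Done(dtlFname, nTail=10, marker='RDRV: DONE.'):
    # Return True if the marker appears within the last nTail lines
    with open(dtlFname, 'r') as fopen:
        tail = fopen.readlines()[-nTail:]
    return any(marker in line for line in tail)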
Example #5
def main():

    #---------
    # Location
    #---------
    loc = 'tab'

    #-----------------------------------------
    # Interpolation flag for NCEP re-analysis:
    # False = Nearest point. True = linear
    #-----------------------------------------
    interpFlg = False

    #---------------------
    # Interpolation things
    #---------------------
    nSkip = 3  # Number of points to skip when merging WACCM and NCEP profiles
    intrpOrder = 1  # Order of interpolation
    logFlg = True  # Flag to do interpolation of log of water

    #-----------------------
    # Date Range of interest
    #-----------------------
    year = 2014

    iyear = year
    imnth = 1
    iday = 1
    fyear = year
    fmnth = 10
    fday = 1

    #-------------------------------------
    # Parameters for potential temperature
    #-------------------------------------
    thetaTrgt = 380.0  # Potential Temperature [K]
    P0 = 1000.0  # Reference pressure [mb]
    R_Cp = 0.286  # R/Cp for air

    #-------------------------------
    # NCEP Reanalysis data directory
    #-------------------------------
    NCEPTempdir = '/Volumes/data1/ebaumer/NCEP_Temp/'
    NCEPhgtDir = '/Volumes/data1/ebaumer/NCEP_hgt/'

    #--------------
    # Out directory
    #--------------
    outDir = '/Volumes/data1/ebaumer/NCEP_Temp/'
    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear, imnth, iday, fyear, fmnth, fday)
    yrList = dRange.yearList()

    #----------------------------------------
    # Station coordinates (lat/lon) for the
    # different stations
    #----------------------------------------
    if loc.lower() == 'tab':
        sLat = 76.52
        sLon = 291.23  # 68.77 W = (360.0 - 68.77) = 291.23 E

    elif loc.lower() == 'mlo':
        sLat = 19.4
        sLon = 204.43  # 155.57 W = (360 - 155.57) = 204.43 E

    elif loc.lower() == 'fl0':
        sLat = 40.4
        sLon = 254.76  # 105.24 W = (360 - 105.24) = 254.76 E

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(NCEPTempdir, exitFlg=True)
    ckDir(NCEPhgtDir, exitFlg=True)

    #-------------------
    # Loop through years
    #-------------------
    for year in yrList:

        #-------------------
        # Yearly Output File
        #-------------------
        outFile = outDir + '380K_theta_' + loc.lower() + '_' + str(year) + '.dat'

        #-------------------------------
        # Open and read year NetCDF file
        #-------------------------------
        trppFile = NCEPTempdir + 'air.' + str(year) + '.nc'
        ghghtFile = NCEPhgtDir + 'hgt.' + str(year) + '.nc'

        #----------------------------
        # Daily mean temperature file
        #----------------------------
        #with netcdf.netcdf_file(trppFile,'r',mmap=False) as TempObj:      # Can only be done with scipy Ver > 0.12.0
        TempObj = nc.Dataset(trppFile, 'r')
        Temp = TempObj.variables['air'][:]  # Mean daily air temperature [K] on pressure levels
        timeTrpp = TempObj.variables['time'][:]  # hours since 1-1-1 00:00:0.0
        latTrpp = TempObj.variables['lat'][:]  # degrees_north
        lonTrpp = TempObj.variables['lon'][:]  # degrees_east
        TempObj.close()

        #-----------------------------------------------------------
        # 'Unpack' the data => (data[int])*scale_factor + add_offset
        #-----------------------------------------------------------
        #TempData = Temp[:,:,:] * Temp.scale_factor + Temp.add_offset

        #------------------------------------------------
        # If not interpolating point in NCEP re-analysis,
        # find the closest lat/lon indices
        #------------------------------------------------
        if not interpFlg:
            latind = findCls(latTrpp, sLat)
            lonind = findCls(lonTrpp, sLon)
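            # findCls is defined elsewhere in the module; consistent with
            # its use here it returns the index of the nearest grid point,
            # e.g. np.argmin(np.abs(arr - val))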

        #------------------------------------------------------
        # Convert hours since 1-1-1 00:00:00 to datetime objects.
        # NCEP reanalysis encodes time with udunits, whose
        # leap-year/calendar convention is ambiguous, so take
        # the first time in the file as 1-1-YEAR and work with
        # offsets from it.
        #------------------------------------------------------
        timeHrs = timeTrpp - timeTrpp[0]
        timeAll = np.array([
            dt.datetime(year, 1, 1) + dt.timedelta(hours=int(h))
            for h in timeHrs
        ])  # This is a datetime object
        dateAll = np.array([dt.date(d.year, d.month, d.day) for d in timeAll
                            ])  # Convert datetime to strictly date
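        #------------------------------------------------------
        # e.g. timeHrs = [0, 24, 48] maps to Jan 1, 2, 3 of the
        # current year, one entry per daily mean field
        #------------------------------------------------------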

        #-------------------------
        # Geopotential Height file
        #-------------------------
        #with netcdf.netcdf_file(ghghtFile,'r',mmap=False) as gHghtF:      # Can only be done with scipy Ver > 0.12.0
        gHghtF = nc.Dataset(ghghtFile, 'r')
        hgt = gHghtF.variables[
            'hgt'][:]  # Height in [meters]. Dimensions: [time][vert][lat][lon]
        PlvlHghtData = gHghtF.variables['level'][:]
        gHghtF.close()
        PlvlHghtData = PlvlHghtData.astype(float)  # astype returns a copy; reassign it

        #-------------------------------------------------
        # Coefficient for potential temperature on each
        # pressure level:  theta = T * (P0 / P)**(R/Cp)
        #-------------------------------------------------
        theta_coef = (P0 / PlvlHghtData)**(R_Cp)
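        #------------------------------------------------------
        # Illustrative check: at P = 500 mb, theta_coef =
        # (1000/500)**0.286 ~= 1.219, so T = 250 K maps to
        # theta ~= 305 K
        #------------------------------------------------------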

        #-----------------------------------------------
        # Create empty 380 K theta-surface height array
        #-----------------------------------------------
        theta_hgt = np.zeros(len(dateAll))

        #--------------------------------------
        # Loop through all days in a date range
        #--------------------------------------
        for day in dRange.dateList:

            #------------------------------
            # Find corresponding date index
            #------------------------------
            i = np.where(dateAll == day)[0]

            #-------------------------
            # Get hgt for specific day
            #-------------------------
            dayHghtMat = np.squeeze(hgt[i, :, :, :])

            #-----------------------------
            # Get Theta for a specific day
            #-----------------------------
            dayTempMat = np.squeeze(Temp[i, :, :, :])

            #-------------------------------------------
            # For each level interpolate height and Theta
            # based on latitude and longitude of site
            #-------------------------------------------
            dayHgt = np.zeros(np.shape(dayHghtMat)[0])
            thetaLvl = np.zeros(np.shape(dayHghtMat)[0])

            for lvl in range(0, np.shape(dayHghtMat)[0]):
                #-------
                # Height
                #-------
                dayHgtLvl = np.squeeze(dayHghtMat[lvl, :, :])
                if interpFlg:
                    dayHgt[lvl] = interp2d(lonTrpp[:],
                                           latTrpp[:],
                                           dayHgtLvl,
                                           kind='linear',
                                           bounds_error=True)(sLon, sLat)
                else:
                    dayHgt[lvl] = dayHgtLvl[latind, lonind]

                #------
                # Theta
                #------
                TempDayLvl = np.squeeze(dayTempMat[lvl, :, :])
                if interpFlg:
                    thetaLvl[lvl] = interp2d(lonTrpp[:],
                                             latTrpp[:],
                                             TempDayLvl,
                                             kind='linear',
                                             bounds_error=True)(sLon, sLat)
                else:
                    thetaLvl[lvl] = TempDayLvl[latind, lonind]

            dayHgt = dayHgt.astype(float) / 1000.0  # Convert height units [m] => [km]
            thetaLvl = thetaLvl.astype(float) * theta_coef  # Apply theta coefficient: Temperature => Theta

            #-----------------------------------------------
            # Interpolate height onto potential temperature
            # to find the height of the target theta surface
            #-----------------------------------------------
            theta_hgt[i] = interp1d(thetaLvl, dayHgt, kind='linear')(thetaTrgt)

        #----------------------------------------
        # Write Tropopause heights to yearly file
        #----------------------------------------
        with open(outFile, 'w') as fopen:
            hdr = 'Date           380K Potential Temperature Height [km]\n'
            fopen.write(hdr)
            strformat = '{0:15}{1:<38}\n'
            for i, indDay in enumerate(timeAll):
                daystr = "{0:04d}{1:02d}{2:02d}".format(
                    indDay.year, indDay.month, indDay.day)
                temp = [daystr, theta_hgt[i]]
                fopen.write(strformat.format(*temp))
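
The core of the example above, condensed into a self-contained sketch (the function name and argument order are assumptions, not the original code; expects temperature T [K] and height z [km] on pressure levels P [mb]):

import numpy as np
from scipy.interpolate import interp1d

def thetaHeight(T, P, z, thetaTrgt=380.0, P0=1000.0, R_Cp=0.286):
    # Potential temperature on each pressure level: theta = T*(P0/P)**(R/Cp)
    theta = T * (P0 / P)**R_Cp
    # Interpolate height onto theta to locate the target theta surface
    return float(interp1d(theta, z, kind='linear')(thetaTrgt))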
Example #6
def main():
    
    #---------
    # Location
    #---------
    loc = 'tab'
    
    #-----------------------
    # Date Range of interest
    #-----------------------
    iyear          = 2013
    imnth          = 1
    iday           = 1
    fyear          = 2013
    fmnth          = 12
    fday           = 31
    
    #---------------
    # Data Directory
    #---------------
    dataDir  = '/Volumes/data1/'+loc.lower()+'/'
    
    #-----------------------------
    # Names of output figure files
    #-----------------------------
    figFnameQ = '/Users/ebaumer/Data/Q_'+loc.lower()+'.pdf'
    figFnameT = '/Users/ebaumer/Data/T_'+loc.lower()+'.pdf'
    
    figFnameQcor = '/Users/ebaumer/Data/Qcorr_'+loc.lower()+'.pdf'
    figFnameTcor = '/Users/ebaumer/Data/Tcorr_'+loc.lower()+'.pdf'
    
    figFnameSTD  = '/Users/ebaumer/Data/STD_'+loc.lower()+'.pdf'
    fdataSTD     = '/Users/ebaumer/Data/STD_'+loc.lower()+'.dat'
    
    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear,imnth,iday,fyear,fmnth,fday) 

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(dataDir,exitFlg=True)
    
    #---------------------
    # Interpolation things
    #---------------------
    intrpOrder = 1            # Order of interpolation
    logFlg     = False        # Flag to do interpolation of log of water    
    
    #---------------------------------------
    # Altitude levels for different stations
    #---------------------------------------
    if loc.lower() == 'tab':
        Z = np.array([  44.0000,     42.0000,     40.0000,     38.0000,     36.0000, 
                        34.0000,     32.0000,     30.0000,     28.0000,     26.0000, 
                        24.0000,     22.0000,     20.0000,     18.0000,     16.0000, 
                        14.0000,     12.0000,     10.0000,      9.0000,      8.0000, 
                         7.0000,      6.0000,      5.0000,      4.0000,      3.0000, 
                         2.0000,      1.0000,      0.2250   ])
        sLat = 76.52
        sLon = 291.23               # 68.77 W = (360.0 - 68.77) = 291.23 E
        
    elif loc.lower() == 'mlo':
        Z = np.array([  44.0000,     42.0000,     40.0000,     38.0000,     36.0000, 
                        34.0000,     32.0000,     30.0000,     28.0000,     26.0000, 
                        24.0000,     22.0000,     20.0000,     18.0000,     16.0000, 
                        14.0000,     12.0000,     10.0000,      9.0000,      8.0000, 
                         7.0000,      6.0000,      5.0000,      4.0000,      3.3960])
        
        sLat = 19.4
        sLon = 204.43                # 155.57 W = (360 - 155.57) = 204.43 E
        
    elif loc.lower() == 'fl0':
        Z = np.array([ 120.0000,    110.0000,    100.0000,     94.0000,     90.0000, 
                        85.0000,     80.0000,     75.0000,     70.0000,     65.0000, 
                        60.0000,     55.0000,     50.0000,     48.0000,     46.0000, 
                        44.0000,     42.0000,     40.0000,     38.0000,     36.0000, 
                        34.0000,     32.0000,     30.0000,     28.0000,     26.0000, 
                        24.0000,     22.0000,     20.0000,     18.0000,     16.0000, 
                        14.0000,     12.0000,     10.0000,      9.0000,      8.0000, 
                         7.0000,      6.0000,      5.0000,      4.0000,      3.0000, 
                         2.0000,      1.6120])
        
        sLat = 40.4
        sLon = 254.76                # 105.24 W = (360 - 105.24) = 254.76 E      
    
    
    #--------------------------------------------
    # Walk through first level of directories in
    # data directory and collect directory names 
    #--------------------------------------------
    dirLst = []
    for drs in next(os.walk(dataDir))[1]: 
        
        #-------------------------------------------
        # Test directory to make sure it is a number
        #-------------------------------------------
        try:    int(drs[0:4])
        except ValueError: continue

        if dRange.inRange( int(drs[0:4]), int(drs[4:6]), int(drs[6:8]) ): dirLst.append(dataDir+drs+'/')            
    
    dirLst.sort()    
    
    #----------------------------------------------
    # Loop through all day folders in the date range
    #----------------------------------------------
    firstFlg = True
    
    for sngDir in dirLst:
        
        #----------------------------
        # Get date in datetime format
        #----------------------------
        yrstr   = os.path.basename(sngDir[:-1])[0:4]
        mnthstr = os.path.basename(sngDir[:-1])[4:6]
        daystr  = os.path.basename(sngDir[:-1])[6:8]
        oneDay  = dt.datetime(int(yrstr),int(mnthstr),int(daystr))

        #----------------------------------------------------------------
        # Open and read ERA Interim Q and T values for 6 hourly time step
        #----------------------------------------------------------------
        Q_file = sngDir + 'ERA_Interm_Q.' + yrstr + mnthstr + daystr
        T_file = sngDir + 'ERA_Interm_T.' + yrstr + mnthstr + daystr
        
        ckFile(Q_file,exitFlg=True)
        ckFile(T_file,exitFlg=True)
                           
        with open(Q_file,'r') as fopen: lines = fopen.readlines()
        alt_temp  = np.array([ float(x.strip().split(',')[0]) for x in lines[2:] ])
        Q_daily_temp = np.array([ float(x.strip().split(',')[1]) for x in lines[2:] ])
        Q_00_temp = np.array([ float(x.strip().split(',')[2]) for x in lines[2:] ])
        Q_06_temp = np.array([ float(x.strip().split(',')[3]) for x in lines[2:] ])
        Q_12_temp = np.array([ float(x.strip().split(',')[4]) for x in lines[2:] ])
        Q_18_temp = np.array([ float(x.strip().split(',')[5]) for x in lines[2:] ])
        
        with open(T_file,'r') as fopen: lines = fopen.readlines()
        T_daily_temp = np.array([ float(x.strip().split(',')[1]) for x in lines[2:] ])
        T_00_temp = np.array([ float(x.strip().split(',')[2]) for x in lines[2:] ])
        T_06_temp = np.array([ float(x.strip().split(',')[3]) for x in lines[2:] ])
        T_12_temp = np.array([ float(x.strip().split(',')[4]) for x in lines[2:] ])
        T_18_temp = np.array([ float(x.strip().split(',')[5]) for x in lines[2:] ])        
              
        #--------------------------------------------------------------
        # Interpolate the Q and T profiles onto the station altitude
        # grid Z
        #--------------------------------------------------------------
        Q_daily_temp = interpAlt(alt_temp, Z, Q_daily_temp, intrpOrder, logFlg)
        Q_00_temp = interpAlt(alt_temp, Z, Q_00_temp, intrpOrder, logFlg)
        Q_06_temp = interpAlt(alt_temp, Z, Q_06_temp, intrpOrder, logFlg)
        Q_12_temp = interpAlt(alt_temp, Z, Q_12_temp, intrpOrder, logFlg)
        Q_18_temp = interpAlt(alt_temp, Z, Q_18_temp, intrpOrder, logFlg)
        
        T_daily_temp = interpAlt(alt_temp, Z, T_daily_temp, intrpOrder, logFlg)
        T_00_temp = interpAlt(alt_temp, Z, T_00_temp, intrpOrder, logFlg)
        T_06_temp = interpAlt(alt_temp, Z, T_06_temp, intrpOrder, logFlg)
        T_12_temp = interpAlt(alt_temp, Z, T_12_temp, intrpOrder, logFlg)
        T_18_temp = interpAlt(alt_temp, Z, T_18_temp, intrpOrder, logFlg)        
        
        if firstFlg:
            firstFlg = False
            Q_daily = Q_daily_temp
            Q_00 = Q_00_temp
            Q_06 = Q_06_temp
            Q_12 = Q_12_temp
            Q_18 = Q_18_temp
            T_daily = T_daily_temp
            T_00 = T_00_temp
            T_06 = T_06_temp
            T_12 = T_12_temp
            T_18 = T_18_temp          
        else:
            Q_daily = np.vstack((Q_daily,Q_daily_temp))
            Q_00 = np.vstack((Q_00,Q_00_temp))
            Q_06 = np.vstack((Q_06,Q_06_temp))
            Q_12 = np.vstack((Q_12,Q_12_temp))
            Q_18 = np.vstack((Q_18,Q_18_temp))
            T_daily = np.vstack((T_daily,T_daily_temp))
            T_00 = np.vstack((T_00,T_00_temp))
            T_06 = np.vstack((T_06,T_06_temp))
            T_12 = np.vstack((T_12,T_12_temp))
            T_18 = np.vstack((T_18,T_18_temp))           
        
    
    #----------------
    # Find Statistics
    #----------------
    Q_daily_mean = np.mean(Q_daily,axis=0)
    Q_daily_std  = np.std(Q_daily,axis=0)
    Q_00_mean = np.mean(Q_00,axis=0)
    Q_00_std  = np.std(Q_00,axis=0)
    Q_06_mean = np.mean(Q_06,axis=0)
    Q_06_std  = np.std(Q_06,axis=0)
    Q_12_mean = np.mean(Q_12,axis=0)
    Q_12_std  = np.std(Q_12,axis=0)
    Q_18_mean = np.mean(Q_18,axis=0)
    Q_18_std  = np.std(Q_18,axis=0)  
    
    T_daily_mean = np.mean(T_daily,axis=0)
    T_daily_std  = np.std(T_daily,axis=0)    
    T_00_mean = np.mean(T_00,axis=0)
    T_00_std  = np.std(T_00,axis=0)
    T_06_mean = np.mean(T_06,axis=0)
    T_06_std  = np.std(T_06,axis=0)
    T_12_mean = np.mean(T_12,axis=0)
    T_12_std  = np.std(T_12,axis=0)
    T_18_mean = np.mean(T_18,axis=0)
    T_18_std  = np.std(T_18,axis=0)        
    
    T_diff00 = abs(T_daily - T_00)    
    T_diff06 = abs(T_daily - T_06)
    T_diff12 = abs(T_daily - T_12)
    T_diff18 = abs(T_daily - T_18)
    T_diffAll= np.vstack((T_diff00,T_diff06,T_diff12,T_diff18))
    T_diffSTD= np.std(T_diffAll,axis=0)
    
    Q_diff00 = abs(Q_daily - Q_00)    
    Q_diff06 = abs(Q_daily - Q_06)
    Q_diff12 = abs(Q_daily - Q_12)
    Q_diff18 = abs(Q_daily - Q_18)
    Q_diffAll= np.vstack((Q_diff00,Q_diff06,Q_diff12,Q_diff18))
    Q_diffSTD= np.std(Q_diffAll,axis=0)    
    
    #----------------------------
    # Flatten arrays to correlate
    #----------------------------
    Q_daily_flat = Q_daily.flatten()
    Q_00_flat    = Q_00.flatten()
    Q_06_flat    = Q_06.flatten()
    Q_12_flat    = Q_12.flatten()
    Q_18_flat    = Q_18.flatten()
    
    T_daily_flat = T_daily.flatten()
    T_00_flat    = T_00.flatten()
    T_06_flat    = T_06.flatten()
    T_12_flat    = T_12.flatten()
    T_18_flat    = T_18.flatten()
    
    alt_mat      = np.array([Z]*np.shape(Q_00)[0])
    alt_flat     = alt_mat.flatten()
                                     
    #--------------------------------------------
    # Calculate Pearson correlation coefficients
    #--------------------------------------------
    (R_Q00,_) = pearsonr(Q_00_flat, Q_daily_flat)    
    (R_Q06,_) = pearsonr(Q_06_flat, Q_daily_flat)    
    (R_Q12,_) = pearsonr(Q_12_flat, Q_daily_flat)    
    (R_Q18,_) = pearsonr(Q_18_flat, Q_daily_flat)    
    
    (R_T00,_) = pearsonr(T_00_flat, T_daily_flat)    
    (R_T06,_) = pearsonr(T_06_flat, T_daily_flat)    
    (R_T12,_) = pearsonr(T_12_flat, T_daily_flat)    
    (R_T18,_) = pearsonr(T_18_flat, T_daily_flat)     
    
    #------------------------
    # Calculate RMSE and bias
    #------------------------
    RMSE_Q00 = rmse(Q_00_flat, Q_daily_flat)
    RMSE_Q06 = rmse(Q_06_flat, Q_daily_flat)
    RMSE_Q12 = rmse(Q_12_flat, Q_daily_flat)
    RMSE_Q18 = rmse(Q_18_flat, Q_daily_flat)
    
    RMSE_T00 = rmse(T_00_flat, T_daily_flat)
    RMSE_T06 = rmse(T_06_flat, T_daily_flat)
    RMSE_T12 = rmse(T_12_flat, T_daily_flat)
    RMSE_T18 = rmse(T_18_flat, T_daily_flat)    
    
    bias_Q00 = bias(Q_00_flat, Q_daily_flat)
    bias_Q06 = bias(Q_06_flat, Q_daily_flat)
    bias_Q12 = bias(Q_12_flat, Q_daily_flat)
    bias_Q18 = bias(Q_18_flat, Q_daily_flat)
    
    bias_T00 = bias(T_00_flat, T_daily_flat)
    bias_T06 = bias(T_06_flat, T_daily_flat)
    bias_T12 = bias(T_12_flat, T_daily_flat)
    bias_T18 = bias(T_18_flat, T_daily_flat) 
    
    print 'R_Q00 = {}'.format(R_Q00)
    print 'R_Q06 = {}'.format(R_Q06)
    print 'R_Q12 = {}'.format(R_Q12)
    print 'R_Q18 = {}'.format(R_Q18)
    
    print 'R_T00 = {}'.format(R_T00)
    print 'R_T06 = {}'.format(R_T06)
    print 'R_T12 = {}'.format(R_T12)
    print 'R_T18 = {}'.format(R_T18)    
    
    print 'RMSE_Q00 = {}'.format(RMSE_Q00)
    print 'RMSE_Q06 = {}'.format(RMSE_Q06)
    print 'RMSE_Q12 = {}'.format(RMSE_Q12)
    print 'RMSE_Q18 = {}'.format(RMSE_Q18)    
    
    print 'RMSE_T00 = {}'.format(RMSE_T00)
    print 'RMSE_T06 = {}'.format(RMSE_T06)
    print 'RMSE_T12 = {}'.format(RMSE_T12)
    print 'RMSE_T18 = {}'.format(RMSE_T18)      
    
    print 'bias_Q00 = {}'.format(bias_Q00)
    print 'bias_Q06 = {}'.format(bias_Q06)
    print 'bias_Q12 = {}'.format(bias_Q12)
    print 'bias_Q18 = {}'.format(bias_Q18)    
    
    print 'bias_T00 = {}'.format(bias_T00)
    print 'bias_T06 = {}'.format(bias_T06)
    print 'bias_T12 = {}'.format(bias_T12)
    print 'bias_T18 = {}'.format(bias_T18)      
    
    
    #-----------
    # Write data
    #-----------
    with open(fdataSTD,'w') as fopen:
        fopen.write('Standard deviation of the absolute value of the temperature and water profile diurnal cycle\n')
        fopen.write('{0:>20s}{1:>20s}{2:>20s}\n'.format('Height [km]','Temperature [K]','Water [VMR]'))
    
        for row in zip(Z, T_diffSTD, Q_diffSTD):
            strformat = ','.join('{:>20.7E}' for i in row) + ', \n'
            fopen.write(strformat.format(*row))
    
    
    #----------
    # Plot data
    #----------
    pdfsav = PdfPages(figFnameSTD)
    fig,(ax1,ax2) = plt.subplots(1,2,sharey=True)

    ax1.plot(T_diffSTD/T_daily_mean*100.0,Z)
    ax2.plot(Q_diffSTD/Q_daily_mean*100.0,Z)
         
    ax1.grid(True,which='both')   
    ax2.grid(True,which='both')
    ax1.set_ylabel('Altitude [km]')
    ax1.set_xlabel('Temperature [% of Daily Mean]')
    ax2.set_xlabel('Water [% of Daily Mean]')   
    #plt.suptitle('Standard Deviation of Diurnal Cycle for Temperature and Water Profiles \n Mauna Loa, 2013 ',multialignment='center')
    
    ax1.tick_params(axis='x',which='both',labelsize=8)
    ax2.tick_params(axis='x',which='both',labelsize=7)
    

    pdfsav.savefig(fig,dpi=350)    
    pdfsav.close()    
    
    # Q Corr
    pdfsav = PdfPages(figFnameQcor)
    fig,((ax1,ax2),(ax3,ax4))  = plt.subplots(2,2)

    cbar = ax1.scatter(Q_00_flat,Q_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax1.plot(ax1.get_xlim(), ax1.get_ylim(), ls="--", c=".3")
    ax2.scatter(Q_06_flat,Q_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax2.plot(ax2.get_xlim(), ax2.get_ylim(), ls="--", c=".3")
    ax3.scatter(Q_12_flat,Q_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax3.plot(ax3.get_xlim(), ax3.get_ylim(), ls="--", c=".3")
    ax4.scatter(Q_18_flat,Q_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax4.plot(ax4.get_xlim(), ax4.get_ylim(), ls="--", c=".3")
    
    # Axis labels match scatter(x = 6-hourly Q, y = daily mean Q)
    ax1.set_xlabel('Q at 00 UTC',fontsize=8)
    ax1.set_ylabel('Q Daily Avg',fontsize=8)
    
    ax2.set_xlabel('Q at 06 UTC',fontsize=8)
    ax2.set_ylabel('Q Daily Avg',fontsize=8)
    
    ax3.set_xlabel('Q at 12 UTC',fontsize=8)
    ax3.set_ylabel('Q Daily Avg',fontsize=8)
    
    ax4.set_xlabel('Q at 18 UTC',fontsize=8)
    ax4.set_ylabel('Q Daily Avg',fontsize=8)

    ax1.text(0.05,0.95,'R = {:4.3f}'.format(R_Q00),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.95,'R = {:4.3f}'.format(R_Q06),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.95,'R = {:4.3f}'.format(R_Q12),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.95,'R = {:4.3f}'.format(R_Q18),fontsize=8,transform=ax4.transAxes)

    ax1.text(0.05,0.87,'RMSE = {0:3.2e} ({1:5.2f}%)'.format(RMSE_Q00,RMSE_Q00/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.87,'RMSE = {0:3.2e} ({1:5.2f}%)'.format(RMSE_Q06,RMSE_Q06/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.87,'RMSE = {0:3.2e} ({1:5.2f}%)'.format(RMSE_Q12,RMSE_Q12/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.87,'RMSE = {0:3.2e} ({1:5.2f}%)'.format(RMSE_Q18,RMSE_Q18/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax4.transAxes)

    ax1.text(0.05,0.79,'Bias = {0:3.2e} ({1:5.2f}%)'.format(bias_Q00,bias_Q00/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.79,'Bias = {0:3.2e} ({1:5.2f}%)'.format(bias_Q06,bias_Q06/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.79,'Bias = {0:3.2e} ({1:5.2f}%)'.format(bias_Q12,bias_Q12/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.79,'Bias = {0:3.2e} ({1:5.2f}%)'.format(bias_Q18,bias_Q18/np.mean(Q_daily_flat)*100.),fontsize=8,transform=ax4.transAxes)
    
    ax1.grid(True,which='both')
    ax2.grid(True,which='both')
    ax3.grid(True,which='both')
    ax4.grid(True,which='both')
    ax1.tick_params(axis='x',which='both',labelsize=6)
    ax1.tick_params(axis='y',which='both',labelsize=6)    
    ax2.tick_params(axis='x',which='both',labelsize=6)
    ax2.tick_params(axis='y',which='both',labelsize=6) 
    ax3.tick_params(axis='x',which='both',labelsize=6)
    ax3.tick_params(axis='y',which='both',labelsize=6) 
    ax4.tick_params(axis='x',which='both',labelsize=6)
    ax4.tick_params(axis='y',which='both',labelsize=6)     
    
    fig.subplots_adjust(right=0.85)
    cbar_ax = fig.add_axes([0.9, 0.1, 0.03, 0.8])    
    cbarObj = fig.colorbar(cbar,cax=cbar_ax)
    cbarObj.ax.tick_params(labelsize=8)
    cbarObj.solids.set_rasterized(True) 
    
    cbarObj.set_label('Altitude [km]',fontsize=8)
    pdfsav.savefig(fig,dpi=350)
    pdfsav.close()
    
    # T Corr
    pdfsav = PdfPages(figFnameTcor)
    fig,((ax1,ax2),(ax3,ax4))  = plt.subplots(2,2)

    cbar = ax1.scatter(T_00_flat,T_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax1.plot(ax1.get_xlim(), ax1.get_ylim(), ls="--", c=".3")
    ax2.scatter(T_06_flat,T_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax2.plot(ax2.get_xlim(), ax2.get_ylim(), ls="--", c=".3")
    ax3.scatter(T_12_flat,T_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax3.plot(ax3.get_xlim(), ax3.get_ylim(), ls="--", c=".3")
    ax4.scatter(T_18_flat,T_daily_flat,c=alt_flat,edgecolors='none',vmin=np.min(Z),vmax=np.max(Z))
    ax4.plot(ax4.get_xlim(), ax4.get_ylim(), ls="--", c=".3")
    
    # Axis labels match scatter(x = 6-hourly T, y = daily mean T)
    ax1.set_xlabel('T at 00 UTC',fontsize=8)
    ax1.set_ylabel('T Daily Avg',fontsize=8)
    
    ax2.set_xlabel('T at 06 UTC',fontsize=8)
    ax2.set_ylabel('T Daily Avg',fontsize=8)
    
    ax3.set_xlabel('T at 12 UTC',fontsize=8)
    ax3.set_ylabel('T Daily Avg',fontsize=8)
    
    ax4.set_xlabel('T at 18 UTC',fontsize=8)
    ax4.set_ylabel('T Daily Avg',fontsize=8)

    ax1.text(0.05,0.95,'R = {:4.3f}'.format(R_T00),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.95,'R = {:4.3f}'.format(R_T06),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.95,'R = {:4.3f}'.format(R_T12),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.95,'R = {:4.3f}'.format(R_T18),fontsize=8,transform=ax4.transAxes)

    ax1.text(0.05,0.87,'RMSE = {0:5.3f} ({1:5.2f}%)'.format(RMSE_T00,RMSE_T00/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.87,'RMSE = {0:5.3f} ({1:5.2f}%)'.format(RMSE_T06,RMSE_T06/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.87,'RMSE = {0:5.3f} ({1:5.2f}%)'.format(RMSE_T12,RMSE_T12/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.87,'RMSE = {0:5.3f} ({1:5.2f}%)'.format(RMSE_T18,RMSE_T18/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax4.transAxes)

    ax1.text(0.05,0.79,'Bias = {0:5.3f} ({1:5.2f}%)'.format(bias_T00,bias_T00/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax1.transAxes)
    ax2.text(0.05,0.79,'Bias = {0:5.3f} ({1:5.2f}%)'.format(bias_T06,bias_T06/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax2.transAxes)
    ax3.text(0.05,0.79,'Bias = {0:5.3f} ({1:5.2f}%)'.format(bias_T12,bias_T12/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax3.transAxes)
    ax4.text(0.05,0.79,'Bias = {0:5.3f} ({1:5.2f}%)'.format(bias_T18,bias_T18/np.mean(T_daily_flat)*100.),fontsize=8,transform=ax4.transAxes)

    
    ax1.grid(True,which='both')
    ax2.grid(True,which='both')
    ax3.grid(True,which='both')
    ax4.grid(True,which='both')
    ax1.tick_params(axis='x',which='both',labelsize=7)
    ax1.tick_params(axis='y',which='both',labelsize=7)    
    ax2.tick_params(axis='x',which='both',labelsize=7)
    ax2.tick_params(axis='y',which='both',labelsize=7) 
    ax3.tick_params(axis='x',which='both',labelsize=7)
    ax3.tick_params(axis='y',which='both',labelsize=7) 
    ax4.tick_params(axis='x',which='both',labelsize=7)
    ax4.tick_params(axis='y',which='both',labelsize=7)        
    
    fig.subplots_adjust(right=0.85)
    cbar_ax = fig.add_axes([0.9, 0.1, 0.03, 0.8])    
    cbarObj = fig.colorbar(cbar,cax=cbar_ax)
    cbarObj.solids.set_rasterized(True)
    
    cbarObj.set_label('Altitude [km]',fontsize=8)
    cbarObj.ax.tick_params(labelsize=8)
    pdfsav.savefig(fig,dpi=350)
    pdfsav.close()    
    
    # Q
    pdfsav = PdfPages(figFnameQ)
    fig,(ax1,ax2) = plt.subplots(1,2)
    
    ax1.plot(Q_daily_mean,Z,label='Daily')
    ax2.plot(Q_daily_mean,Z,label='Daily')
    #ax1.fill_betweenx(Z,Q_daily_mean-Q_daily_std,Q_daily_mean+Q_daily_std,alpha=0.5,color='0.75')

    ax1.plot(Q_00_mean,Z,label='00 UTC')
    ax2.plot(Q_00_mean,Z,label='00 UTC')
    #ax1.fill_betweenx(Z,Q_00_mean-Q_00_std,Q_00_mean+Q_00_std,alpha=0.5,color='0.75')

    ax1.plot(Q_06_mean,Z,label='06 UTC')
    ax2.plot(Q_06_mean,Z,label='06 UTC')
    #ax1.fill_betweenx(Z,Q_06_mean-Q_06_std,Q_06_mean+Q_06_std,alpha=0.5,color='0.75')
    
    ax1.plot(Q_12_mean,Z,label='12 UTC')
    ax2.plot(Q_12_mean,Z,label='12 UTC')
    #ax1.fill_betweenx(Z,Q_12_mean-Q_12_std,Q_12_mean+Q_12_std,alpha=0.5,color='0.75')
    
    ax1.plot(Q_18_mean,Z,label='18 UTC')
    ax2.plot(Q_18_mean,Z,label='18 UTC')
    #ax1.fill_betweenx(Z,Q_18_mean-Q_18_std,Q_18_mean+Q_18_std,alpha=0.5,color='0.75')    
         
    ax1.grid(True,which='both')
    ax1.legend(prop={'size':9})   
    ax2.grid(True,which='both')
    ax2.legend(prop={'size':9})     
    ax1.set_ylabel('Altitude [km]')
    ax1.set_xlabel('Log VMR')
    ax1.set_xscale('log')
    ax2.set_xlabel('Log VMR')
    ax2.set_xscale('log')    
    ax2.set_ylim([0,10])
    
    ax1.tick_params(axis='x',which='both',labelsize=8)
    ax2.tick_params(axis='x',which='both',labelsize=8)
    #plt.suptitle('Mean and Standard Deviation of Water Profiles Derived from ERA Interim \n Mauna Loa 2013',multialignment='center')

    pdfsav.savefig(fig,dpi=350)
    pdfsav.close()
    
    # T      
    pdfsav = PdfPages(figFnameT)
    fig,(ax1,ax2) = plt.subplots(1,2)

    ax1.plot(T_daily_mean,Z,label='Daily')
    ax2.plot(T_daily_mean,Z,label='Daily')
    #ax1.fill_betweenx(Z,T_daily_mean-T_daily_std,T_daily_mean+T_daily_std,alpha=0.5,color='0.75')

    ax1.plot(T_00_mean,Z,label='00 UTC')
    ax2.plot(T_00_mean,Z,label='00 UTC')
    #ax1.fill_betweenx(Z,T_00_mean-T_00_std,T_00_mean+T_00_std,alpha=0.5,color='0.75')

    ax1.plot(T_06_mean,Z,label='06 UTC')
    ax2.plot(T_06_mean,Z,label='06 UTC')
    #ax1.fill_betweenx(Z,T_06_mean-T_06_std,T_06_mean+T_06_std,alpha=0.5,color='0.75')
    
    ax1.plot(T_12_mean,Z,label='12 UTC')
    ax2.plot(T_12_mean,Z,label='12 UTC')
    #ax1.fill_betweenx(Z,T_12_mean-T_12_std,T_12_mean+T_12_std,alpha=0.5,color='0.75')
    
    ax1.plot(T_18_mean,Z,label='18 UTC')
    ax2.plot(T_18_mean,Z,label='18 UTC')
    #ax1.fill_betweenx(Z,T_18_mean-T_18_std,T_18_mean+T_18_std,alpha=0.5,color='0.75')   
         
    ax1.grid(True,which='both')
    ax1.legend(prop={'size':9})   
    ax2.grid(True,which='both')
    ax2.legend(prop={'size':9})     
    ax1.set_ylabel('Altitude [km]')
    ax1.set_xlabel('Temperature [K]')
    ax2.set_xlabel('Temperature [K]')   
    ax2.set_ylim([0,10])
    
    ax1.tick_params(axis='x',which='both',labelsize=8)
    ax2.tick_params(axis='x',which='both',labelsize=8)
    #plt.suptitle('Mean and Standard Deviation of Temperature Profiles Derived from ERA Interim \n Mauna Loa 2013',multialignment='center')

    pdfsav.savefig(fig,dpi=350)    
    pdfsav.close()
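
The rmse and bias helpers called above are defined elsewhere in the module; a minimal sketch consistent with how they are used here (an assumption, not the original definitions):

import numpy as np

def rmse(x, y):
    # Root-mean-square difference between two equal-length arrays
    return np.sqrt(np.mean((np.asarray(x) - np.asarray(y))**2))

def bias(x, y):
    # Mean difference (x minus y)
    return np.mean(np.asarray(x) - np.asarray(y))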
Example #7
def main():

    #---------
    # Location
    #---------
    loc = 'tab'

    #-------------------------------------------
    # Version tags appended to water file names
    #-------------------------------------------
    verW = 'v4'  # version for daily profiles
    verW_t = 'v66'  # version for 6-hourly profiles

    #---------------------------------------------
    # Interpolation flag for the ERA reanalysis:
    # False = nearest point. True = linear
    #---------------------------------------------
    interpFlg = False

    #---------------------
    # Interpolation things
    #---------------------
    nSkip = 3  # Number of points to skip when merging WACCM and reanalysis profiles
    intrpOrder = 1  # Order of interpolation
    logFlg = False  # Flag to do interpolation of log of water

    #-----------------------
    # Date Range of interest
    #-----------------------
    iyear = 2017
    imnth = 6
    iday = 1
    fyear = 2017
    fmnth = 12
    fday = 31

    #------------------------------
    # ERA Reanalysis data directory
    #------------------------------
    ERAdir = '/data1/ancillary_data/ERAdata/'

    #---------------
    # Data Directory
    #---------------
    dataDir = '/data1/' + loc.lower() + '/'

    #-------------------------
    # WACCM monthly means file
    #-------------------------
    WACCMfile = '/data/Campaign/' + loc.upper() + '/waccm/WACCM_pTW-meanV6.' + loc.upper()

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear, imnth, iday, fyear, fmnth, fday)

    #---------------------------------------
    # Altitude levels for different stations
    #---------------------------------------
    if loc.lower() == 'tab':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.0000, 0.2250
        ])
        sLat = 76.52
        sLon = 291.23  # 68.77 W = (360.0 - 68.77) = 291.23 E

    elif loc.lower() == 'mlo':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.3960
        ])

        sLat = 19.4
        sLon = 204.43  # 155.57 W = (360 - 155.57) = 204.43 E

    elif loc.lower() == 'fl0':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 94.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.6120
        ])

        sLat = 40.4
        sLon = 254.76  # 105.24 W = (360 - 105.24) = 254.76 E

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(ERAdir, exitFlg=True)
    ckDir(dataDir, exitFlg=True)
    ckFile(WACCMfile, exitFlg=True)

    #--------------------------------------------------------------------
    # Read WACCM monthly mean data. The WACCM file is ordered ascending
    # in altitude; flip it so the profiles are descending. Units are km.
    #--------------------------------------------------------------------
    with open(WACCMfile, 'r') as fopen:
        lines = fopen.readlines()

    nlyrs = int(lines[0].strip().split()[0])
    s_ind = 3
    Z = np.flipud(
        np.array([
            float(row.strip().split()[0]) for row in lines[s_ind:nlyrs + s_ind]
        ]))
    waccmT = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))
    s_ind = 3 + nlyrs + 2
    waccmP = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))
    s_ind = 3 + nlyrs + 2 + nlyrs + 2
    waccmW = np.flipud(
        np.array([[float(x) for x in line.strip().split()[1:]]
                  for line in lines[s_ind:nlyrs + s_ind]]))
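
    #------------------------------------------------------------------
    # File layout implied by the indexing above: line 0 gives the
    # number of layers; after 3 header lines the T, P, and W blocks of
    # nlyrs rows each follow, separated by 2 lines; column 0 is
    # altitude [km] and the remaining columns hold the 12 monthly means
    # (indexed by mnthInd below). Note that Z is redefined here to the
    # WACCM grid, replacing the station grid selected above.
    #------------------------------------------------------------------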

    #--------------------------------------------
    # Walk through first level of directories in
    # data directory and collect directory names
    #--------------------------------------------
    dirLst = []
    for drs in next(os.walk(dataDir))[1]:

        #-------------------------------------------
        # Test directory to make sure it is a number
        #-------------------------------------------
        try:
            int(drs[0:4])
        except ValueError:
            continue

        if dRange.inRange(int(drs[0:4]), int(drs[4:6]), int(drs[6:8])):
            dirLst.append(dataDir + drs + '/')

    dirLst.sort()

    #----------------------------------------------
    # Loop through all day folders in the date range
    #----------------------------------------------
    for sngDir in dirLst:

        #----------------------------
        # Get date in datetime format
        #----------------------------
        oneDay = dt.datetime(int(os.path.basename(sngDir[:-1])[0:4]),
                             int(os.path.basename(sngDir[:-1])[4:6]),
                             int(os.path.basename(sngDir[:-1])[6:8]))

        #--------------------------------------------------
        # Find month index for monthly WACCM water profiles
        #--------------------------------------------------
        mnthInd = oneDay.month - 1  # -1 because January is in the 0th column

        #-------------------------------------------
        # Open daily ERA interm files 00, 06, 12, 18
        #-------------------------------------------
        YYYY = "{0:04d}".format(oneDay.year)
        MM = "{0:02d}".format(oneDay.month)
        DD = "{0:02d}".format(oneDay.day)
        ERA_F1 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '00.nc'
        ERA_F2 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '06.nc'
        ERA_F3 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '12.nc'
        ERA_F4 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '18.nc'

        f1 = netcdf.netcdf_file(ERA_F1, 'r', mmap=False)
        f2 = netcdf.netcdf_file(ERA_F2, 'r', mmap=False)
        f3 = netcdf.netcdf_file(ERA_F3, 'r', mmap=False)
        f4 = netcdf.netcdf_file(ERA_F4, 'r', mmap=False)

        #-----------------------------------
        # Lat and lon should be the same for
        # all files. Just grab once
        #-----------------------------------
        lat = f1.variables['g4_lat_1']
        lon = f1.variables['g4_lon_2']
        Plvl = f1.variables['lv_ISBL0']
        nlvls = np.shape(Plvl[:])[0]

        Q_00 = f1.variables['Q_GDS4_ISBL']
        Q_06 = f2.variables['Q_GDS4_ISBL']
        Q_12 = f3.variables['Q_GDS4_ISBL']
        Q_18 = f4.variables['Q_GDS4_ISBL']

        Z_00 = f1.variables['Z_GDS4_ISBL']
        Z_06 = f2.variables['Z_GDS4_ISBL']
        Z_12 = f3.variables['Z_GDS4_ISBL']
        Z_18 = f4.variables['Z_GDS4_ISBL']

        PV_00 = f1.variables['PV_GDS4_ISBL']
        PV_06 = f2.variables['PV_GDS4_ISBL']
        PV_12 = f3.variables['PV_GDS4_ISBL']
        PV_18 = f4.variables['PV_GDS4_ISBL']

        T_00 = f1.variables['T_GDS4_ISBL']
        T_06 = f2.variables['T_GDS4_ISBL']
        T_12 = f3.variables['T_GDS4_ISBL']
        T_18 = f4.variables['T_GDS4_ISBL']

        f1.close()
        f2.close()
        f3.close()
        f4.close()

        #-----------------------------------------------
        # If not interpolating point in the reanalysis,
        # find the closest lat/lon indices
        #-----------------------------------------------
        if not interpFlg:
            latind = findCls(lat[:], sLat)
            lonind = findCls(lon[:], sLon)

        #------------------------------------------------------
        # For each level interpolate height, specific humidity,
        # temperature, and PV to the site latitude/longitude
        #------------------------------------------------------
        Hgt_00 = np.zeros(nlvls)
        Hgt_06 = np.zeros(nlvls)
        Hgt_12 = np.zeros(nlvls)
        Hgt_18 = np.zeros(nlvls)

        Qint_00 = np.zeros(nlvls)
        Qint_06 = np.zeros(nlvls)
        Qint_12 = np.zeros(nlvls)
        Qint_18 = np.zeros(nlvls)

        Tint_00 = np.zeros(nlvls)
        Tint_06 = np.zeros(nlvls)
        Tint_12 = np.zeros(nlvls)
        Tint_18 = np.zeros(nlvls)

        PVint_00 = np.zeros(nlvls)
        PVint_06 = np.zeros(nlvls)
        PVint_12 = np.zeros(nlvls)
        PVint_18 = np.zeros(nlvls)

        for lvl in range(0, nlvls):
            HgtOneLvl_00 = np.squeeze(Z_00[lvl, :, :])
            HgtOneLvl_06 = np.squeeze(Z_06[lvl, :, :])
            HgtOneLvl_12 = np.squeeze(Z_12[lvl, :, :])
            HgtOneLvl_18 = np.squeeze(Z_18[lvl, :, :])

            Q_OneLvl_00 = np.squeeze(Q_00[lvl, :, :])
            Q_OneLvl_06 = np.squeeze(Q_06[lvl, :, :])
            Q_OneLvl_12 = np.squeeze(Q_12[lvl, :, :])
            Q_OneLvl_18 = np.squeeze(Q_18[lvl, :, :])

            T_OneLvl_00 = np.squeeze(T_00[lvl, :, :])
            T_OneLvl_06 = np.squeeze(T_06[lvl, :, :])
            T_OneLvl_12 = np.squeeze(T_12[lvl, :, :])
            T_OneLvl_18 = np.squeeze(T_18[lvl, :, :])

            PV_OneLvl_00 = np.squeeze(PV_00[lvl, :, :])
            PV_OneLvl_06 = np.squeeze(PV_06[lvl, :, :])
            PV_OneLvl_12 = np.squeeze(PV_12[lvl, :, :])
            PV_OneLvl_18 = np.squeeze(PV_18[lvl, :, :])

            if interpFlg:
                Hgt_00[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_00,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_06[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_06,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_12[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_12,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_18[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_18,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)

                Qint_00[lvl] = interp2d(lon[:],
                                        lat[:],
                                        Q_OneLvl_00,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Qint_06[lvl] = interp2d(lon[:],
                                        lat[:],
                                        Q_OneLvl_06,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Qint_12[lvl] = interp2d(lon[:],
                                        lat[:],
                                        Q_OneLvl_12,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Qint_18[lvl] = interp2d(lon[:],
                                        lat[:],
                                        Q_OneLvl_18,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)

                Tint_00[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_00,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_06[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_06,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_12[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_12,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_18[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_18,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)

                PVint_00[lvl] = interp2d(lon[:],
                                         lat[:],
                                         PV_OneLvl_00,
                                         kind='linear',
                                         bounds_error=True)(sLon, sLat)
                PVint_06[lvl] = interp2d(lon[:],
                                         lat[:],
                                         PV_OneLvl_06,
                                         kind='linear',
                                         bounds_error=True)(sLon, sLat)
                PVint_12[lvl] = interp2d(lon[:],
                                         lat[:],
                                         PV_OneLvl_12,
                                         kind='linear',
                                         bounds_error=True)(sLon, sLat)
                PVint_18[lvl] = interp2d(lon[:],
                                         lat[:],
                                         PV_OneLvl_18,
                                         kind='linear',
                                         bounds_error=True)(sLon, sLat)

            else:
                Hgt_00[lvl] = HgtOneLvl_00[latind, lonind]
                Hgt_06[lvl] = HgtOneLvl_06[latind, lonind]
                Hgt_12[lvl] = HgtOneLvl_12[latind, lonind]
                Hgt_18[lvl] = HgtOneLvl_18[latind, lonind]

                Qint_00[lvl] = Q_OneLvl_00[latind, lonind]
                Qint_06[lvl] = Q_OneLvl_06[latind, lonind]
                Qint_12[lvl] = Q_OneLvl_12[latind, lonind]
                Qint_18[lvl] = Q_OneLvl_18[latind, lonind]

                Tint_00[lvl] = T_OneLvl_00[latind, lonind]
                Tint_06[lvl] = T_OneLvl_06[latind, lonind]
                Tint_12[lvl] = T_OneLvl_12[latind, lonind]
                Tint_18[lvl] = T_OneLvl_18[latind, lonind]

                PVint_00[lvl] = PV_OneLvl_00[latind, lonind]
                PVint_06[lvl] = PV_OneLvl_06[latind, lonind]
                PVint_12[lvl] = PV_OneLvl_12[latind, lonind]
                PVint_18[lvl] = PV_OneLvl_18[latind, lonind]

        #-------------------------------------------------
        # Convert specific humidity from mass mixing ratio
        # [kg/kg] to volume mixing ratio [mol/mol]
        #-------------------------------------------------
        Qint_00 = Qint_00 * 1.608
        Qint_06 = Qint_06 * 1.608
        Qint_12 = Qint_12 * 1.608
        Qint_18 = Qint_18 * 1.608
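        #------------------------------------------------------
        # The factor 1.608 is the dry-air/water molar mass
        # ratio (~28.96/18.02); for small q this converts mass
        # mixing ratio to volume mixing ratio
        #------------------------------------------------------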

        #-------------------------------------
        # Create daily averages of 00,06,12,18
        #-------------------------------------
        Q_day = np.mean(np.vstack((Qint_00, Qint_06, Qint_12, Qint_18)),
                        axis=0)
        Z_day = np.mean(np.vstack((Hgt_00, Hgt_06, Hgt_12, Hgt_18)), axis=0)
        T_day = np.mean(np.vstack((Tint_00, Tint_06, Tint_12, Tint_18)),
                        axis=0)
        PV_day = np.mean(np.vstack((PVint_00, PVint_06, PVint_12, PVint_18)),
                         axis=0)

        #------------------------------------------------
        # Convert geopotential [m^2 s^-2] to geopotential
        # height [km]
        #------------------------------------------------
        Z_day = Z_day / 9.81 / 1000.0
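        #------------------------------------------------------
        # Illustrative check: a geopotential of 49050 m^2 s^-2
        # corresponds to 49050 / 9.81 / 1000.0 = 5.0 km
        #------------------------------------------------------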

        #---------------------------------------------------------
        # Construct specific humidity and height profiles for
        # interpolation onto the sfit input grid, which is the
        # same as the WACCM height grid in the monthly profile
        # file. The reanalysis profile does not reach the top of
        # that grid, so merge it with the monthly mean WACCM
        # water profile above the reanalysis top.
        #---------------------------------------------------------
        ERAtop = Z_day[0]
        topInd = np.argmin(abs(Z - ERAtop))  # Where the reanalysis top falls in the WACCM height grid

        #Zin  = np.concatenate( ( Z[0:(topInd-nSkip)]             , Z_day), axis=1 )
        #SHin = np.concatenate( ( waccmW[0:(topInd-nSkip),mnthInd], Q_day), axis=1 )


        Zin = np.concatenate((Z[0:(topInd - nSkip)], Z_day))

        SHin = np.concatenate((waccmW[0:(topInd - nSkip), mnthInd], Q_day))

        SHin_00 = np.concatenate((waccmW[0:(topInd - nSkip),
                                         mnthInd], Qint_00))
        SHin_06 = np.concatenate((waccmW[0:(topInd - nSkip),
                                         mnthInd], Qint_06))
        SHin_12 = np.concatenate((waccmW[0:(topInd - nSkip),
                                         mnthInd], Qint_12))
        SHin_18 = np.concatenate((waccmW[0:(topInd - nSkip),
                                         mnthInd], Qint_18))

        #--------------------------------------------------------------
        # Interpolate to specific humidity on WACCM grid. X data must
        # be increasing => flip dimensions and then flip back
        #--------------------------------------------------------------
        if logFlg:
            SHout = np.exp(
                np.flipud(
                    intrpUniSpl(np.flipud(Zin),
                                np.log(np.flipud(SHin)),
                                k=intrpOrder)(np.flipud(Z))))
            SHout_00 = np.exp(
                np.flipud(
                    intrpUniSpl(np.flipud(Zin),
                                np.log(np.flipud(SHin_00)),
                                k=intrpOrder)(np.flipud(Z))))
            SHout_06 = np.exp(
                np.flipud(
                    intrpUniSpl(np.flipud(Zin),
                                np.log(np.flipud(SHin_06)),
                                k=intrpOrder)(np.flipud(Z))))
            SHout_12 = np.exp(
                np.flipud(
                    intrpUniSpl(np.flipud(Zin),
                                np.log(np.flipud(SHin_12)),
                                k=intrpOrder)(np.flipud(Z))))
            SHout_18 = np.exp(
                np.flipud(
                    intrpUniSpl(np.flipud(Zin),
                                np.log(np.flipud(SHin_18)),
                                k=intrpOrder)(np.flipud(Z))))

        else:
            SHout = np.flipud(
                intrpUniSpl(np.flipud(Zin), np.flipud(SHin),
                            k=intrpOrder)(np.flipud(Z)))
            SHout_00 = np.flipud(
                intrpUniSpl(np.flipud(Zin), np.flipud(SHin_00),
                            k=intrpOrder)(np.flipud(Z)))
            SHout_06 = np.flipud(
                intrpUniSpl(np.flipud(Zin), np.flipud(SHin_06),
                            k=intrpOrder)(np.flipud(Z)))
            SHout_12 = np.flipud(
                intrpUniSpl(np.flipud(Zin), np.flipud(SHin_12),
                            k=intrpOrder)(np.flipud(Z)))
            SHout_18 = np.flipud(
                intrpUniSpl(np.flipud(Zin), np.flipud(SHin_18),
                            k=intrpOrder)(np.flipud(Z)))
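        # Note: intrpUniSpl is assumed here to wrap a 1-D spline fit
        # (e.g. scipy.interpolate.UnivariateSpline with s=0); the spline
        # requires strictly increasing x, hence the flipud calls, and
        # interpolating log(SH) keeps the output profile positive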

        #-----------------------------------------------
        # Remove any existing 6-hourly water files of
        # this version before writing new ones
        #-----------------------------------------------
        waterFiles_test = glob.glob(sngDir + 'w-120*' + verW)
        waterFiles_test = [
            i for i in waterFiles_test if len(os.path.basename(i)) > 10
        ]

        if len(waterFiles_test) >= 1:
            for f in waterFiles_test:
                os.remove(f)

        #---------------------
        # Write out water file at 00
        #---------------------
        with open(sngDir + 'w-120.' + YYYY + MM + DD + '.000000.' + verW_t,
                  'w') as fopen:
            fopen.write(
                '    1     H2O from ERA reanalysis and WACCM V6 monthly mean \n'
            )

            for row in segmnt(SHout_00, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #---------------------
        # Write out water file at 06
        #---------------------
        with open(sngDir + 'w-120.' + YYYY + MM + DD + '.060000.' + verW_t,
                  'w') as fopen:
            fopen.write(
                '    1     H2O from ERA reanalysis and WACCM V6 monthly mean \n'
            )

            for row in segmnt(SHout_06, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #---------------------
        # Write out water file at 12
        #---------------------
        with open(sngDir + 'w-120.' + YYYY + MM + DD + '.120000.' + verW_t,
                  'w') as fopen:
            fopen.write(
                '    1     H2O from ERA reanalysis and WACCM V6 monthly mean \n'
            )

            for row in segmnt(SHout_12, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #---------------------
        # Write out water file at 18
        #---------------------
        with open(sngDir + 'w-120.' + YYYY + MM + DD + '.180000.' + verW_t,
                  'w') as fopen:
            fopen.write(
                '    1     H2O from ERA reanalysis and WACCM V6 monthly mean \n'
            )

            for row in segmnt(SHout_18, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #---------------------
        # Write out water file Daily
        #---------------------
        with open(sngDir + 'w-120.' + verW, 'w') as fopen:
            fopen.write(
                '    1     H2O from ERA reanalysis and WACCM V6 monthly mean \n'
            )

            for row in segmnt(SHout, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #------------------------------
        # Write out Temperature, water,
        # and PV every 6 hours
        #------------------------------
        with open(sngDir + 'ERA_Interm_PV.' + YYYY + MM + DD, 'w') as fopen:
            fopen.write(
                'Daily Averaged and 6 hourly PV from ERA Interim [K m^2 kg^-1 s^-1] \n'
            )
            fopen.write(
                '{0:>20s}{1:>20s}{2:>20s}{3:>20s}{4:>20s}{5:>20s}\n'.format(
                    'Height [km]', 'PV[K m^2 kg^-1 s^-1]', 'PV at 00',
                    'PV at 06', 'PV at 12', 'PV at 18'))

            for row in zip(*(Z_day, PV_day, PVint_00, PVint_06, PVint_12,
                             PVint_18)):
                strformat = ','.join('{:>20.7E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        with open(sngDir + 'ERA_Interm_T.' + YYYY + MM + DD, 'w') as fopen:
            fopen.write(
                'Daily averaged and 6 Hourly Temperature from ERA Interim [K] \n'
            )
            fopen.write(
                '{0:>15s}{1:>15s}{2:>15s}{3:>15s}{4:>15s}{5:>15s}\n'.format(
                    'Height [km]', 'T [K]', 'T at 00', 'T at 06', 'T at 12',
                    'T at 18'))

            for row in zip(*(Z_day, T_day, Tint_00, Tint_06, Tint_12,
                             Tint_18)):
                strformat = ','.join('{:>15.7E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        with open(sngDir + 'ERA_Interm_Q.' + YYYY + MM + DD, 'w') as fopen:
            fopen.write(
                'Daily averaged and 6 hourly specific humidity Q from ERA Interim [VMR] \n'
            )
            fopen.write(
                '{0:>15s}{1:>15s}{2:>15s}{3:>15s}{4:>15s}{5:>15s}\n'.format(
                    'Height [km]', 'Q [VMR]', 'Q at 00', 'Q at 06', 'Q at 12',
                    'Q at 18'))

            for row in zip(*(Z_day, Q_day, Qint_00, Qint_06, Qint_12,
                             Qint_18)):
                strformat = ','.join('{:>15.7E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #--------------------
        # Create plots to pdf
        #--------------------
        pdfsav = PdfPages(sngDir + 'WaterProfile_ERA.pdf')

        fig1, ax1 = plt.subplots()
        ax1.plot(SHout_00, Z, color='green', label='Interpolated SH-00')
        ax1.plot(SHout_06, Z, color='gold', label='Interpolated SH-06')
        ax1.plot(SHout_12, Z, color='cyan', label='Interpolated SH-12')
        ax1.plot(SHout_18, Z, color='gray', label='Interpolated SH-18')

        ax1.plot(SHout, Z, 'rx-', label='Interpolated SH-Mean')
        ax1.plot(Q_day, Z_day, 'kx-', label='ERA Reanalysis SH-Mean')
        ax1.plot(waccmW[:, mnthInd], Z, 'bx-', label='WACCM V6 SH')
        ax1.grid(True, which='both')
        ax1.legend(prop={'size': 9})
        ax1.set_ylabel('Altitude [km]')
        ax1.set_xlabel('VMR [ppv]')
        ax1.tick_params(axis='x', which='both', labelsize=8)
        ax1.set_ylim((Z[-1], 80))
        #ax1.set_xlim((0,np.max((waccmW[-1,mnthInd],Q_day[-1]))))
        ax1.set_title(YYYY + '-' + MM + '-' + DD)

        pdfsav.savefig(fig1, dpi=250)

        fig2, ax2 = plt.subplots()
        ax2.plot(SHout_00, Z, color='green', label='Interpolated SH-00')
        ax2.plot(SHout_06, Z, color='gold', label='Interpolated SH-06')
        ax2.plot(SHout_12, Z, color='cyan', label='Interpolated SH-12')
        ax2.plot(SHout_18, Z, color='gray', label='Interpolated SH-18')

        ax2.plot(SHout, Z, 'rx-', label='Interpolated SH-Mean')
        ax2.plot(Q_day, Z_day, 'kx-', label='ERA Reanalysis SH-Mean')
        ax2.plot(waccmW[:, mnthInd], Z, 'bx-', label='WACCM V6 SH')
        ax2.grid(True, which='both')
        ax2.legend(prop={'size': 9})
        ax2.set_ylabel('Altitude [km]')
        ax2.set_xlabel('log VMR [ppv]')
        ax2.tick_params(axis='x', which='both', labelsize=8)
        ax2.set_xscale('log')
        ax2.set_ylim((Z[-1], 80))
        #ax2.set_xlim((0,np.max((waccmW[-1,mnthInd],Q_day[-1]))))
        ax2.set_title(YYYY + '-' + MM + '-' + DD)

        pdfsav.savefig(fig2, dpi=250)

        pdfsav.close()

        print 'Finished processing folder: {}'.format(sngDir)
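# Note: `segmnt` is used above but not shown in this example. A minimal
# sketch of a chunking helper with the assumed behavior (split a sequence
# into fixed-size rows for formatted output):
def segmnt(seq, n):
    # Yield successive n-sized chunks of seq (assumed behavior)
    for i in range(0, len(seq), n):
        yield seq[i:i + n]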
Example #8
def main(argv):
    
    #-----------------------------
    # Initializations and defaults
    #-----------------------------
    ckopus = '/data/bin/ckopus'          # Default path to the ckopus executable. Used by the command-line
                                         # option that just creates a list of folders with OPUS data
    datapath = False
    
    #---------------------------------#
    # Retrieve command line arguments #
    #---------------------------------#
    #------------------------------------------------------------------------------------------------------------#
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:D:')

    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit()
        
    #-----------------------------
    # Parse command line arguments
    #-----------------------------
    for opt, arg in opts:
        
        #-----------
        # Input file
        #-----------
        if opt == '-i':           
            inputFile = arg
            
            # Check if file exists
            ckFile(inputFile)
    
        #-------------------------------
        # Option to just create a list
        # of folders that have OPUS files
        #-------------------------------
        elif opt == '-D':
            datapath = arg
            
            # check if '/' is included at end of path
            if not( datapath.endswith('/') ):
                datapath += '/'
                
            # Check if directory exists
            ckDir(datapath)
            
            print 'Searching for ckopus executable file specified in mkSpecDB.py'
            print 'If not found, please change path under Initializations and defaults in python program'
            ckFile(ckopus)                       # Check if ckopus executable file given above under Initializations and defaults
                                                 # exists
            print 'ckopus executable found'                                     

        #------------------
        # Unhandled options
        #------------------
        else:
            print 'Unhandled option: ' + opt
            usage()
            sys.exit()
    #------------------------------------------------------------------------------------------------------------#                       

    #------------------------------------
    # If the option to just create a list
    # of folders with data was given,
    # execute the following
    #------------------------------------
    if datapath:
        fname  = datapath + 'Fldrs_with_OPUS_' + dt.datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.list'
        
        with open(fname,'w') as fopen:
            for dirs in os.walk(datapath).next()[1]:
                                        
                #--------------------------------------------------------
                # Gather all files within the day directory and test
                # whether they are OPUS files. A file that is not OPUS
                # type kicks back a return code of 1 or 3 when the -C
                # option is used
                #--------------------------------------------------------
                fnames = findFiles(datapath+dirs)          
                for indvfile in fnames:
                    if checkOPUS(ckopus,indvfile):                     
                        fopen.write('%s\n' % dirs)
                        break
                    
        print 'Finished creating processed folder list....'
        sys.exit()
        
    #----------------
    # Read input file
    #----------------
    DBinputs = {}
    execfile(inputFile, DBinputs)
    if '__builtins__' in DBinputs:
        del DBinputs['__builtins__']       
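    # Note (Python 2): execfile runs the input file inside the DBinputs
    # namespace; under Python 3 the equivalent would be
    #   with open(inputFile) as f: exec(f.read(), DBinputs)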
        
    #-----------------------------------
    # Check the existence of directories
    # and files given in the input file
    #-----------------------------------
    #-----------------------------------
    # Directory for days processed file
    if DBinputs['DaysProcFlg']:                            
        if not ckDir(DBinputs['DaysProcDir']):
            sys.exit()
        # check if '/' is included at end of path
        if not( DBinputs['DaysProcDir'].endswith('/') ):
            DBinputs['DaysProcDir'] = DBinputs['DaysProcDir'] + '/'    
            
    # Base directory for data    
    if not ckDir(DBinputs['dataBaseDir']):                     
        sys.exit()
    # check if '/' is included at end of path
    if not( DBinputs['dataBaseDir'].endswith('/') ):
        DBinputs['dataBaseDir'] = DBinputs['dataBaseDir'] + '/'          
        
    # ckopus executable file    
    ckFile(DBinputs['Fckopus'])                            

    # Directory of output spectral database file
    if not ckDir(os.path.dirname(DBinputs['outputDBfile'])):   
        sys.exit()
                          
    #-------------------
    # Call to date class
    #-------------------
    DOI      = sc.DateRange(DBinputs['iyear'],DBinputs['imnth'],DBinputs['iday'],      # Create a dateRange instance object
                         DBinputs['fyear'],DBinputs['fmnth'],DBinputs['fday'])      
    daysList = DOI.dateList                                                         # Create a list of days within date range
    
    #---------------------------------------------
    # Determine if spectral DB file already exists
    #---------------------------------------------
    if os.path.isfile(DBinputs['outputDBfile']):
        wmode = 'a'
    else:
        wmode = 'w'
    
    #--------------------------------
    # Open Spectral DB file for write
    #--------------------------------   
    with open(DBinputs['outputDBfile'],wmode) as fopen:        
        #--------------------------------------------
        # Get the DB header from ckopus -H; write it
        # out only if the spectral DB file is new
        #--------------------------------------------
        rtn = sp.Popen( [DBinputs['Fckopus'],'-H'], stdout=sp.PIPE, stderr=sp.PIPE )
        stdoutHeader, stderr = rtn.communicate()        
        outstrHdr            = stdoutHeader.strip().split()

        if (wmode == 'w'):                       
            strformat = ['{0:<15}'] + [' {'+str(i)+':<12}' for i in range(1,len(outstrHdr))]
            strformat = ''.join(strformat).lstrip().rstrip() + '\n'            
            
            fopen.write(strformat.format(*outstrHdr))
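            # e.g. for a 3-column header the format built above expands
            # to '{0:<15} {1:<12} {2:<12}\n' (first column is wider)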
            
        #-----------------------------------------    
        # Search each day directory for data files 
        #-----------------------------------------
        # Initialize process list
        procList = []
        for indvday in daysList:
                        
            # Find year month and day strings
            yrstr   = "{0:02d}".format(indvday.year)
            mnthstr = "{0:02d}".format(indvday.month)
            daystr  = "{0:02d}".format(indvday.day)  
            yyyymmddstr = yrstr + mnthstr + daystr
            
            dayDir = DBinputs['dataBaseDir'] + yyyymmddstr + '/'
            
            if not ckDir(dayDir):
                continue
            
            #------------------------------------------
            # Gather all files within the day directory
            # to test if these are OPUS files
            #------------------------------------------
            fnames = findFiles(dayDir)
                  
            #-----------------------------
            # Loop through all files found
            #-----------------------------
            for indvfile in fnames:
                
                #--------------------------------------------------------
                # Test if the file is OPUS type. If the file is not OPUS
                # type, a return code of 1 or 3 is kicked back when the
                # -C option is used
                #--------------------------------------------------------
                if not checkOPUS(DBinputs['Fckopus'],indvfile):
                    continue
                
                procList.extend([os.path.dirname(indvfile)])
                
                #--------------------------------------
                # For found spectral data files run 
                # ckopus with -D option to get one line 
                # parameter list for database
                #--------------------------------------
                if ( DBinputs['SBlockType'] and isinstance(DBinputs['SBlockType'],str) ):
                    SBlockTemp = DBinputs['SBlockType']
                else:
                    SBlockTemp = 'NONE'                 
                    
                paramList = [DBinputs['Fckopus'],'-S'+DBinputs['loc'],'-D'+SBlockTemp]   # Build initial parameter list for ckopus call
                paramList.extend(DBinputs['ckopusFlgs'])                                 # Add flags from input file to parameter list
                paramList.append(indvfile)                                               # Add OPUS filename to parameter list
                
                #if (DBinputs['loc'].lower() == 'mlo') and (indvday < dt.date(1995,01,01)):
                    #paramList = [DBinputs['Fckopus'],'-S'+DBinputs['loc'],'-U','-t-150',indvfile]
                #else:    
                    #paramList = [DBinputs['Fckopus'],'-S'+DBinputs['loc'],'-D',indvfile]
                    
                rtn = sp.Popen( paramList, stdout=sp.PIPE, stderr=sp.PIPE )
                stdoutParam, stderr = rtn.communicate()
                
                # Some OPUS files may not contain any data and therefore
                # pass back an error
                if 'error' in stdoutParam or not(stdoutParam):
                    continue
                                
                #-----------------------------------------------------
                # Determine if the OPUS file is in the correct date
                # folder; if not, move it to the correct folder. This
                # does not guarantee the misplaced file is processed:
                # if the correct directory has already been processed,
                # the file will not be picked up (needs fixing!)
                #-----------------------------------------------------
                # Date from ckopus
                opusFileDate = stdoutParam.strip().split()[5]
                
                # Compare with current directory date
                if opusFileDate == yyyymmddstr: 
                    #---------------------------------------------------
                    # Parse output from ckopus to write to database file
                    # and remove path from filename/path
                    #---------------------------------------------------
                    outstr    = stdoutParam.strip().split()
                    outstr[0] = os.path.split(outstr[0])[1]
                    strformat = ['{0:<15}'] + [' {'+str(i)+':<12}' for i in range(1,len(outstr))]
                    strformat = ''.join(strformat).lstrip().rstrip() + '\n'
                                
                    fopen.write(strformat.format(*outstr))
                                    
                    #fopen.write( stdoutParam )
                else:
                    newDayDir = DBinputs['dataBaseDir'] + opusFileDate + '/'
                    print 'File: ' + dayDir + os.path.basename(indvfile) + ' In wrong directory. Moving to: ' + newDayDir + os.path.basename(indvfile)
                    if not ckDir(newDayDir):
                        print 'Creating directory: ' + newDayDir
                        os.mkdir(newDayDir)
                    shutil.move(dayDir + os.path.basename(indvfile),
                                newDayDir + os.path.basename(indvfile))
                    continue
                
                #-----------------------------------
                # Run ckopus to create bnr file type
                # Grab SBlock and TStamp to convert 
                # to bnr type
                #-----------------------------------
                if DBinputs['bnrWriteFlg']:
                    # Get SBlock and TStamp from ckopus -D <file>
                    if not ('SBlock' in locals()):
                        stdoutHeader = stdoutHeader.strip().split()
                        indSBlock    = stdoutHeader.index('SBlock')
                        indTStamp    = stdoutHeader.index('TStamp')
                        
                    singleDBline = stdoutParam.strip().split()
                    TStamp       = singleDBline[indTStamp]
                    
                    if ( DBinputs['SBlockType'] and isinstance(DBinputs['SBlockType'],str) ):
                        SBlock = DBinputs['SBlockType']
                    else:
                        SBlock = singleDBline[indSBlock] 
                    
                    #----------------------------------
                    # Run ckopus to convert to bnr type
                    #----------------------------------
                    #--------------------------------------------
                    # Make sure cwd is same location as OPUS file
                    #--------------------------------------------
                    fpath,_ = os.path.split(indvfile)
                    
                    if not fpath == os.getcwd():
                        os.chdir(fpath)
                        
                    #-------------------------------------------
                    # For mlo prior to 1995 a utc offset must be
                    # applied through ckopus flags
                    #-------------------------------------------
                    paramList = [DBinputs['Fckopus'],'-S'+DBinputs['loc']]   # Build initial parameter list for ckopus call
                    paramList.append('-' + DBinputs['bnrType'] + SBlock)     # Add bnr and spectral block type
                    paramList.extend(DBinputs['ckopusFlgs'])                 # Add flags from input file to parameter list
                    paramList.append(indvfile)                               # Add OPUS filename to parameter list                    
                    
                    
                    #if (DBinputs['loc'].lower() == 'mlo') and (indvday < dt.date(1995,01,01)):
                        #paramList    = [DBinputs['Fckopus'],'-S'+DBinputs['loc'],'-U','-t-150','-'+DBinputs['bnrType']+SBlock,indvfile]
                    #else:    
                        #paramList    = [DBinputs['Fckopus'],'-S'+DBinputs['loc'],'-'+DBinputs['bnrType']+SBlock,indvfile]
                    
                    rtn = sp.Popen( paramList, stdout=sp.PIPE, stderr=sp.PIPE )
                    stdoutParam, stderr = rtn.communicate()       
                    
                    #-----------------------------------------------------
                    # Find name of file. This may not correspond to SBlock
                    # name if SBlock name is set to NONE. Name of file
                    # is printed in stderr of subprocess
                    #-----------------------------------------------------
                    ind = stderr.find('Closed bnr file:')
                    bnrFname = stderr[ind:].strip().split()[-1]
                                        
                    #------------------------------------------------
                    # Change name of file to correspond to time stamp
                    #------------------------------------------------
                    if os.path.isfile('/'.join([fpath,bnrFname])):
                        shutil.move(fpath+'/'+bnrFname, fpath+'/'+TStamp+'.bnr')
                    else:
                        print 'Unable to move file: %s to %s' %(indvfile,fpath+'/'+TStamp+'.bnr')
                    
    #------------------------------------------
    # Write list of folders that were processed
    #------------------------------------------
    if DBinputs['DaysProcFlg']:
        #------------------------------        
        # Create a unique ordered set
        #------------------------------
        procList = list(set(procList))    
        procList.sort()
        fProcname = DBinputs['DaysProcDir'] + 'FldrsProc_' + dt.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')+'.list'
        with open(fProcname,'w') as fopen:
            for item in procList:
                fopen.write('%s\n' % item)
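# Note: `checkOPUS` is referenced above but not shown. Based on the comment
# that a non-OPUS file kicks back a return code of 1 or 3 when the -C
# option is used, a sketch with the assumed behavior might look like this:
import subprocess as sp

def checkOPUS(ckopus, fname):
    # Run 'ckopus -C <file>' and treat return codes 1 and 3 as "not OPUS"
    rtn = sp.Popen([ckopus, '-C', fname], stdout=sp.PIPE, stderr=sp.PIPE)
    rtn.communicate()
    return rtn.returncode not in (1, 3)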
Example #9
def main(argv):

    #---------------------------------#
    # Retrieve command line arguments #
    #---------------------------------#
    #------------------------------------------------------------------------------------------------------------#
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'i:D:')

    except getopt.GetoptError as err:
        print str(err)
        usage()
        sys.exit()

    #-----------------------------
    # Parse command line arguments
    #-----------------------------
    for opt, arg in opts:

        #-----------
        # Input file
        #-----------
        if opt == '-i':
            inputFile = arg

        #------------------
        # Unhandled options
        #------------------
        else:
            print 'Unhandled option: ' + opt
            usage()
            sys.exit()
    #------------------------------------------------------------------------------------------------------------#

    #----------------
    # Read input file
    #----------------
    # Input file instance
    DBinputs = sc.Layer1InputFile(inputFile)
    DBinputs.getInputs()

    #-----------------------------------
    # Check the existence of directories
    # and files given in the input file
    #-----------------------------------
    #-----------------------------------
    # Directory for days processed file
    if DBinputs.inputs['DaysProcFlg']:
        ckDir(DBinputs.inputs['DaysProcDir'], exit=True)

        # check if '/' is included at end of path
        if not (DBinputs.inputs['DaysProcDir'].endswith('/')):
            DBinputs.inputs[
                'DaysProcDir'] = DBinputs.inputs['DaysProcDir'] + '/'

    # Base directory for data
    ckDir(DBinputs.inputs['dataBaseDir'], exit=True)

    # check if '/' is included at end of path
    if not (DBinputs.inputs['dataBaseDir'].endswith('/')):
        DBinputs.inputs['dataBaseDir'] = DBinputs.inputs['dataBaseDir'] + '/'

    # Check Coadd executable file
    ckFile(DBinputs.inputs['coaddex'], exit=True)

    # Check input spectral database file
    ckFile(DBinputs.inputs['inputDBfile'], exit=True)

    # Directory of output spectral database file
    ckDir(os.path.dirname(DBinputs.inputs['outputDBfile']), exit=True)

    #-------------------------------------
    # Read original spectral database file
    #-------------------------------------
    OrgSpecDB = sc.DbInputFile(DBinputs.inputs['inputDBfile'])
    OrgSpecDB.getInputs()

    #------------------------------------
    # Initialize processed directory list
    #------------------------------------
    procList = []

    #--------------------------------------
    # Open Coadd Spectral DB file for write
    #--------------------------------------
    with open(DBinputs.inputs['outputDBfile'], 'w') as fopen:
        #----------------------------------------
        # Write header to Coadd Spectral DataBase
        # This should be the same as the Original
        # spectral database
        #----------------------------------------
        with open(DBinputs.inputs['inputDBfile'], 'r') as fopen2:
            firstLn = fopen2.readline()

        #-----------------------------------------------
        # Coadded spectra come from two files, so add a
        # Filename1 column and rename the existing
        # Filename column to Filename2
        #-----------------------------------------------
        #-----------------------------------------------
        firstLn = 'Filename1        ' + firstLn.replace(
            'Filename', 'Filename2')
        fopen.write(firstLn)

        #----------------------------------------------
        # Loop through entries in the original spectral
        # database to find coadded files
        #----------------------------------------------
        for ind, fName in enumerate(OrgSpecDB.dbInputs['Filename']):

            #------------------------------------------------
            # Check if this is a potential file for coadding:
            #  -- The number of FLSCN must be half of EXSCN
            #------------------------------------------------
            flscn = int(OrgSpecDB.dbInputs['FLSCN'][ind])
            exscn = int(OrgSpecDB.dbInputs['EXSCN'][ind])
            gfw = int(OrgSpecDB.dbInputs['GFW'][ind])
            gbw = int(OrgSpecDB.dbInputs['GBW'][ind])

            #if (exscn/flscn != 2 ): continue
            if (np.true_divide(exscn, flscn) != 2): continue
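            # Note: under Python 2, exscn / flscn is integer division
            # (e.g. 5 / 2 == 2, which would wrongly pass this test);
            # np.true_divide(5, 2) == 2.5 correctly fails it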

            #------------------------------------------------------
            # Extract the base file name and the date for filtering
            #------------------------------------------------------
            baseName = OrgSpecDB.dbInputs['Filename'][ind].strip().split(
                '.')[0]
            extName = OrgSpecDB.dbInputs['Filename'][ind].strip().split('.')[1]
            yyyymmdd = str(int(OrgSpecDB.dbInputs['Date'][ind]))
            yearstr = yyyymmdd[0:4]
            monthstr = yyyymmdd[4:6]
            daystr = yyyymmdd[6:]
            curDate = dt.date(int(yearstr), int(monthstr), int(daystr))

            #-------------------------------------------------------
            # Construct the bnr file name of the first file to coadd
            #-------------------------------------------------------
            hh1 = OrgSpecDB.dbInputs['Time'][ind][0:2]
            mm1 = OrgSpecDB.dbInputs['Time'][ind][3:5]
            ss1 = OrgSpecDB.dbInputs['Time'][ind][6:]
            bnrFname1 = hh1 + mm1 + ss1 + '.bnr'

            #-----------------------------------------------
            # Construct directory location of bnr files from
            # date and input file information
            #-----------------------------------------------
            dayDir = DBinputs.inputs[
                'dataBaseDir'] + yearstr + monthstr + daystr + '/'
            ckDir(dayDir, exit=True)

            procList.append(dayDir)

            #----------------------------------------------------
            # Filter original specDB based on current day of file
            #----------------------------------------------------
            inDateRange = sc.DateRange(int(yearstr), int(monthstr),
                                       int(daystr), int(yearstr),
                                       int(monthstr), int(daystr))
            flt1_OrgSpecDB = OrgSpecDB.dbFilterDate(inDateRange)

            #---------------------------------------------
            # Find filenames that match base and have the
            # extension of (n+1), where n is the extension
            # of the original filename
            #---------------------------------------------
            # Construct file name of coadded partner

            newFname = baseName + '.' + str(int(extName) + 1)
            indFind = [
                i for i, dum in enumerate(flt1_OrgSpecDB['Filename'])
                if dum.endswith(newFname)
            ]
            if not indFind: continue
            if len(indFind) > 1:
                print 'More than one match found for: ' + newFname + ' Date: ' + yearstr + monthstr + daystr + ' ERROR!!'
                sys.exit()
            indFind = indFind[0]

            #-----------------------------------------
            # Check if this a valid coadded file pair:
            # Number of GFW in first file must equal
            # the number of GBW in second file
            #-----------------------------------------
            if (OrgSpecDB.dbInputs['GFW'][ind] !=
                    flt1_OrgSpecDB['GBW'][indFind]):
                continue

            #--------------------------------------------------------
            # Construct the bnr file name of the second file to coadd
            #--------------------------------------------------------
            hh2 = flt1_OrgSpecDB['Time'][indFind][0:2]
            mm2 = flt1_OrgSpecDB['Time'][indFind][3:5]
            ss2 = flt1_OrgSpecDB['Time'][indFind][6:]
            bnrFname2 = hh2 + mm2 + ss2 + '.bnr'

            #---------------------------------------------------------
            # Check if the second coadded file is within 10 minutes of
            # the first. This will ensure that the files are pairs
            #---------------------------------------------------------
            time1 = float(hh1) * 60.0 + float(mm1) + float(ss1) / 60.0
            time2 = float(hh2) * 60.0 + float(mm2) + float(ss2) / 60.0
            if abs(time2 - time1) > 10: continue
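            # time1/time2 are minutes since midnight (hh*60 + mm + ss/60);
            # note a pair spanning midnight would fail this 10-minute check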

            #------------------------------------------------------------
            # Construct the coadd input file in the appropriate directory
            #------------------------------------------------------------
            with open(dayDir + 'coad.i', 'w') as coaddInput:
                coaddInput.write('2\n')
                coaddInput.write(dayDir + bnrFname1 + '\n')
                coaddInput.write(dayDir + bnrFname2 + '\n')

            #--------------------------------------------------
            # Call coadd executable, rename the bnr file, and
            # read the coadd output file for the coadd spectral
            # database. Make sure cwd is the same location as
            # the bnr files
            #--------------------------------------------------
            if not dayDir == os.getcwd():
                os.chdir(dayDir)

            paramList = [
                DBinputs.inputs['coaddex'], '-S' + DBinputs.inputs['loc'],
                '-TOPU', '-c'
            ]
            rtn = sp.Popen(paramList, stdout=sp.PIPE, stderr=sp.PIPE)
            stdoutCom, stderrCom = rtn.communicate()

            #----------------------------
            # Read output file from coadd
            #----------------------------
            with open(dayDir + 'coad.out', 'r') as fopen3:
                coadOut = fopen3.readlines()

            #--------------------------------------
            # Check if coad ran successfully. A
            # successful run ends coad.err with the
            # line: Closed bnr file: temp.bnr
            #--------------------------------------
            with open(dayDir + 'coad.err', 'r') as fopenCerr:
                coadErr = fopenCerr.readlines()
            if (not coadErr) or (not 'Closed bnr file: temp.bnr'
                                 in coadErr[-1]):
                print 'Error processing coad files for {}'.format(dayDir)
                print coadErr
                continue

            szaOut = coadOut[0].strip()
            opdOut = coadOut[1].strip()
            fovOut = coadOut[2].strip()
            apdOut = coadOut[3].strip()
            lwavOut = coadOut[4].strip()
            uwavOut = coadOut[5].strip()
            TstampOut = coadOut[6].strip()
            hdrStrOut = coadOut[7].strip()
            azmOut = coadOut[8].strip()
            durOut = coadOut[9].strip()
            roeOut = coadOut[10].strip()
            maxYout = coadOut[11].strip()
            minYout = coadOut[12].strip()

            yearNew = TstampOut.split()[0]
            monthNew = TstampOut.split()[1]
            dayNew = TstampOut.split()[2]
            hhNew = TstampOut.split()[3]
            mmNew = TstampOut.split()[4]
            ssNew = TstampOut.split()[5].split('.')[0]
            dateNew = yearNew + monthNew + dayNew
            TstampNew = hhNew + mmNew + ssNew
            TstampNewf = hhNew + ':' + mmNew + ':' + ssNew

            #-----------------------------------------------
            # Change name of new bnr file based on timestamp
            #-----------------------------------------------
            if os.path.isfile(dayDir + 'temp.bnr'):
                shutil.move(dayDir + 'temp.bnr', dayDir + TstampNew + '.bnrc')
            else:
                print 'Unable to move file: %s to %s' % (
                    dayDir + 'temp.bnr', dayDir + TstampNew + '.bnrc')

            #------------------------------------------------------------------------
            # Find the averages for surface temperature, pressure and RH measurements
            #------------------------------------------------------------------------
            # Check if any measurements are missing
            HouseTemp1 = OrgSpecDB.dbInputs['HouseTemp'][ind]
            HouseTemp2 = flt1_OrgSpecDB['HouseTemp'][indFind]
            HousePres1 = OrgSpecDB.dbInputs['HousePres'][ind]
            HousePres2 = flt1_OrgSpecDB['HousePres'][indFind]
            HouseRH1 = OrgSpecDB.dbInputs['HouseRH'][ind]
            HouseRH2 = flt1_OrgSpecDB['HouseRH'][indFind]

            ExtStatTemp1 = OrgSpecDB.dbInputs['ExtStatTemp'][ind]
            ExtStatTemp2 = flt1_OrgSpecDB['ExtStatTemp'][indFind]
            ExtStatPres1 = OrgSpecDB.dbInputs['ExtStatPres'][ind]
            ExtStatPres2 = flt1_OrgSpecDB['ExtStatPres'][indFind]
            ExtStatRH1 = OrgSpecDB.dbInputs['ExtStatRH'][ind]
            ExtStatRH2 = flt1_OrgSpecDB['ExtStatRH'][indFind]

            HouseTempNew = findAvgTPRH(HouseTemp1, HouseTemp2, -9999)
            HousePresNew = findAvgTPRH(HousePres1, HousePres2, -9999)
            HouseRHNew = findAvgTPRH(HouseRH1, HouseRH2, -99)
            ExtStatTempNew = findAvgTPRH(ExtStatTemp1, ExtStatTemp2, -9999)
            ExtStatPresNew = findAvgTPRH(ExtStatPres1, ExtStatPres2, -9999)
            ExtStatRHNew = findAvgTPRH(ExtStatRH1, ExtStatRH2, -99)

            #--------------------------------------------
            # Write new line to coadded spectral database
            #--------------------------------------------
            # [ OPUS_filename1, OPUS_filename2, Site, SBlock, TOffs, TStamp, Date, Time, SNR, N_Lat, W_Lon, Alt, SAzm,
            #   SZen, ROE, Dur, Reso, Apd, FOV, LWN, HWN, Flt, MaxY, MinY, FLSCN, EXSCN, GFW, GBW, HouseTemp, HousePres,
            #   HouseRH, ExtStatTemp, ExtStatPres, ExtStatRH, Ext_Solar_Sens, Quad_Sens, Det_Intern_T_Swtch              ]

            try:
                Flt = int(OrgSpecDB.dbInputs['Flt'][ind])
            except ValueError:
                Flt = OrgSpecDB.dbInputs['Flt'][ind]

            coaddLine = [
                OrgSpecDB.dbInputs['Filename'][ind], newFname,
                OrgSpecDB.dbInputs['Site'][ind],
                OrgSpecDB.dbInputs['SBlock'][ind], '-999', TstampNew, dateNew,
                TstampNewf, '-999', OrgSpecDB.dbInputs['N_Lat'][ind],
                OrgSpecDB.dbInputs['W_Lon'][ind],
                OrgSpecDB.dbInputs['Alt'][ind], azmOut, szaOut, roeOut, durOut,
                OrgSpecDB.dbInputs['Reso'][ind], apdOut, fovOut, lwavOut,
                uwavOut, Flt, maxYout, minYout, '-999',
                int(OrgSpecDB.dbInputs['EXSCN'][ind]),
                int(OrgSpecDB.dbInputs['GFW'][ind]),
                int(flt1_OrgSpecDB['GBW'][indFind]), HouseTempNew,
                HousePresNew, HouseRHNew, ExtStatTempNew, ExtStatPresNew,
                ExtStatRHNew, '-9999', '-9999', '-9999'
            ]

            strformat = ['{0:<17}', '{1:<15}'] + [
                ' {' + str(i) + ':<12}' for i in range(2, len(coaddLine))
            ]
            strformat = ''.join(strformat).lstrip().rstrip() + '\n'

            fopen.write(strformat.format(*coaddLine))

            #----------------------------------------------
            # Remove the second coadded file from the
            # original spectral database dictionary so we
            # do not loop over it again. First find where
            # the second file maps into the original
            # spectral DB dictionary
            #----------------------------------------------
            # Loop through the original dictionary
            indMap = [i for i,(Tstamp,date) in enumerate(itertools.izip(OrgSpecDB.dbInputs['TStamp'],OrgSpecDB.dbInputs['Date'])) \
                      if (flt1_OrgSpecDB['TStamp'][indFind] == Tstamp and flt1_OrgSpecDB['Date'][indFind] == date)]
            indMap = indMap[0]
            # Remove index from original DB dictionary
            for k in OrgSpecDB.dbInputs:
                del OrgSpecDB.dbInputs[k][indMap]

    #------------------------------------------
    # Write list of folders that were processed
    #------------------------------------------
    if DBinputs.inputs['DaysProcFlg']:
        #------------------------------
        # Create a unique ordered set
        #------------------------------
        procList = list(set(procList))
        procList.sort()
        fProcname = DBinputs.inputs[
            'DaysProcDir'] + 'FldrsProc_' + dt.datetime.now().strftime(
                '%Y-%m-%d_%H-%M-%S') + '.list'
        with open(fProcname, 'w') as fopen:
            for item in procList:
                fopen.write('%s\n' % item)
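# Note: `findAvgTPRH` is used above but not shown. Judging from the calls
# (two readings plus a missing-value flag of -9999 or -99), a sketch with
# the assumed behavior:
def findAvgTPRH(val1, val2, missingFlg):
    # Average the readings that are not flagged as missing (assumed)
    vals = [float(v) for v in (val1, val2) if float(v) != missingFlg]
    if not vals:
        return missingFlg
    return sum(vals) / len(vals)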
Example #10
def main():

    #---------
    # Location
    #---------
    loc = 'tab'

    #-----------------------------------------
    # Interpolation flag for ERA reanalysis:
    # False = Nearest point. True = linear
    #-----------------------------------------
    interpFlg = False

    #------------------------
    # Interpolation settings
    #------------------------
    nSkip = 3  # Number of points to skip when merging WACCM and reanalysis profiles
    intrpOrder = 1  # Order of interpolation
    logFlg = True  # Flag to interpolate the log of water

    #-----------------------
    # Date Range of interest
    #-----------------------
    year = 2014

    iyear = year
    imnth = 1
    iday = 1
    fyear = year
    fmnth = 12
    fday = 31

    #-------------------------------------
    # Parameters for potential temperature
    #-------------------------------------
    thetaTrgt = 380.0  # Potential Temperature [K]
    P0 = 1000.0  # Reference pressure [mbar]
    R_Cp = 0.286  # R/Cp for air

    #---------------------------
    # ERA Interim data directory
    #---------------------------
    ERAdir = '/Volumes/data1/ebaumer/ERAdata/'

    #--------------
    # Out directory
    #--------------
    outDir = '/Volumes/data1/ebaumer/ERAdata/'

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear, imnth, iday, fyear, fmnth, fday)
    dayLst = dRange.dateList

    #-----------------------------------------
    # Site coordinates for different stations
    #-----------------------------------------
    if loc.lower() == 'tab':
        sLat = 76.52
        sLon = 291.23  # 68.77 W = (360.0 - 68.77) = 291.23 E

    elif loc.lower() == 'mlo':
        sLat = 19.4
        sLon = 204.43  # 155.57 W = (360 - 155.57) = 204.43 E

    elif loc.lower() == 'fl0':
        sLat = 40.4
        sLon = 254.76  # 105.24 W = (360 - 105.24) = 254.76 E

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(ERAdir, exitFlg=True)
    ckDir(outDir, exitFlg=True)

    #-------------------
    # Yearly Output File
    #-------------------
    outFile = outDir + 'ERA_380K_theta_' + loc.lower() + '_' + str(
        year) + '.dat'

    #------------------------------------------
    # Create empty 380 K theta height array
    #------------------------------------------
    theta_hgt = np.zeros(len(dayLst))

    #------------------
    # Loop through days
    #------------------
    for i, sngDay in enumerate(dayLst):

        #---------------------------------------------
        # Open daily ERA Interim files 00, 06, 12, 18
        #---------------------------------------------
        YYYY = "{0:04d}".format(sngDay.year)
        MM = "{0:02d}".format(sngDay.month)
        DD = "{0:02d}".format(sngDay.day)
        ERA_F1 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '00.nc'
        ERA_F2 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '06.nc'
        ERA_F3 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '12.nc'
        ERA_F4 = ERAdir + YYYY + MM + '/' + 'ei.oper.an.pl.regn128sc.' + YYYY + MM + DD + '18.nc'

        f1 = netcdf.netcdf_file(ERA_F1, 'r', mmap=False)
        f2 = netcdf.netcdf_file(ERA_F2, 'r', mmap=False)
        f3 = netcdf.netcdf_file(ERA_F3, 'r', mmap=False)
        f4 = netcdf.netcdf_file(ERA_F4, 'r', mmap=False)

        #-----------------------------------
        # Lat and lon should be the same for
        # all files. Just grab once
        #-----------------------------------
        lat = f1.variables['g4_lat_1']
        lon = f1.variables['g4_lon_2']
        Plvl = f1.variables['lv_ISBL0']
        nlvls = np.shape(Plvl[:])[0]

        Z_00 = f1.variables['Z_GDS4_ISBL']
        Z_06 = f2.variables['Z_GDS4_ISBL']
        Z_12 = f3.variables['Z_GDS4_ISBL']
        Z_18 = f4.variables['Z_GDS4_ISBL']

        T_00 = f1.variables['T_GDS4_ISBL']
        T_06 = f2.variables['T_GDS4_ISBL']
        T_12 = f3.variables['T_GDS4_ISBL']
        T_18 = f4.variables['T_GDS4_ISBL']

        f1.close()
        f2.close()
        f3.close()
        f4.close()

        #-----------------------------------------------
        # If not interpolating the ERA reanalysis point,
        # find the closest lat/lon indices
        #-----------------------------------------------
        if not interpFlg:
            latind = findCls(lat[:], sLat)
            lonind = findCls(lon[:], sLon)

        #-----------------------------------------------------
        # For each level, interpolate height and temperature
        # based on latitude and longitude of site
        #-----------------------------------------------------
        Hgt_00 = np.zeros(nlvls)
        Hgt_06 = np.zeros(nlvls)
        Hgt_12 = np.zeros(nlvls)
        Hgt_18 = np.zeros(nlvls)

        Tint_00 = np.zeros(nlvls)
        Tint_06 = np.zeros(nlvls)
        Tint_12 = np.zeros(nlvls)
        Tint_18 = np.zeros(nlvls)

        for lvl in range(0, nlvls):
            HgtOneLvl_00 = np.squeeze(Z_00[lvl, :, :])
            HgtOneLvl_06 = np.squeeze(Z_06[lvl, :, :])
            HgtOneLvl_12 = np.squeeze(Z_12[lvl, :, :])
            HgtOneLvl_18 = np.squeeze(Z_18[lvl, :, :])

            T_OneLvl_00 = np.squeeze(T_00[lvl, :, :])
            T_OneLvl_06 = np.squeeze(T_06[lvl, :, :])
            T_OneLvl_12 = np.squeeze(T_12[lvl, :, :])
            T_OneLvl_18 = np.squeeze(T_18[lvl, :, :])

            if interpFlg:
                Hgt_00[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_00,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_06[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_06,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_12[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_12,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)
                Hgt_18[lvl] = interp2d(lon[:],
                                       lat[:],
                                       HgtOneLvl_18,
                                       kind='linear',
                                       bounds_error=True)(sLon, sLat)

                Tint_00[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_00,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_06[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_06,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_12[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_12,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)
                Tint_18[lvl] = interp2d(lon[:],
                                        lat[:],
                                        T_OneLvl_18,
                                        kind='linear',
                                        bounds_error=True)(sLon, sLat)

            else:
                Hgt_00[lvl] = HgtOneLvl_00[latind, lonind]
                Hgt_06[lvl] = HgtOneLvl_06[latind, lonind]
                Hgt_12[lvl] = HgtOneLvl_12[latind, lonind]
                Hgt_18[lvl] = HgtOneLvl_18[latind, lonind]

                Tint_00[lvl] = T_OneLvl_00[latind, lonind]
                Tint_06[lvl] = T_OneLvl_06[latind, lonind]
                Tint_12[lvl] = T_OneLvl_12[latind, lonind]
                Tint_18[lvl] = T_OneLvl_18[latind, lonind]

        #-------------------------------------
        # Create daily averages of 00,06,12,18
        #-------------------------------------
        Z_day = np.mean(np.vstack((Hgt_00, Hgt_06, Hgt_12, Hgt_18)), axis=0)
        T_day = np.mean(np.vstack((Tint_00, Tint_06, Tint_12, Tint_18)),
                        axis=0)

        #--------------------------------------------------
        # Convert geopotential [m^2 s^-2] => altitude [km]
        #--------------------------------------------------
        Z_day = Z_day / 9.81 / 1000.0

        #---------------------------------------------------------
        # Calculate potential temperature for each pressure level:
        #               Po   (R/Cp)
        #  Theta = T *(-----)
        #                P
        #---------------------------------------------------------
        thetaLvl = T_day * (P0 / Plvl[:])**(R_Cp)
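        # Quick check of the formula: T = 220 K at P = 100 mbar gives
        # theta = 220 * (1000/100)**0.286 ~= 220 * 1.93 ~= 425 K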

        #-----------------------------------------------
        # Interpolate height onto potential temperature
        # to find the height of the 380 K theta surface
        #-----------------------------------------------
        theta_hgt[i] = interp1d(thetaLvl, Z_day, kind='linear')(thetaTrgt)

    #------------------------------------------
    # Write 380 K theta heights to yearly file
    #------------------------------------------
    with open(outFile, 'w') as fopen:
        hdr = 'Date           380K Potential Temperature Height [km]\n'
        fopen.write(hdr)
        strformat = '{0:15}{1:<38}\n'
        for i, indDay in enumerate(dayLst):
            daystr = "{0:04d}{1:02d}{2:02d}".format(indDay.year, indDay.month,
                                                    indDay.day)
            temp = [daystr, theta_hgt[i]]
            fopen.write(strformat.format(*temp))
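# Note: `findCls` is referenced above but not shown. A minimal sketch of a
# nearest-index helper with the assumed behavior:
import numpy as np

def findCls(arr, val):
    # Return the index of the element of arr closest to val (assumed)
    return np.argmin(np.abs(np.asarray(arr) - val))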
Example #11
def main():

    #---------
    # Location
    #---------
    loc = 'tab'

    #-----------------------------------------
    # Version number to append to water files
    #-----------------------------------------
    verW = 'v5'

    #------
    # Flags
    #------
    pltFlg = True

    #-----------------------
    # Date Range of interest
    #-----------------------
    iyear = 2017
    imnth = 1
    iday = 1
    fyear = 2017
    fmnth = 12
    fday = 31

    #---------------------------------------
    # Altitude levels for different stations
    #---------------------------------------
    if loc.lower() == 'tab':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.0000, 0.2250
        ])

    elif loc.lower() == 'mlo':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 95.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.3960
        ])

    elif loc.lower() == 'fl0':
        Z = np.array([
            120.0000, 110.0000, 100.0000, 94.0000, 90.0000, 85.0000, 80.0000,
            75.0000, 70.0000, 65.0000, 60.0000, 55.0000, 50.0000, 48.0000,
            46.0000, 44.0000, 42.0000, 40.0000, 38.0000, 36.0000, 34.0000,
            32.0000, 30.0000, 28.0000, 26.0000, 24.0000, 22.0000, 20.0000,
            18.0000, 16.0000, 14.0000, 12.0000, 10.0000, 9.0000, 8.0000,
            7.0000, 6.0000, 5.0000, 4.0000, 3.0000, 2.0000, 1.6120
        ])

    nlvls = len(Z)

    #---------------
    # Data Directory
    #---------------
    dataDir = '/data1/' + loc.lower() + '/'

    #---------------------
    # Establish date range
    #---------------------
    dRange = sc.DateRange(iyear, imnth, iday, fyear, fmnth, fday)

    #----------------------------
    # File and directory checking
    #----------------------------
    ckDir(dataDir, exitFlg=True)

    #--------------------------------------------
    # Walk through first level of directories in
    # data directory and collect directory names
    #--------------------------------------------
    dirLst = []
    for drs in os.walk(dataDir).next()[1]:

        #-------------------------------------------------
        # Test that the directory name starts with a date
        #-------------------------------------------------
        try:
            int(drs[0:4])
        except ValueError:
            continue

        if dRange.inRange(int(drs[0:4]), int(drs[4:6]), int(drs[6:8])):
            dirLst.append(dataDir + drs + '/')

    dirLst.sort()

    #-----------------------------------------------
    # Loop through all directories within time frame
    #-----------------------------------------------
    for sngDir in dirLst:

        #----------------------------
        # Get date in datetime format
        #----------------------------
        oneDay = dt.datetime(int(os.path.basename(sngDir[:-1])[0:4]),
                             int(os.path.basename(sngDir[:-1])[4:6]),
                             int(os.path.basename(sngDir[:-1])[6:8]))

        #---------------------------------------------
        # Search for all individual retrieval profiles
        #---------------------------------------------
        zptFiles = glob.glob(sngDir + 'w-120.*.v99')

        nfiles = len(zptFiles)
        if nfiles == 0: continue

        sngH2O = np.zeros((nfiles, nlvls))

        for i, sngFile in enumerate(zptFiles):
            with open(sngFile, 'r') as fopen:
                lines = fopen.readlines()

            sngH2O[i, :] = np.array([
                float(x) for line in lines[1:]
                for x in line.strip().split(',')[:-1]
            ])
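            # Each w-120 file is a header line followed by rows of five
            # comma-separated values with a trailing comma (see the write
            # loops above), so split(',')[:-1] drops the empty last field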

        dailyH2O = np.mean(sngH2O, axis=0)

        #---------------------
        # Write out water file
        #---------------------
        with open(sngDir + 'w-120.' + verW, 'w') as fopen:
            fopen.write(
                '    1     H2O Daily profile from individual retrievals \n')

            for row in segmnt(dailyH2O, 5):
                strformat = ','.join('{:>12.4E}' for i in row) + ', \n'
                fopen.write(strformat.format(*row))

        #--------------------
        # Create plots to pdf
        #--------------------
        if pltFlg:

            pdfsav = PdfPages(sngDir + 'DailyH2Oprf_v5.pdf')

            fig1, ax1 = plt.subplots()
            ax1.plot(dailyH2O, Z, label='Daily Averaged Retrieved H2O')

            for i, sngFile in enumerate(zptFiles):
                ax1.plot(sngH2O[i, :], Z, label=os.path.basename(sngFile))

            ax1.grid(True, which='both')
            ax1.legend(prop={'size': 9})
            ax1.set_ylabel('Altitude [km]')
            ax1.set_xlabel('VMR [ppv]')
            ax1.tick_params(axis='x', which='both', labelsize=8)
            ax1.set_ylim((Z[-1], 60))
            ax1.set_title(oneDay)

            pdfsav.savefig(fig1, dpi=250)

            pdfsav.close()

        print 'Finished processing folder: {}'.format(sngDir)