Example #1
def dload_grib(date_list, hour, grib_source='ECMWF', weather_dir='./'):
    '''Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        grib_source : string, grib data source: ECMWF, ERA, MERRA or NARR
        weather_dir : string, local directory to store the grib files
    Output:
        grib_file_list : list of string
    '''

    ## Grib data directory
    weather_dir = os.path.abspath(weather_dir)
    grib_dir = weather_dir+'/'+grib_source
    if not os.path.isdir(grib_dir):
        print('making directory: '+grib_dir)
        os.makedirs(grib_dir)

    ## Date list to grib file list
    grib_file_list = []
    for d in date_list:
        if   grib_source == 'ECMWF':  grib_file = grib_dir+'/ERA-Int_'+d+'_'+hour+'.grb'
        elif grib_source == 'ERA'  :  grib_file = grib_dir+'/ERA_'+d+'_'+hour+'.grb'
        elif grib_source == 'MERRA':  grib_file = grib_dir+'/merra-'+d+'-'+hour+'.hdf'
        elif grib_source == 'NARR' :  grib_file = grib_dir+'/narr-a_221_'+d+'_'+hour+'00_000.grb'
        grib_file_list.append(grib_file)

    ## Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode([os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [i for i in grib_file_existed if os.path.getsize(i) != grib_filesize_mode]
        print('number of grib files existed    : %d' % len(grib_file_existed))
        print('file size mode: %d' % grib_filesize_mode)
        if grib_file_corrupted:
            print('------------------------------------------------------------------------------')
            print('corrupted grib files detected! Delete them and re-download...')
            print('number of grib files corrupted  : %d' % len(grib_file_corrupted))
            for i in grib_file_corrupted:
                rmCmd = 'rm '+i
                print(rmCmd)
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print('------------------------------------------------------------------------------')
    grib_file2download = sorted(list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [str(re.findall(r'\d{8}', i)[0]) for i in grib_file2download]
    print('number of grib files to download: %d' % len(date_list2download))
    print('------------------------------------------------------------------------------\n')

    ## Download grib file using PyAPS
    if   grib_source == 'ECMWF':  pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif grib_source == 'ERA'  :  pa.ERAdload(  date_list2download, hour, grib_dir)
    elif grib_source == 'MERRA':  pa.MERRAdload(date_list2download, hour, grib_dir)
    elif grib_source == 'NARR' :  pa.NARRdload( date_list2download, hour, grib_dir)

    return grib_file_existed
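
A minimal usage sketch for the function above; the dates, hour and WEATHER directory are illustrative assumptions, and the snippet presumes the module-level imports used inside the function (os, re, pyaps as pa, and the helper module ut).

date_list = ['20150101', '20150201', '20150301']   # acquisition dates in YYYYMMDD
grib_files = dload_grib(date_list, hour='12',
                        grib_source='ECMWF', weather_dir='./WEATHER')
print('number of grib files returned: %d' % len(grib_files))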
Example #2
def dload_grib_files(grib_file_list, trop_model='ERA5', snwe=None):
    """Download weather re-analysis grib files using PyAPS
    Parameters: grib_file_list : list of string of grib files
    Returns:    grib_file_list : list of string
    """
    print(
        '\n------------------------------------------------------------------------------'
    )
    print('downloading weather model data using PyAPS ...')

    # Get date list to download (skip already downloaded files)
    grib_file_exist = check_exist_grib_file(grib_file_list, print_msg=True)
    grib_file2dload = sorted(list(set(grib_file_list) - set(grib_file_exist)))
    date_list2dload = [str(re.findall(r'\d{8}', i)[0]) for i in grib_file2dload]
    print('number of grib files to download: %d' % len(date_list2dload))
    print(
        '------------------------------------------------------------------------------\n'
    )

    # Download grib file using PyAPS
    if len(date_list2dload) > 0:
        hour = re.findall(r'\d{8}[-_]\d{2}', grib_file2dload[0])[0]
        hour = hour.replace('-', '_').split('_')[1]
        grib_dir = os.path.dirname(grib_file2dload[0])

        # try 3 times to download, then use whatever downloaded to calculate delay
        i = 0
        while i < 3:
            i += 1
            try:
                if trop_model in ['ERA5', 'ERAINT']:
                    pa.ECMWFdload(date_list2dload,
                                  hour,
                                  grib_dir,
                                  model=trop_model,
                                  snwe=snwe,
                                  flist=grib_file2dload)

                elif trop_model == 'MERRA':
                    pa.MERRAdload(date_list2dload, hour, grib_dir)

                elif trop_model == 'NARR':
                    pa.NARRdload(date_list2dload, hour, grib_dir)
            except Exception:
                # swallow download errors and retry (up to 3 attempts in total)
                pass

    grib_file_list = check_exist_grib_file(grib_file_list, print_msg=False)
    return grib_file_list
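
A usage sketch for the function above; the file-name pattern, dates and bounding box are illustrative assumptions (the function only requires that each file name contain the YYYYMMDD date and HH hour fields parsed by the regular expressions above).

import os

grib_dir = './WEATHER/ERA5'
dates = ['20180101', '20180113', '20180125']
# hypothetical file names containing the date and hour fields expected above
grib_files = [os.path.join(grib_dir, 'ERA5_%s_14.grb' % d) for d in dates]
snwe = (30, 40, -120, -110)   # south, north, west, east bounding box in degrees
grib_files = dload_grib_files(grib_files, trop_model='ERA5', snwe=snwe)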
Example #3
def main(argv):

    DelayType = 'comb'

    try:
        opts, args = getopt.getopt(argv, "f:d:s:h:D:i:")
    except getopt.GetoptError:
        Usage()
        sys.exit(1)

    for opt, arg in opts:
        if opt == '-f': timeSeriesFile = arg
        elif opt == '-d': demFile = arg
        elif opt == '-s': atmSource = arg
        elif opt == '-h': hr = arg
        elif opt == '-D': DelayType = arg
        elif opt == '-i': inc_angle = arg

    try:
        timeSeriesFile
        demFile
    except:
        Usage()
        sys.exit(1)

    demFile = ut.check_variable_name(demFile)
    demCoord = ut.radar_or_geo(demFile)

    h5timeseries = h5py.File(timeSeriesFile)
    yref = h5timeseries['timeseries'].attrs['ref_y']
    xref = h5timeseries['timeseries'].attrs['ref_x']

    ###############################################################
    #incidence angle to map the zenith delay to the slant delay

    try:
        inc_angle
    except:
        print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
        print 'WARNING:'
        print 'incidence angle is not specified >>>> Average look angle is used ... '
        print 'For more precise results use input option -i to introduce the incidence angle file or average incidence angle'
        print '++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
        input_inc_angle = 'None'

        wavelength = float(h5timeseries['timeseries'].attrs['WAVELENGTH'])
        inc_angle1 = float(h5timeseries['timeseries'].attrs['LOOK_REF1'])
        inc_angle2 = float(h5timeseries['timeseries'].attrs['LOOK_REF2'])
        inc_angle = (inc_angle1 + inc_angle2) / 2.0
        print '*******************************************************************************'
        print 'Near Look Angle: ' + str(inc_angle1)
        print 'Far  Look Angle:' + str(inc_angle2)
        print 'Average Look Angle (used in pyaps to calculate delay): ' + str(
            inc_angle)
        print 'Acquisition time is : ' + hr
        print '*******************************************************************************'
        inc_angle = str(inc_angle)

    if os.path.isfile(inc_angle):
        incidenceFile = inc_angle
        h5incidence = h5py.File(incidenceFile, 'r')
        iset = h5incidence['mask'].get('mask')
        inc_angle = iset[0:iset.shape[0], 0:iset.shape[1]]
        h5incidence.close()
    else:
        inc_angle = float(inc_angle)
        print 'incidence angle = ' + str(inc_angle)

    inc_angle = inc_angle * pi / 180.0
    ################################################################

    dateList = h5timeseries['timeseries'].keys()

    if atmSource in ['ecmwf', 'ECMWF']:
        gribSource = 'ECMWF'
        if not os.path.isdir('ECMWF'):
            print 'making directory: ECMWF'
            os.mkdir('ECMWF')

        ecmwf_file = []
        for d in dateList:
            ecm = './ECMWF/ERA-Int_' + d + '_' + hr + '.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.ECMWFdload([d], hr, './ECMWF/')
            else:
                print ecm + ' already exists.'

    elif atmSource in ['narr', 'NARR']:
        gribSource = 'NARR'
        if not os.path.isdir('NARR'):
            print 'making directory: NARR'
            os.mkdir('NARR')

        ecmwf_file = []
        for d in dateList:
            ecm = './NARR/narr-a_221_' + d + '_' + hr + '00_000.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.NARRdload([d], hr, './NARR/')
            else:
                print ecm + ' already exists.'

    elif atmSource in ['era', 'ERA']:
        gribSource = 'ERA'
        if not os.path.isdir('ERA'):
            print 'making directory: ERA'
            os.mkdir('ERA')

        ecmwf_file = []
        for d in dateList:
            ecm = './ERA/ERA_' + d + '_' + hr + '.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.ERAdload([d], hr, './ERA/')
            else:
                print ecm + ' already exists.'

    elif atmSource in ['merra', 'MERRA']:
        gribSource = 'MERRA'
        if not os.path.isdir('MERRA'):
            print 'making directory: MERRA'
            os.mkdir('MERRA')

        ecmwf_file = []
        for d in dateList:
            ecm = './MERRA/merra-' + d + '-' + hr + '.hdf'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.MERRAdload([d], hr, './MERRA/')
            else:
                print ecm + ' already exists.'

    else:
        Usage()
        sys.exit(1)

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'
    h5phsName = atmSource + '.h5'
    h5phs = h5py.File(h5phsName, 'w')
    outName = timeSeriesFile.replace(
        '.h5', '_') + atmSource + '.h5'  #Yunjun, Feb 15, 2015
    #outName=timeSeriesFile.replace('.h5','')+'_tropCorPyAPS.h5'
    h5apsCor = h5py.File(outName, 'w')
    group = h5apsCor.create_group('timeseries')
    group_phs = h5phs.create_group('timeseries')

    #if 'X_FIRST' in  h5timeseries['timeseries'].attrs.keys():
    #   demCoord='geo'
    #   print 'The coordinate system is : Geo'
    #else:
    #   demCoord='radar'
    #   print 'The coordinate system is : radar'

    print ecmwf_file[0]
    if demCoord == 'radar':
        aps1 = pa.PyAPS_rdr(str(ecmwf_file[0]),
                            demFile,
                            grib=gribSource,
                            verb=True,
                            Del=DelayType)
    else:
        aps1 = pa.PyAPS_geo(str(ecmwf_file[0]),
                            demFile,
                            grib=gribSource,
                            verb=True,
                            Del=DelayType)

    phs1 = zeros((aps1.ny, aps1.nx))
    aps1.getdelay(phs1)
    phs1 = (phs1 - phs1[yref, xref]) / cos(inc_angle)
    dset = group.create_dataset(dateList[0],
                                data=phs1 - phs1,
                                compression='gzip')

    for i in range(1, len(ecmwf_file)):
        ecm = ecmwf_file[i]
        print ecm
        if demCoord == 'radar':
            aps = pa.PyAPS_rdr(str(ecm),
                               demFile,
                               grib=gribSource,
                               verb=True,
                               Del=DelayType)
        else:
            aps = pa.PyAPS_geo(str(ecm),
                               demFile,
                               grib=gribSource,
                               verb=True,
                               Del=DelayType)
        phs = zeros((aps.ny, aps.nx))
        aps.getdelay(phs)
        phs = (phs - phs[yref, xref]) / cos(inc_angle)
        phs = phs - phs1
        dset = group_phs.create_dataset(dateList[i],
                                        data=phs,
                                        compression='gzip')
        dset1 = h5timeseries['timeseries'].get(dateList[i])
        data1 = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
        dset = group.create_dataset(dateList[i],
                                    data=data1 + phs,
                                    compression='gzip')

    for key, value in h5timeseries['timeseries'].attrs.iteritems():
        group.attrs[key] = value
        group_phs.attrs[key] = value

    dset1 = h5timeseries['mask'].get('mask')
    Mask = dset1[0:dset1.shape[0], 0:dset1.shape[1]]
    group = h5apsCor.create_group('mask')
    dset = group.create_dataset('mask', data=Mask, compression='gzip')
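
Based on the getopt options parsed at the top of this example, a hypothetical invocation could look like the sketch below (script and file names are placeholders, not taken from the source):

# python tropcor_pyaps.py -f timeseries.h5 -d radar_4rlks.hgt -s ECMWF -h 06 -D comb -i 34.0
main(['-f', 'timeseries.h5', '-d', 'radar_4rlks.hgt',
      '-s', 'ECMWF', '-h', '06', '-D', 'comb', '-i', '34.0'])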
Example #4
def download_atmosphereModel(inps):  #EMRE
    S, N, W, E = [val for val in inps.bbox.split()]  #EMRE
    start_year, start_month, start_day = [
        int(val) for val in inps.start_date.split('-')
    ]  #EMRE
    end_year, end_month, end_day = [
        int(val) for val in inps.end_date.split('-')
    ]  #EMRE
    start_date = date(start_year, start_month, start_day)  #EMRE
    end_date = date(end_year, end_month, end_day)  #EMRE
    d1 = start_date
    d2 = end_date

    dateList = []
    #    d1 = datetime.date(2002,1,1)
    #    d2 = datetime.date(2002,2,1)
    diff = d2 - d1
    day_step = 1  #EMRE
    hr = '18'
    for i in range(0, diff.days + 1, day_step):
        dd = (d1 + datetime.timedelta(i)).isoformat()
        dateList.append(dd.replace('-', ''))

#    if atmSource in ['ecmwf','ECMWF']:
    if inps.data_source in ['ecmwf', 'ECMWF']:  #EMRE
        gribSource = 'ECMWF'
        if not os.path.isdir('ECMWF'):
            print 'making directory: ECMWF'
            os.mkdir('ECMWF')

        ecmwf_file = []
        for d in dateList:
            ecm = './ECMWF/ERA-Int_' + d + '_' + hr + '.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.ECMWFdload([d], hr, './ECMWF/')
            else:
                print ecm + ' already exists.'

#    elif atmSource in ['narr','NARR']:
    elif inps.data_source in ['narr', 'NARR']:  #EMRE
        gribSource = 'NARR'
        if not os.path.isdir('NARR'):
            print 'making directory: NARR'
            os.mkdir('NARR')

        ecmwf_file = []
        for d in dateList:
            ecm = './NARR/narr-a_221_' + d + '_' + hr + '00_000.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.NARRdload([d], hr, './NARR/')
            else:
                print ecm + ' already exists.'

#    elif atmSource in ['era','ERA']:
    elif inps.data_source in ['era', 'ERA']:
        gribSource = 'ERA'
        if not os.path.isdir('ERA'):
            print 'making directory: ERA'
            os.mkdir('ERA')

        ecmwf_file = []
        for d in dateList:
            ecm = './ERA/ERA_' + d + '_' + hr + '.grb'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.ERAdload([d], hr, './ERA/')
            else:
                print ecm + ' already exists.'


#    elif atmSource in ['merra','MERRA']:
    elif inps.data_source in ['merra', 'MERRA']:
        gribSource = 'MERRA'
        if not os.path.isdir('MERRA'):
            print 'making directory: MERRA'
            os.mkdir('MERRA')

        ecmwf_file = []
        for d in dateList:
            ecm = './MERRA/merra-' + d + '-' + hr + '.hdf'
            ecmwf_file.append(ecm)
            print[d]
            if not os.path.isfile(ecm):
                pa.MERRAdload([d], hr, './MERRA/')
            else:
                print ecm + ' already exists.'

    else:
        #       Usage();
        print 'FAILED'  #EMRE
        sys.exit(1)

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'
    h5phs = h5py.File('aps.h5', 'w')
    outName = 'ECMWF.h5'
    #h5apsCor=h5py.File(outName,'w')
    # group=h5apsCor.create_group('timeseries')
    group_phs = h5phs.create_group('timeseries')

    demCoord = 'geo'
    print ecmwf_file[0]
    #demCoord='radar'
    if demCoord == 'radar':
        aps1 = pa.PyAPS_rdr(str(ecmwf_file[0]),
                            demFile,
                            grib=gribSource,
                            verb=True,
                            Del=DelayType)
    else:
        aps1 = pa.PyAPS_geo(str(ecmwf_file[0]),
                            demFile,
                            grib=gribSource,
                            verb=True,
                            Del=DelayType)

    phs1 = np.zeros((aps1.ny, aps1.nx))
    print aps1.ny
    print aps1.nx
    # aps1.getdelay(phs1,inc=inc_angle,wvl=wavelength)
    aps1.getdelay(phs1)
    dset = group_phs.create_dataset(dateList[0], data=phs1, compression='gzip')
    #    phs1=(phs1 - phs1[yref,xref])*wavelength/(4*np.pi)

    # dset = group.create_dataset(dateList[0], data= phs1- phs1, compression='gzip')

    for i in range(1, len(ecmwf_file)):
        ecm = ecmwf_file[i]
        print ecm
        if demCoord == 'radar':
            aps = pa.PyAPS_rdr(str(ecm),
                               demFile,
                               grib=gribSource,
                               verb=True,
                               Del=DelayType)
        else:
            aps = pa.PyAPS_geo(str(ecm),
                               demFile,
                               grib=gribSource,
                               verb=True,
                               Del=DelayType)
        phs = np.zeros((aps.ny, aps.nx))
        #  aps.getdelay(phs,inc=inc_angle,wvl=wavelength)
        aps.getdelay(phs)
        #  phs=(phs - phs[yref,xref])*wavelength/(4*np.pi)
        #  phs=phs-phs1
        dset = group_phs.create_dataset(dateList[i],
                                        data=phs,
                                        compression='gzip')
    #   dset1 = h5timeseries['timeseries'].get(dateList[i])
    #   data1 = dset1[0:dset1.shape[0],0:dset1.shape[1]]
    #   dset = group.create_dataset(dateList[i], data= data1+phs, compression='gzip')

    for key, value in h5timeseries['timeseries'].attrs.iteritems():
        # group.attrs[key] = value
        group_phs.attrs[key] = value
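
A sketch of the input object this function expects; only the attributes read at the top of the function are shown, and the concrete values are illustrative assumptions. Note that the delay-calculation part of the example still relies on variables such as demFile, DelayType and h5timeseries defined elsewhere in the original module.

from argparse import Namespace

inps = Namespace(bbox='30 34 -120 -115',        # 'S N W E' in degrees
                 start_date='2016-01-01',       # YYYY-MM-DD
                 end_date='2016-01-31',         # YYYY-MM-DD
                 data_source='ECMWF')
download_atmosphereModel(inps)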
Example #5
import pyaps as pa
import numpy as np
import matplotlib.pyplot as plt
import sys

print '------------------------------------------------'
print 'You are using PyAPS from %s' % pa.__file__
print '------------------------------------------------'

print 'Testing Download Methods'
pa.ECMWFdload(['20040526', '20030426'], '12', './ECMWF/')

print 'Testing MERRA Downloads'
pa.MERRAdload(['20040526', '20030426'], '12', './MERRA/')

print 'Downloads OK'
print '------------------------------------------------'
print '------------------------------------------------'

print 'Testing ECMWF in Radar geometry, with a RMG dem'
aps1 = pa.PyAPS_rdr('ECMWF/ERA-Int_20030426_12.grb',
                    'dem_16rlks.hgt',
                    grib='ECMWF',
                    verb=True)
aps2 = pa.PyAPS_rdr('ECMWF/ERA-Int_20040526_12.grb',
                    'dem_16rlks.hgt',
                    grib='ECMWF',
                    verb=True)

print 'With Lat Lon files'
phs1 = np.zeros((aps1.ny, aps1.nx))
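
The listing stops after allocating phs1; following the getdelay() pattern used in the other examples on this page, the delay maps and their difference could be computed as in the sketch below (this continuation is an assumption, not part of the original test script).

aps1.getdelay(phs1)

phs2 = np.zeros((aps2.ny, aps2.nx))
aps2.getdelay(phs2)

# differential (epoch-to-epoch) tropospheric delay
ddelay = phs2 - phs1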
Example #6
def main(argv):

    inps = cmdLineParse()
    inps.dem_file = ut.get_file_list([inps.dem_file])[0]
    inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
    atr = readfile.read_attribute(inps.timeseries_file)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Get Acquisition time
    inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                             inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    if not os.path.isdir(inps.grib_source):
        print 'making directory: ' + inps.grib_source
        os.mkdir(inps.grib_source)

    ## Loop to download
    inps.grib_file_list = []
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5timeseries['timeseries'].keys())
    for d in dateList:
        print[d]
        if inps.grib_source == 'ECMWF':
            grib_file = './ECMWF/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = './ERA/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = './MERRA/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = './NARR/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

        if os.path.isfile(grib_file):
            print grib_file + ' already exists.'
        else:
            if inps.grib_source == 'ECMWF':
                pa.ECMWFdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'ERA':
                pa.ERAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'MERRA':
                pa.MERRAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'NARR':
                pa.NARRdload([d], inps.hour, './' + inps.grib_source + '/')

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data + phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()
    print 'Done.'

    return
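
closest_weather_product_time() is used above but not shown in these examples; a hypothetical sketch of such a helper, assuming 6-hourly products (00/06/12/18 UTC) and that CENTER_LINE_UTC is given in seconds of the day (both are assumptions, not taken from the source):

def closest_weather_product_time_sketch(center_line_utc, grib_source='ECMWF'):
    # both the 6-hour product interval and the seconds-of-day unit are assumptions
    acq_hour = float(center_line_utc) / 3600.0
    product_hours = [0, 6, 12, 18]
    closest = min(product_hours, key=lambda h: abs(h - acq_hour))
    return '%02d' % closest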
Example #7
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(
                inps.dem_file)[0] + '4pyaps' + atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Grib data directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(
                inps.dem_file)) + '/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: ' + inps.weather_dir
    grib_dir = inps.weather_dir + '/' + inps.grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: ' + grib_dir
        os.makedirs(grib_dir)

    ## Get Acquisition time
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                                 inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    ## Get grib file list and date list
    inps.grib_file_list = []
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: ' + inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    for d in dateList:
        if inps.grib_source == 'ECMWF':
            grib_file = grib_dir + '/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = grib_dir + '/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = grib_dir + '/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = grib_dir + '/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

    ## Get date list to download
    grib_file_existed = ut.get_file_list(inps.grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode(
            [os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if os.path.getsize(i) != grib_filesize_mode
        ]
        print 'number of grib files existed    : %d' % len(grib_file_existed)
        print 'file size mode: %d' % grib_filesize_mode
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Delete them and re-download...'
            print 'number of grib files corrupted  : %d' % len(
                grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(
        list(set(inps.grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall('\d{8}', i)[0]) for i in grib_file2download
    ]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if inps.grib_source == 'ECMWF':
        pa.ECMWFdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'ERA':
        pa.ERAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'MERRA':
        pa.MERRAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'NARR':
        pa.NARRdload(date_list2download, inps.hour, grib_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data - phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if '4pyaps' in inps.dem_file:
        rmCmd = 'rm ' + inps.dem_file + ' ' + inps.dem_file + '.rsc '
        print rmCmd
        os.system(rmCmd)

    print 'Done.'

    return
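
get_delay() is referenced in Examples #6 and #7 but not defined on this page; a hypothetical sketch following the PyAPS_rdr/PyAPS_geo + getdelay() pattern and the incidence-angle projection used in the other examples (the function name, the coordinate check and the 'comb' delay type are assumptions):

def get_delay_sketch(grib_file, atr, inps_dict):
    # choose geo or radar coordinates based on the attribute check used in Example #3
    if 'X_FIRST' in atr.keys():
        aps = pa.PyAPS_geo(str(grib_file), inps_dict['dem_file'],
                           grib=inps_dict['grib_source'], verb=True, Del='comb')
    else:
        aps = pa.PyAPS_rdr(str(grib_file), inps_dict['dem_file'],
                           grib=inps_dict['grib_source'], verb=True, Del='comb')
    phs = np.zeros((aps.ny, aps.nx))
    aps.getdelay(phs)
    # map the zenith delay to the slant direction (incidence_angle is already in radians)
    return phs / np.cos(inps_dict['incidence_angle'])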
Example #8
def dload_grib_pyaps(date_list, hour, trop_model='ECMWF', weather_dir='./'):
    """Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        trop_model  : string, tropospheric model: ECMWF, ERA, MERRA, MERRA1 or NARR
        weather_dir : string, local directory to store the grib files
    Output:
        grib_file_list : list of string
    """
    print(
        '*' * 50 +
        '\nDownloading weather model data using PyAPS (Jolivet et al., 2011, GRL) ...'
    )
    # Grib data directory
    grib_dir = weather_dir + '/' + trop_model
    if not os.path.isdir(grib_dir):
        os.makedirs(grib_dir)
        print('making directory: ' + grib_dir)

    # Date list to grib file list
    grib_file_list = date_list2grib_file(date_list, hour, trop_model, grib_dir)

    # Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_digit = ut.most_common(
            [len(str(os.path.getsize(i))) for i in grib_file_existed])
        grib_filesize_max2 = ut.most_common(
            [str(os.path.getsize(i))[0:2] for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if (len(str(os.path.getsize(i))) != grib_filesize_digit
                or str(os.path.getsize(i))[0:2] != grib_filesize_max2)
        ]
        print('file size mode: %se%d bytes' %
              (grib_filesize_max2, grib_filesize_digit - 2))
        print('number of grib files existed    : %d' % len(grib_file_existed))
        if grib_file_corrupted:
            print(
                '------------------------------------------------------------------------------'
            )
            print(
                'corrupted grib files detected! Delete them and re-download...'
            )
            print('number of grib files corrupted  : %d' %
                  len(grib_file_corrupted))
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print(rmCmd)
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print(
                '------------------------------------------------------------------------------'
            )
    grib_file2download = sorted(
        list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall(r'\d{8}', i)[0]) for i in grib_file2download
    ]
    print('number of grib files to download: %d' % len(date_list2download))
    print(
        '------------------------------------------------------------------------------\n'
    )

    # Download grib file using PyAPS
    if trop_model == 'ECMWF':
        pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif trop_model == 'MERRA':
        pa.MERRAdload(date_list2download, hour, grib_dir)
    elif trop_model == 'NARR':
        pa.NARRdload(date_list2download, hour, grib_dir)
    elif trop_model == 'ERA':
        pa.ERAdload(date_list2download, hour, grib_dir)
    elif trop_model == 'MERRA1':
        pa.MERRA1dload(date_list2download, hour, grib_dir)
    return grib_file_list
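
A minimal usage sketch for the function above; dates, hour and directory are illustrative, and the snippet assumes the same os/re/pyaps/ut imports and the date_list2grib_file() helper used inside the function.

date_list = ['20170101', '20170113', '20170125']   # YYYYMMDD
grib_files = dload_grib_pyaps(date_list, hour='06',
                              trop_model='ECMWF', weather_dir='./WEATHER')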
Example #9
def dload_grib(date_list, hour, grib_source='ECMWF', weather_dir='./'):
    '''Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        grib_source : string, grib data source: ECMWF, ERA, MERRA, MERRA1 or NARR
        weather_dir : string, local directory to store the grib files
    Output:
        grib_file_list : list of string
    '''
    ## Grib data directory
    weather_dir = os.path.abspath(weather_dir)
    grib_dir = weather_dir + '/' + grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: ' + grib_dir
        os.makedirs(grib_dir)

    ## Date list to grib file list
    grib_file_list = date_list2grib_file(date_list, hour, grib_source,
                                         grib_dir)

    ## Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_digit = ut.mode(
            [len(str(os.path.getsize(i))) for i in grib_file_existed])
        grib_filesize_max2 = ut.mode(
            [str(os.path.getsize(i))[0:2] for i in grib_file_existed])
        grib_file_corrupted = [i for i in grib_file_existed if (len(str(os.path.getsize(i))) != grib_filesize_digit or\
                                                                str(os.path.getsize(i))[0:2] != grib_filesize_max2)]
        print 'file size mode: %se%d bytes' % (grib_filesize_max2,
                                               grib_filesize_digit - 2)
        print 'number of grib files existed    : %d' % len(grib_file_existed)
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Delete them and re-download...'
            print 'number of grib files corrupted  : %d' % len(
                grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(
        list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall('\d{8}', i)[0]) for i in grib_file2download
    ]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if grib_source == 'ECMWF':
        pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif grib_source == 'ERA':
        pa.ERAdload(date_list2download, hour, grib_dir)
    elif grib_source == 'NARR':
        pa.NARRdload(date_list2download, hour, grib_dir)
    elif grib_source == 'MERRA':
        pa.MERRAdload(date_list2download, hour, grib_dir)
    elif grib_source == 'MERRA1':
        pa.MERRA1dload(date_list2download, hour, grib_dir)

    return grib_file_existed
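
As with Example #1, a usage sketch for the Python 2 variant above (values are illustrative; this version additionally handles the MERRA1 source):

grib_files = dload_grib(['20140101', '20140113'], hour='12',
                        grib_source='ECMWF', weather_dir='./WEATHER')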