Example #1
def main(argv):
    # expect at least lat and lon in argv (= sys.argv[1:])
    if len(argv) < 2:
        usage()
        sys.exit(1)

    lat = float(argv[0])
    lon = float(argv[1])

    try:
        trans_file = argv[2]
    except IndexError:
        trans_file = ut.get_file_list('geomap*.trans')[0]

    try:
        radar_file = argv[3]
    except IndexError:
        radar_file = 'unwrapIfgram.h5'

    atr_rdr = readfile.read_attribute(radar_file)

    print('input geo coord: lat=%.4f, lon=%.4f' % (lat, lon))

    y, x = ut.glob2radar(np.array(lat), np.array(lon), trans_file,
                         atr_rdr)[0:2]
    print('corresponding radar coord: y=%d, x=%d' % (y, x))

    return
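A minimal usage sketch for the function above, assuming it receives sys.argv[1:]; the coordinates and lookup-table filename are illustrative:

# hypothetical call: convert one geo coordinate into radar coordinates
main(['33.0500', '-115.5000', 'geomap_8rlks.trans'])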
Example #2
def main(argv):

    inps = cmdLineParse()
    #print '\n****************** mask *********************'
    inps.file = ut.get_file_list(inps.file)
    print('number of files to mask: ' + str(len(inps.file)))
    print(inps.file)

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    # masking
    if len(inps.file) == 1:
        mask_file(inps.file[0], inps.mask_file, inps.outfile, vars(inps))

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(mask_file)(File, inps.mask_file, inps_dict=vars(inps))
            for File in inps.file)
    else:
        for File in inps.file:
            print('-------------------------------------------')
            mask_file(File, inps.mask_file, inps_dict=vars(inps))

    print('Done.')
    return
Example #3
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file, abspath=True)

    # Check input file type
    ext = os.path.splitext(inps.file[0])[1]
    if ext not in ['.unw', '.cor', '.int']:
        print('No need to extract attributes for ROI_PAC ' + ext + ' file')
        return

    print('number of files: ' + str(len(inps.file)))

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    if len(inps.file) == 1:
        extract_attribute(inps.file[0])
    elif inps.parallel:
        Parallel(n_jobs=num_cores)(delayed(extract_attribute)(file)
                                   for file in inps.file)
    else:
        for File in inps.file:
            extract_attribute(File)

    return
Example #4
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print('number of files to geocode: ' + str(len(inps.file)))
    print(inps.file)
    print('interpolation method: ' + inps.method)
    print('fill_value: ' + str(inps.fill_value))

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    #####
    if len(inps.file) == 1:
        geocode_file_with_geo_lut(inps.file[0], inps.lookup_file, inps.method,
                                  inps.fill_value, inps.outfile)
    elif inps.parallel:
        Parallel(n_jobs=num_cores)(
            delayed(geocode_file_with_geo_lut)(fname, inps.lookup_file, inps.method, inps.fill_value)
            for fname in inps.file)
    else:
        for fname in inps.file:
            geocode_file_with_geo_lut(fname, inps.lookup_file, inps.method,
                                      inps.fill_value)

    print('Done.')
    return
Example #5
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    # multilooking
    if len(inps.file) == 1:
        multilook_file(inps.file[0], inps.lks_y, inps.lks_x, inps.outfile)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(multilook_file)(file, inps.lks_y, inps.lks_x)
            for file in inps.file)
    else:
        for File in inps.file:
            print('-------------------------------------------')
            multilook_file(File, inps.lks_y, inps.lks_x)

    print('Done.')
    return
Example #6
def dload_grib(date_list, hour, grib_source='ECMWF', weather_dir='./'):
    '''Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        grib_source : string, data source: ECMWF, ERA, MERRA or NARR
        weather_dir : string, directory to store the downloaded grib files
    Output:
        grib_file_existed : list of string, grib files already present before download
    '''

    ## Grib data directory
    weather_dir = os.path.abspath(weather_dir)
    grib_dir = weather_dir+'/'+grib_source
    if not os.path.isdir(grib_dir):
        print('making directory: '+grib_dir)
        os.makedirs(grib_dir)

    ## Date list to grib file list
    grib_file_list = []
    for d in date_list:
        if   grib_source == 'ECMWF':  grib_file = grib_dir+'/ERA-Int_'+d+'_'+hour+'.grb'
        elif grib_source == 'ERA'  :  grib_file = grib_dir+'/ERA_'+d+'_'+hour+'.grb'
        elif grib_source == 'MERRA':  grib_file = grib_dir+'/merra-'+d+'-'+hour+'.hdf'
        elif grib_source == 'NARR' :  grib_file = grib_dir+'/narr-a_221_'+d+'_'+hour+'00_000.grb'
        grib_file_list.append(grib_file)

    ## Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode([os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [i for i in grib_file_existed if os.path.getsize(i) != grib_filesize_mode]
        print('number of existing grib files   : %d' % len(grib_file_existed))
        print('file size mode: %d' % grib_filesize_mode)
        if grib_file_corrupted:
            print('------------------------------------------------------------------------------')
            print('corrupted grib files detected! Deleting them for re-download ...')
            print('number of corrupted grib files  : %d' % len(grib_file_corrupted))
            for i in grib_file_corrupted:
                rmCmd = 'rm '+i
                print(rmCmd)
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print('------------------------------------------------------------------------------')
    grib_file2download = sorted(list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [str(re.findall(r'\d{8}', i)[0]) for i in grib_file2download]
    print('number of grib files to download: %d' % len(date_list2download))
    print('------------------------------------------------------------------------------\n')

    ## Download grib file using PyAPS
    if   grib_source == 'ECMWF':  pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif grib_source == 'ERA'  :  pa.ERAdload(  date_list2download, hour, grib_dir)
    elif grib_source == 'MERRA':  pa.MERRAdload(date_list2download, hour, grib_dir)
    elif grib_source == 'NARR' :  pa.NARRdload( date_list2download, hour, grib_dir)

    return grib_file_existed
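A usage sketch for the downloader above; the dates, hour and weather directory are illustrative:

# hypothetical call: fetch ERA-Interim grib files for two acquisition dates
grib_files = dload_grib(['20070115', '20070219'], '06',
                        grib_source='ECMWF', weather_dir='./WEATHER')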
Example #7
def set_mask():
    global mask, inps, atr

    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']

        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except IndexError:
            inps.mask_file = None

    try:
        mask = readfile.read(inps.mask_file)[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except Exception:
        mask = None
        print('No mask used.')
Example #8
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file, abspath=True)
    print('number of files: ' + str(len(inps.file)))

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    ##### multiple datasets files
    ext = os.path.splitext(inps.file[0])[1]
    if ext in ['.unw', '.cor', '.int']:
        if len(inps.file) == 1:
            extract_attribute_interferogram(inps.file[0])
        elif inps.parallel:
            Parallel(n_jobs=num_cores)(
                delayed(extract_attribute_interferogram)(file)
                for file in inps.file)
        else:
            for File in inps.file:
                extract_attribute_interferogram(File)

    ##### Single dataset files
    elif ext in ['.dem']:
        for File in inps.file:
            atr_file = extract_attribute_dem_geo(File)
    elif ext in ['.hgt_sim']:
        for File in inps.file:
            atr_file = extract_attribute_dem_radar(File)
    elif ext in ['.UTM_TO_RDC']:
        for File in inps.file:
            atr_file = extract_attribute_lookup_table(File)
    else:
        print('No need to extract attributes for Gamma ' + ext + ' file')

    print('Done.')
    return
Example #9
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(template_file)
    key_list = list(template.keys())

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            coh_file_idx = k_list.index('coherence')
            inps.coherence_file = inps.file[coh_file_idx]
        except ValueError:
            # no coherence file available; disable the coherence-based method
            print('No coherence file found! Cannot use the coherence-based method without it.')
            inps.coherence_based = False

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = 'mask.h5'
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
            # Check trans file
            try:
                inps.trans_file = ut.get_file_list(inps.trans_file)[0]
            except Exception:
                inps.trans_file = None
                print('Warning: no mapping transformation file found! '
                      'Cannot use ' + key + ' option without it.')
                print('skip this option.')
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
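For reference, a hypothetical template snippet that the parser above would act on; the key names come from the code, while the filename and values are illustrative:

# contents of pysarApp_template.txt (assumed):
#   pysar.network.coherenceBased = yes
#   pysar.network.minCoherence   = 0.5
#   pysar.network.maskAoi.yx     = 200:800, 150:600
inps = read_template2inps('pysarApp_template.txt')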
Example #10
def extract_attribute_interferogram(fname):
    '''Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Inputs:
        fname : str, Gamma interferogram filename or path, i.e. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Output:
        atr : dict, Attributes dictionary
    '''
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    atr_file = fname + '.rsc'
    #if os.path.isfile(atr_file):
    #    return atr_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['INSAR_PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    ## Get info: date12, number of looks
    try:
        date12 = str(re.findall(r'\d{8}[-_]\d{8}', file_basename)[0])
    except IndexError:
        date12 = str(re.findall(r'\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('_', '-').split('-')
    atr['DATE12'] = ptime.yymmdd(m_date) + '-' + ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    ## Read .off and .par file
    off_file = file_dir + '/*' + date12 + lks + '.off'
    m_par_file = file_dir + '/*' + m_date + lks + '.amp.par'
    s_par_file = file_dir + '/*' + s_date + lks + '.amp.par'

    try:
        off_file = ut.get_file_list(off_file)[0]
    except IndexError:
        print('\nERROR: Cannot find .off file, expected pattern: ' + off_file)
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except IndexError:
        print('\nERROR: Cannot find master date .par file, expected pattern: ' + m_par_file)
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except IndexError:
        print('\nERROR: Cannot find slave date .par file, expected pattern: ' + s_par_file)

    #print 'read '+m_par_file
    #print 'read '+off_file
    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)

    #print 'convert Gamma attribute to ROI_PAC style'
    atr.update(par_dict)
    atr.update(off_dict)
    atr = readfile.attribute_gamma2roipac(atr)

    ## Perp Baseline Info
    #print 'extract baseline info from %s, %s and %s file' % (m_par_file, s_par_file, off_file)
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    ## LAT/LON_REF1/2/3/4
    #print 'extract LAT/LON_REF1/2/3/4 from '+m_par_file
    atr = get_lalo_ref(m_par_file, atr)

    ## Write to .rsc file
    #print 'writing >>> '+atr_file
    print('merge %s, %s and %s into %s' % (os.path.basename(m_par_file), os.path.basename(s_par_file),\
                                           os.path.basename(off_file), os.path.basename(atr_file)))
    writefile.write_roipac_rsc(atr, atr_file)

    return atr_file
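A usage sketch, reusing the Gamma filename pattern from the docstring above (the path itself is illustrative):

# writes diff_filt_HDR_130118-130129_4rlks.unw.rsc next to the input file
rsc_file = extract_attribute_interferogram('./diff_filt_HDR_130118-130129_4rlks.unw')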
Example #11
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print('number of input files: ' + str(len(inps.file)))
    print(inps.file)

    #print '\n**************** Subset *********************'
    atr = readfile.read_attribute(inps.file[0])

    ##### Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > tight
    if not inps.subset_x and not inps.subset_y and not inps.subset_lat and not inps.subset_lon:
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print('using subset info from ' + inps.reference)

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print('using subset info from ' + inps.template_file)

        # 3. Use subset from tight info
        elif inps.tight:
            if atr['FILE_TYPE'] == '.trans':
                # Non-zero area in geomap_*.trans file, accurate
                trans_rg, trans_atr = readfile.read(inps.file[0], (), 'range')
                idx_row, idx_col = np.nonzero(trans_rg)
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = box_pixel2geo(pix_box, trans_atr)
            else:
                print('ERROR: --tight option only works for geomap_*.trans file.\n')
                sys.exit(1)

            ## from LAT/LON_REF*, which is not accurate
            #lats = [atr['LAT_REF1'], atr['LAT_REF3'], atr['LAT_REF4'], atr['LAT_REF2']]
            #lons = [atr['LON_REF1'], atr['LON_REF3'], atr['LON_REF4'], atr['LON_REF2']]
            #lats = [float(i) for i in lats]
            #lons = [float(i) for i in lons]
            #lalo_buff = min([max(lats)-min(lats), max(lons)-min(lons)]) * 0.05
            #geo_box = (min(lons)-lalo_buff, max(lats)+lalo_buff, max(lons)+lalo_buff, min(lats)-lalo_buff)
            #pix_box = None
            #if not inps.fill_value: inps.fill_value = np.nan
            #print 'using subset info from scene footprint - LAT/LON_REF1/2/3/4'
        else:
            raise Exception('No subset inputs found!')
        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)

    ##### --bbox option
    if inps.trans_file:
        ## Separate files in radar and geo coord
        rdrFileList = []
        geoFileList = []
        for File in inps.file:
            atr = readfile.read_attribute(File)
            if 'X_FIRST' in list(atr.keys()):
                geoFileList.append(File)
            else:
                rdrFileList.append(File)

        ## Calculate bbox
        rdrFile = rdrFileList[0]
        atr_rdr = readfile.read_attribute(rdrFile)
        if inps.subset_lat and inps.subset_lon:
            print('use subset input in lat/lon')
            print('calculate corresponding bounding box in radar coordinate.')
            geo_box = (inps.subset_lon[0], inps.subset_lat[1],
                       inps.subset_lon[1], inps.subset_lat[0])
            pix_box = bbox_geo2radar(geo_box, atr_rdr, inps.trans_file)
        else:
            print('use subset input in y/x')
            print('calculate corresponding bounding box in geo coordinate.')
            pix_box = (inps.subset_x[0], inps.subset_y[0], inps.subset_x[1],
                       inps.subset_y[1])
            geo_box = bbox_radar2geo(pix_box, atr_rdr, inps.trans_file)
        print('geo   box: ' + str(geo_box))
        print('pixel box: ' + str(pix_box))

        ## Subset files
        inps.fill_value = 0
        print('--------------------------------------------')
        print('subsetting dataset in geo coord geo_box: ' + str(geo_box))
        inps = subset_box2inps(inps, None, geo_box)
        subset_file_list(geoFileList, inps)
        print('--------------------------------------------')
        print('subsetting dataset in radar coord pix_box: ' + str(pix_box))
        inps = subset_box2inps(inps, pix_box, None)
        subset_file_list(rdrFileList, inps)

    else:
        ##### Subset files
        subset_file_list(inps.file, inps)

    print('Done.')
    return
Example #12
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except IndexError:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in list(atr.keys()):
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print('reading mask from file: ' + inps.mask_file)
    mask = readfile.read(inps.mask_file)[0].flatten(order='F')
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print('total            pixel number: %d' % pix_num)
    print('estimating using pixel number: %d' % msk_num)

    ##### Read DEM
    print('read DEM from file: ' + inps.dem_file)
    dem = readfile.read(inps.dem_file)[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print('considering the incidence angle of each pixel ...')
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten(order='F')
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print('polynomial order: %d' % inps.poly_order)

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print(
        'Estimating the tropospheric effect between the differences of the subsequent epochs and DEM'
    )

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print('number of acquisitions: ' + str(date_num))
    try:
        ref_date = atr['ref_date']
    except KeyError:
        ref_date = date_list[0]

    print('----------------------------------------------------------')
    print('correlation of DEM with each time-series epoch:')
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten(order='F')

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print('%s: %.2f' % (date, cc))
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print('----------------------------------------------------------')
    print('Average Correlation of DEM with time-series epochs: %.2f' %
          average_phase_height_corr)

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten(order='F')
        data2 = h5[k].get(date2)[:].flatten(order='F')
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print('----------------------------------------------------------')
    print('removing the stratified tropospheric delay from each epoch')
    print('writing >>> ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.items():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print('Done.')
    return inps.outfile
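To recap the estimation above: each epoch's stratified delay is modeled as a polynomial of the incidence-angle-corrected elevation and removed by least squares. A sketch of the model for poly_order=2:

# phase(h) ~ par[0]*h**2 + par[1]*h + par[2]
# par        = np.linalg.pinv(A).dot(phase[mask])  # fit on masked pixels (design matrix A)
# trop_delay = B.dot(par)                          # evaluate on all pixels (design matrix B)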
Example #13
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print('input file(s) to be modified: ' + str(inps.file))
    print('number of interferograms: ' + str(len(date12_orig)))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if inps.reset:
        print(
            '----------------------------------------------------------------------------'
        )
        for file in inps.file:
            reset_pairs(file)

        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print(rmCmd)
            os.system(rmCmd)

        return

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based, inps.start_date, inps.end_date]):
        # Display network for manually modification when there is no other modification input.
        print('No input option found to remove interferogram')
        print('To manually modify network, please use --manual option ')
        return

    # Convert index-range input to a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print('Unrecognized input: ' + index)
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print(
            '----------------------------------------------------------------------------'
        )
        print('use reference pairs info from file: ' + inps.reference_file)
        print('number of interferograms in reference: ' +
              str(len(date12_to_keep)))
        print('date12 not in reference file:')
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print(
            '----------------------------------------------------------------------------'
        )
        print(
            'use coherence-based network modification from coherence file: ' +
            inps.coherence_file)
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.trans_file:
            print('input AOI in (lon0, lat1, lon1, lat0): ' +
                  str(inps.aoi_geo_box))
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.trans_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print('input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box))

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print(
                'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            )
            print('date12 with 1) average coherence < ' +
                  str(inps.min_coherence) + ' AND 2) not in MST network: ')
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print('date12 with average coherence < ' + str(inps.min_coherence))
            mst_date12_list = []

        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with perpendicular spatial baseline > ' +
              str(inps.max_perp_baseline) + ' meters')
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with temporal baseline > ' +
              str(inps.max_temp_baseline) + ' days')
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print(
            '----------------------------------------------------------------------------'
        )
        print('drop date12/pair with the following index number:')
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print(str(index) + '    ' + date12)

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs including the following dates: \n' +
              str(inps.exclude_date))
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date earlier than start-date: ' +
              inps.start_date)
        min_date = int(ptime.yyyymmdd(inps.start_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date later than end-date: ' + inps.end_date)
        max_date = int(ptime.yyyymmdd(inps.end_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print('date12 selected to remove:')
        print(date12_click)
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print(
        '----------------------------------------------------------------------------'
    )
    print('number of interferograms to remove: ' + str(len(date12_to_rmv)))
    print('list   of interferograms to remove:')
    print(date12_to_rmv)

    ##### Calculated date12_to_drop v.s. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5,
                                            atr,
                                            ifgram_list_all,
                                            print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print('Calculated date12 to drop is the same as the existing marked input file, skip updating file attributes.')
        return

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print('update mask file for input ' + k + ' file based on ' +
                      Modified_File)
                inps.mask_file = 'mask.h5'
                print('writing >>> ' + inps.mask_file)
                ut.nonzero_mask(Modified_File, inps.mask_file)
            elif k == 'coherence':
                print('update average spatial coherence for input ' + k +
                      ' file based on: ' + Modified_File)
                outFile = 'averageSpatialCoherence.h5'
                print('writing >>> ' + outFile)
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print('\nplot modified network and save to file.')
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            if inps.mask_file:
                plotCmd += ' --mask ' + inps.mask_file
            print(plotCmd)
            os.system(plotCmd)

        print('Done.')
        return
    else:
        print('No new interferograms to drop, skip update.')
        return
Example #14
def write_kmz_file(data, atr, out_name_base, inps=None):
    ''' Generate Google Earth KMZ file for input data matrix.
    Inputs:
        data - 2D np.array in int/float, data matrix to write
        out_name_base - string, output file name base
        atr  - dict, containing the following attributes:
               WIDTH/FILE_LENGTH : required, file size
               X/Y_FIRST/STEP    : required, for lat/lon spatial coverage
               ref_x/y           : optional, column/row number of reference pixel
               PROJECT_NAME      : optional, for KMZ folder name
        inps - Namespace, optional, input options for display
    Output:
        kmz_file - string, output KMZ filename
    Example:
        import pysar._readfile as readfile
        import pysar.view as pview
        import pysar.save_kml as save_kml
        fname = 'geo_velocity_masked.h5'
        data, atr = readfile.read(fname)
        out_name_base = pview.auto_figure_title(fname, None)
        save_kml.write_kmz_file(data, atr, out_name_base)
    '''
    if not inps:
        inps = cmdLineParse()

    if not inps.ylim:
        inps.ylim = [np.nanmin(data), np.nanmax(data)]

    west, east, south, north = ut.four_corners(atr)

    ## 2.1 Make PNG file - Data
    print('plotting data ...')

    # Figure size
    if not inps.fig_size:
        fig_scale = min(pysar.figsize_single_min/min(data.shape),\
                        pysar.figsize_single_max/max(data.shape))
        inps.fig_size = [np.rint(i * fig_scale * 2) / 2 for i in data.shape]
    print('create figure in size: ' + str(inps.fig_size))
    fig = plt.figure(figsize=inps.fig_size, frameon=False)
    ax = fig.add_axes([0., 0., 1., 1.])
    ax.set_axis_off()

    print('colormap: ' + inps.colormap)
    inps.colormap = plt.get_cmap(inps.colormap)

    # Plot - data matrix
    ax.imshow(data,
              aspect='auto',
              cmap=inps.colormap,
              vmin=inps.ylim[0],
              vmax=inps.ylim[1])

    # Plot - reference pixel
    if inps.disp_seed == 'yes':
        try:
            xref = int(atr['ref_x'])
            yref = int(atr['ref_y'])
            ax.plot(xref, yref, 'ks', ms=inps.seed_size)
            print('show reference point')
        except KeyError:
            inps.disp_seed = 'no'
            print('Cannot find reference point info!')

    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    ax.set_xlim([0, width])
    ax.set_ylim([length, 0])

    data_png_file = out_name_base + '.png'
    print('writing ' + data_png_file)
    plt.savefig(data_png_file,
                pad_inches=0.0,
                transparent=True,
                dpi=inps.fig_dpi)

    ## 2.2 Making PNG file - colorbar
    pc = plt.figure(figsize=(1, 8))
    cax = pc.add_subplot(111)
    norm = mpl.colors.Normalize(vmin=inps.ylim[0], vmax=inps.ylim[1])
    cbar = mpl.colorbar.ColorbarBase(cax,
                                     cmap=inps.colormap,
                                     norm=norm,
                                     orientation='vertical')

    cbar.set_label(inps.cbar_label + ' [' + inps.disp_unit + ']')
    cbar.locator = mpl.ticker.MaxNLocator(nbins=inps.cbar_bin_num)
    cbar.update_ticks()

    pc.subplots_adjust(left=0.2, bottom=0.3, right=0.4, top=0.7)
    pc.patch.set_facecolor('white')
    pc.patch.set_alpha(0.7)

    cbar_png_file = out_name_base + '_cbar.png'
    print('writing ' + cbar_png_file)
    pc.savefig(cbar_png_file,
               bbox_inches='tight',
               facecolor=pc.get_facecolor(),
               dpi=inps.fig_dpi)

    ## 2.3 Generate KML file
    print('generating kml file ...')
    try:
        doc = KML.kml(KML.Folder(KML.name(atr['PROJECT_NAME'])))
    except KeyError:
        doc = KML.kml(KML.Folder(KML.name('PySAR product')))

    # Add data png file
    slc = KML.GroundOverlay(KML.name(data_png_file), KML.Icon(KML.href(data_png_file)),\
                            KML.altitudeMode('clampToGround'),\
                            KML.LatLonBox(KML.north(str(north)), KML.east(str(east)),\
                                          KML.south(str(south)), KML.west(str(west))))
    doc.Folder.append(slc)

    # Add colorbar png file
    cb_rg = min(north - south, east - west)
    cb_N = (north + south) / 2.0 + 0.5 * 0.5 * cb_rg
    cb_W = east + 0.1 * cb_rg

    ## Use mean height from an existing DEM file
    if not inps.cbar_height:
        try:
            dem_file = ut.get_file_list(['demGeo*.h5', '*.dem', 'demRadar.h5', 'radar*.hgt'])[0]
            print('use mean height from file: ' + dem_file + ' + 1000 m as colorbar height.')
            inps.cbar_height = np.rint(np.nanmean(readfile.read(dem_file)[0])) + 1000.0
        except Exception:
            # no DEM found; fall through to the clampToGround branch below
            pass
    elif str(inps.cbar_height).lower().endswith('ground'):
        inps.cbar_height = None

    if inps.cbar_height:
        print('set colorbar in height: %.2f m' % inps.cbar_height)
        slc1 = KML.GroundOverlay(KML.name('colorbar'), KML.Icon(KML.href(cbar_png_file)),\
                                 KML.altitude(str(inps.cbar_height)),KML.altitudeMode('absolute'),\
                                 KML.LatLonBox(KML.north(str(cb_N)),KML.south(str(cb_N-0.5*cb_rg)),\
                                               KML.west( str(cb_W)),KML.east( str(cb_W+0.14*cb_rg))))
    else:
        print('set colorbar clampToGround')
        slc1 = KML.GroundOverlay(KML.name('colorbar'), KML.Icon(KML.href(cbar_png_file)),\
                                 KML.altitudeMode('clampToGround'),\
                                 KML.LatLonBox(KML.north(str(cb_N)),KML.south(str(cb_N-0.5*cb_rg)),\
                                               KML.west( str(cb_W)),KML.east( str(cb_W+0.14*cb_rg))))
    doc.Folder.append(slc1)

    # Write KML file
    kmlstr = etree.tostring(doc, pretty_print=True)
    kml_file = out_name_base + '.kml'
    print('writing ' + kml_file)
    f = open(kml_file, 'wb')  # etree.tostring() returns bytes
    f.write(kmlstr)
    f.close()

    ## 2.4 Generate KMZ file
    kmz_file = out_name_base + '.kmz'
    print('writing ' + kmz_file)
    cmdKMZ = 'zip ' + kmz_file + ' ' + kml_file + ' ' + data_png_file + ' ' + cbar_png_file
    os.system(cmdKMZ)

    cmdClean = 'rm ' + kml_file
    print(cmdClean)
    os.system(cmdClean)
    cmdClean = 'rm ' + data_png_file
    print(cmdClean)
    os.system(cmdClean)
    cmdClean = 'rm ' + cbar_png_file
    print(cmdClean)
    os.system(cmdClean)

    return kmz_file
Example #15
def load_file(fileList, inps_dict=dict(), outfile=None, file_type=None):
    '''Load input file(s) into one HDF5 file 
    It supports ROI_PAC files only for now.
    Inputs:
        fileList  - string / list of string, path of files to load
        inps_dict - dict, including the following attributes
                    PROJECT_NAME   : KujuAlosAT422F650  (extra attribute dictionary to add to output file)
                    timeseries_dir : directory of time series analysis, e.g. KujuAlosAT422F650/PYSAR
                    insar_processor: InSAR processor, roipac, isce, gamma, doris
        outfile   - string, output file name
        file_type - string, group name for output HDF5 file, interferograms, coherence, dem, etc.
    Output:
        outfile - string, output file name
    Example:
        unwrapIfgram.h5 = load_file('filt*.unw', inps_dict=vars(inps))
    '''
    # Get project_name from input template file
    if 'project_name' not in list(inps_dict.keys()) and 'template_file' in list(inps_dict.keys()):
        template_filename_list = [os.path.basename(i) for i in inps_dict['template_file']]
        try:
            template_filename_list.remove('pysarApp_template.txt')
        except ValueError:
            pass
        if template_filename_list:
            inps_dict['project_name'] = os.path.splitext(template_filename_list[0])[0]

    # Input file(s) info
    fileList = ut.get_file_list(fileList, abspath=True)
    if not fileList:
        return None

    ##### Prepare attributes file
    processor = inps_dict['insar_processor']
    print('--------------------------------------------')
    print('preparing attributes files using prep_%s.py ...' % processor)
    # prepare multiple files input for cmd calling
    files_input = ' '.join(fileList)
    # call prepare_*.py
    if   processor == 'gamma' :  prepCmd = 'prep_gamma.py ' +files_input;   os.system(prepCmd)
    elif processor == 'roipac':  prepCmd = 'prep_roipac.py '+files_input;   os.system(prepCmd)
    else:
        print('Unsupported InSAR processor: '+processor)
        print('Skip preparing attributes files')

    print('----------------------------')
    print('loading files ...')
    atr = readfile.read_attribute(fileList[0])
    k = atr['FILE_TYPE']
    print('Input file(s) is '+atr['PROCESSOR']+' '+k)

    # Get output file type
    if not file_type:
        if k in ['.unw']:  file_type = 'interferograms'
        elif k in ['.cor']:  file_type = 'coherence'
        elif k in ['.int']:  file_type = 'wrapped'
        elif k in ['.byt']:  file_type = 'snaphu_connect_component'
        elif k in ['.msk']:  file_type = 'mask'
        elif k in ['.hgt','.dem','dem','.hgt_sim']:
            file_type = 'dem'
        else:
            file_type = k

    # Get output file name
    if not outfile:
        # output file basename
        if file_type == 'interferograms':  outfile = 'unwrapIfgram.h5'
        elif file_type == 'coherence':  outfile = 'coherence.h5'
        elif file_type == 'wrapped':  outfile = 'wrapIfgram.h5'
        elif file_type == 'snaphu_connect_component':  outfile = 'snaphuConnectComponent.h5'
        elif file_type == 'mask':  outfile = 'mask.h5'
        elif file_type == 'dem':
            if 'Y_FIRST' in list(atr.keys()):
                outfile = 'demGeo.h5'
            else:
                outfile = 'demRadar.h5'
        elif file_type in ['.trans','.utm_to_rdc','.UTM_TO_RDC']:
            outfile = os.path.basename(fileList[0])
        else:
            warnings.warn('Unrecognized file type: '+file_type)

        # output directory
        if 'timeseries_dir' in list(inps_dict.keys()) and inps_dict['timeseries_dir']:
            outdir = inps_dict['timeseries_dir']
        else:
            outdir = os.path.abspath(os.getcwd())
        outfile = outdir+'/'+outfile
    outfile = os.path.abspath(outfile)

    # Convert 
    if file_type in multi_group_hdf5_file:
        outfile = load_multi_group_hdf5(file_type, fileList, outfile, inps_dict)[0]

    elif file_type in single_dataset_hdf5_file:
        outfile = load_single_dataset_hdf5(file_type, fileList[-1], outfile, inps_dict)

    elif file_type in ['.trans','.utm_to_rdc','.UTM_TO_RDC']:
        outfile = copy_file(fileList[-1], os.path.dirname(outfile))
    else:
        warnings.warn('Unsupported file type: '+file_type)

    return outfile
Example #16
def auto_path_miami(inps, template={}):
    '''Auto File Path Setting for Geodesy Lab - University of Miami'''
    print('Use auto path setting in University of Miami.'+\
          '(To turn it off, change miami_path value to False in pysar/__init__.py)')
    # PYSAR working directory
    if not inps.timeseries_dir:
        inps.timeseries_dir = os.getenv('SCRATCHDIR')+'/'+inps.project_name+'/PYSAR'

    ##### .unw/.cor/.int files
    process_dir = os.getenv('SCRATCHDIR')+'/'+inps.project_name+'/PROCESS'
    print("PROCESS directory: "+process_dir)
    if inps.insar_processor == 'roipac':
        if not inps.unw or inps.unw == 'auto':   inps.unw = process_dir+'/DONE/IFGRAM*/filt_*.unw'
        if not inps.cor or inps.cor == 'auto':   inps.cor = process_dir+'/DONE/IFGRAM*/filt_*rlks.cor'
        #if not inps.int or inps.int == 'auto':   inps.int = process_dir+'/DONE/IFGRAM*/filt_*rlks.int'
    elif inps.insar_processor == 'gamma':
        if not inps.unw or inps.unw == 'auto':   inps.unw = process_dir+'/DONE/IFGRAM*/diff_*rlks.unw'
        if not inps.cor or inps.cor == 'auto':   inps.cor = process_dir+'/DONE/IFGRAM*/filt_*rlks.cor'
        #if not inps.int or inps.int == 'auto':   inps.int = process_dir+'/DONE/IFGRAM*/diff_*rlks.int'

    ##### master interferogram for geomap*.trans and DEM in radar coord
    if all(fname and fname != 'auto' for fname in [inps.trans, inps.dem_radar, inps.dem_geo]):
        return inps

    try:
        m_date12 = np.loadtxt(process_dir+'/master_ifgram.txt', dtype=str).tolist()
    except OSError:
        try:
            m_date12 = next(os.walk(process_dir+'/GEO'))[1][0].split('geo_')[1]
        except (StopIteration, IndexError):
            pass

    if not inps.trans or inps.trans == 'auto':
        try:
            if inps.insar_processor == 'roipac':
                inps.trans = process_dir+'/GEO/*'+m_date12+'*/geomap*.trans'
            elif inps.insar_processor == 'gamma':
                inps.trans = process_dir+'/SIM/sim_'+m_date12+'/sim_*.UTM_TO_RDC'
        except NameError:
            warnings.warn('No master interferogram found! Cannot locate mapping transformation file for geocoding!')

    if not inps.dem_radar or inps.dem_radar == 'auto':
        try:
            if inps.insar_processor == 'roipac':
                inps.dem_radar = process_dir+'/DONE/*'+m_date12+'*/radar*.hgt'
            elif inps.insar_processor == 'gamma':
                inps.dem_radar = process_dir+'/SIM/sim_'+m_date12+'/sim_*.hgt_sim'
        except NameError:
            warnings.warn('No master interferogram found! Cannot locate DEM in radar coord!')

    # Use DEMg/DEM option if dem_geo is not specified in pysar option
    dem_dir = os.getenv('SCRATCHDIR')+'/'+inps.project_name+'/DEM'
    if not inps.dem_geo or inps.dem_geo == 'auto':
        inps.dem_geo = []
        if os.path.isdir(dem_dir):
            inps.dem_geo = [dem_dir+'/*.dem']
        elif inps.insar_processor == 'gamma':
            inps.dem_geo = [process_dir+'/SIM/sim_'+m_date12+'/sim_*.utm.dem']

        if   'DEMg' in list(template.keys()):  inps.dem_geo.append(template['DEMg'])
        elif 'DEM'  in list(template.keys()):  inps.dem_geo.append(template['DEM'])
        try:
            inps.dem_geo = ut.get_file_list(inps.dem_geo)[0]
        except IndexError:
            inps.dem_geo = None

        if not inps.dem_geo:
            warnings.warn('Cannot locate DEM in geo coord!')

    return inps
Example #17
def main(argv):
    
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print('input file(s): '+str(len(inps.file)))
    print(inps.file)
    
    #print '\n*************** Phase Ramp Removal ***********************'
    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Read mask file if provided
    if inps.mask_file == 'no':  inps.mask_file = None
    if inps.mask_file:
        try:
            mask_atr = readfile.read_attribute(inps.mask_file)
        except Exception:
            print('Cannot open mask file: '+inps.mask_file)
            inps.mask_file = None

    # Update mask for multiple surfaces
    if inps.ysub:
        # Read mask if available, otherwise use all pixels
        if inps.mask_file:
            Mask_temp = readfile.read(inps.mask_file)[0]
            Mask = np.zeros((length, width), dtype=np.float32)
            Mask[Mask_temp!=0] = 1
        else:
            Mask = np.ones((length, width))
        
        # Update mask for multiple surface from inps.ysub
        mask_multiSurface = np.zeros((length,width), dtype=np.float32)
        surfNum = len(inps.ysub)//2
        if surfNum == 1:
            mask_multiSurface = Mask
        else:
            i = 0
            mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i+1],:] = Mask[inps.ysub[2*i]:inps.ysub[2*i+1],:]
            for i in range(1,surfNum):
                if inps.ysub[2*i] < inps.ysub[2*i-1]:
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i-1],:]  += Mask[inps.ysub[2*i]:inps.ysub[2*i-1],:]*(i+1)
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i-1],:]  /= 2
                    mask_multiSurface[inps.ysub[2*i-1]:inps.ysub[2*i+1],:] = Mask[inps.ysub[2*i-1]:inps.ysub[2*i+1],:]*(i+1)
                else:
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i+1],:]   = Mask[inps.ysub[2*i]:inps.ysub[2*i+1],:]*(i+1)
         
        # Write updated mask for multiple surfaces into file
        outFile = 'mask_'+str(surfNum)+inps.surface_type+'.h5'
        atr['FILE_TYPE'] = 'mask'
        writefile.write(mask_multiSurface, atr, outFile)
        print('saved mask to '+outFile)

    ############################## Removing Phase Ramp #######################################
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(len(inps.file))

    if len(inps.file) == 1:
        rm.remove_surface(inps.file[0], inps.surface_type, inps.mask_file, inps.outfile, inps.ysub)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(rm.remove_surface)(file, inps.surface_type, inps.mask_file, ysub=inps.ysub)
            for file in inps.file)

    else:
        for File in inps.file:
            print('------------------------------------------')
            rm.remove_surface(File, inps.surface_type, inps.mask_file, ysub=inps.ysub)
    
    print('Done.')
    return
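A toy walk-through of the multi-surface banding above, with illustrative row indices: for inps.ysub = [0, 100, 80, 200] (two surfaces over rows 0-100 and 80-200), the loop yields

# rows   0- 80 -> 1    (surface 1 only)
# rows  80-100 -> 1.5  (overlap, averaged: (1 + 2) / 2)
# rows 100-200 -> 2    (surface 2 only)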
Example #18
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    if inps.reset:
        print(
            '----------------------------------------------------------------------------'
        )
        for file in inps.file:
            remove_reference_pixel(file)
        return

    ##### Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print('reading reference info from template: ' + inps.template_file)
        inps = read_seed_template2inps(inps.template_file, inps)
    if inps.reference_file:
        print('reading reference info from reference: ' + inps.reference_file)
        inps = read_seed_reference2inps(inps.reference_file, inps)

    ## Do not use ref_lat/lon input for file in radar-coord
    #if not 'X_FIRST' in atr.keys() and (inps.ref_lat or inps.ref_lon):
    #    print 'Lat/lon reference input is disabled for file in radar coord.'
    #    inps.ref_lat = None
    #    inps.ref_lon = None

    # Convert ref_lat/lon to ref_y/x
    if inps.ref_lat and inps.ref_lon:
        if 'X_FIRST' in list(atr.keys()):
            inps.ref_y = subset.coord_geo2radar(inps.ref_lat, atr, 'lat')
            inps.ref_x = subset.coord_geo2radar(inps.ref_lon, atr, 'lon')
        else:
            # Convert lat/lon to az/rg for radar coord file using geomap*.trans file
            inps.ref_y, inps.ref_x = ut.glob2radar(np.array(inps.ref_lat), np.array(inps.ref_lon),\
                                                   inps.trans_file, atr)[0:2]
        print('Input reference point in lat/lon: ' +
              str([inps.ref_lat, inps.ref_lon]))
    print('Input reference point in   y/x  : ' + str([inps.ref_y, inps.ref_x]))

    # Do not use ref_y/x outside of data coverage
    if (inps.ref_y is not None and inps.ref_x is not None
            and not (0 <= inps.ref_y < length and 0 <= inps.ref_x < width)):
        inps.ref_y = None
        inps.ref_x = None
        print('WARNING: input reference point is OUT of data coverage!')
        print('Continue with other method to select reference point.')

    # Do not use ref_y/x in masked-out area
    if inps.ref_y is not None and inps.ref_x is not None and inps.mask_file:
        print('mask: ' + inps.mask_file)
        mask = readfile.read(inps.mask_file)[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            print('WARNING: input reference point is in masked OUT area!')
            print('Continue with other method to select reference point.')

    ##### Select method
    if inps.ref_y is not None and inps.ref_x is not None:
        inps.method = 'input-coord'
    elif inps.coherence_file:
        if os.path.isfile(inps.coherence_file):
            inps.method = 'max-coherence'
        else:
            inps.coherence_file = None

    if inps.method == 'manual':
        inps.parallel = False
        print('Parallel processing is disabled for manual seeding method.')

    ##### Seeding file by file
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    if len(inps.file) == 1:
        seed_file_inps(inps.file[0], inps, inps.outfile)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(seed_file_inps)(file, inps)
                                   for file in inps.file)
    else:
        for File in inps.file:
            seed_file_inps(File, inps)

    print('Done.')
    return
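The coverage and mask checks above reduce to two guards on the candidate pixel. A standalone sketch (the function name and toy mask below are mine, not from the source):

import numpy as np

def validate_ref_point(y, x, length, width, mask=None):
    """Return (y, x) if the reference pixel is usable, (None, None) otherwise."""
    if not (0 <= y < length and 0 <= x < width):
        print('WARNING: reference point is out of data coverage')
        return None, None
    if mask is not None and mask[y, x] == 0:
        print('WARNING: reference point is in a masked-out area')
        return None, None
    return y, x

mask = np.ones((100, 200)); mask[:10, :] = 0
print(validate_ref_point(5, 50, 100, 200, mask))    # (None, None): masked out
print(validate_ref_point(50, 50, 100, 200, mask))   # (50, 50): valid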
Example #19
def cmdLineParse():
    parser = argparse.ArgumentParser(description='Generate transect/profile along a line',\
                                     formatter_class=argparse.RawTextHelpFormatter,\
                                     epilog=EXAMPLE)

    parser.add_argument('file',
                        nargs='+',
                        help='input file to show transect')
    parser.add_argument('-m',
                        '--min',
                        dest='disp_min',
                        type=float,
                        help='minimum value for data display')
    parser.add_argument('-M',
                        '--max',
                        dest='disp_max',
                        type=float,
                        help='maximum value for data display')
    parser.add_argument('-u',
                        '--unit',
                        dest='disp_unit',
                        default='cm',
                        help='unit for data display. Default: cm')
    parser.add_argument('--offset',
                        dest='disp_offset',
                        type=float,
                        default=3.0,
                        help='offset between adjacent data profiles')
    parser.add_argument('--interpolation', default='nearest', choices=['nearest','bilinear','cubic'],\
                        help='interpolation method while extracting profile along the line')

    # Start / End Point
    end = parser.add_argument_group('Start and End Point of Profile')
    end.add_argument('-s','--start-yx', dest='start_yx', type=int, nargs=2,\
                     help='start point of the profile in pixel number [y, x]')
    end.add_argument('-e','--end-yx', dest='end_yx', type=int, nargs=2,\
                     help='end   point of the profile in pixel number [y, x]')
    end.add_argument('--start-lalo', dest='start_lalo', type=float, nargs=2,\
                     help='start point of the profile in [lat, lon]')
    end.add_argument('--end-lalo', dest='end_lalo', type=float, nargs=2,\
                     help='end   point of the profile in [lat, lon]')
    end.add_argument('--line-file', dest='lola_file',\
                     help='file with start and end point info in lon lat, same as GMT format.\n'
                          'i.e. transect_lonlat.xy:\n'
                          '>\n'
                          '131.1663    33.1157\n'
                          '131.2621    33.0860')

    # DEM
    dem = parser.add_argument_group('DEM', 'display topography in the bottom')
    dem.add_argument('-d', '--dem', help='DEM file')
    dem.add_argument('--dem-min',
                     dest='dem_disp_min',
                     type=float,
                     help='min display value for DEM display, in km')
    dem.add_argument('--dem-max',
                     dest='dem_disp_max',
                     type=float,
                     help='max display value for DEM display, in km')

    # Output
    outfile = parser.add_argument_group('Output',
                                        'Save figure and write to file(s)')
    outfile.add_argument('--save', dest='save_fig', action='store_true',\
                         help='save the figure')
    outfile.add_argument('--nodisplay', dest='disp_fig', action='store_false',\
                         help='save and do not display the figure')
    outfile.add_argument('-o','--outfile',\
                                help="save the figure with assigned filename.\n"
                                     "By default, it's calculated based on inputs.")

    # Figure
    fig = parser.add_argument_group('Figure', 'Figure settings for display')
    fig.add_argument('--dpi',
                     dest='fig_dpi',
                     type=int,
                     default=300,
                     help='DPI - dot per inch - for display/write')
    fig.add_argument('--figsize', dest='fig_size', type=float, nargs=2, default=[7.0, 6.0],\
                     help='figure size in inches - width and height')
    fig.add_argument('--figext', dest='outfile_ext',\
                     default='.png', choices=['.emf','.eps','.pdf','.png','.ps','.raw','.rgba','.svg','.svgz'],\
                     help='File extension for figure output file')
    fig.add_argument('--fontsize',
                     dest='font_size',
                     type=int,
                     help='font size')
    fig.add_argument('--ms','--markersize', dest='marker_size', type=float, default=2.0,\
                     help='Point marker size. Default: 2.0')

    inps = parser.parse_args()
    inps.file = ut.get_file_list(inps.file)
    if inps.outfile or not inps.disp_fig:
        inps.save_fig = True
    return inps
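The core of the transect extraction this parser feeds is sampling the data along the start-to-end line. A minimal sketch of the nearest-neighbour case (the parser's default) using scipy.ndimage.map_coordinates; the array and coordinates below are toy data, not the source implementation:

import numpy as np
from scipy.ndimage import map_coordinates

data = np.random.rand(100, 200).astype(np.float32)       # stand-in for the input file
y0, x0 = 10, 20                                          # --start-yx
y1, x1 = 80, 150                                         # --end-yx
num = int(np.hypot(y1 - y0, x1 - x0))                    # ~one sample per pixel of line length
rows = np.linspace(y0, y1, num)
cols = np.linspace(x0, x1, num)
profile = map_coordinates(data, [rows, cols], order=0)   # order=0: nearest neighbour
print(profile.shape)                                     # (num,)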
Example #20
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print('convert DEM file to ROIPAC format')
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in list(atr_dem.keys()):
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print('*******************************************************************************')
    print('Downloading weather model data ...')

    ## Get Grib Source
    if   inps.weather_model in ['ECMWF','ERA-Interim']:   inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA'  :                   inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':                   inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR' :                   inps.grib_source = 'NARR'
    else: raise Exception('Unrecognized weather model: '+inps.weather_model)
    print('grib source: '+inps.grib_source)

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print('Store weather data into directory: '+inps.weather_dir)

    # Get date list to download
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print('read date list info from: '+inps.timeseries_file)
    else:
        dateList = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print('read date list info from: '+inps.date_list_file)

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print('Time of closest available product: '+inps.hour)

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(dateList, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print('Download completed, exit as planned.')
        return

    print('*******************************************************************************')
    print('Calculating delay for each epoch.')

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print('incidence angle: '+str(inps.incidence_angle))
    else:
        print('calculating incidence angle ...')
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle*np.pi/180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source+'.h5'
    print('writing >>> '+tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
    print('writing >>> '+inps.out_file)
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    ref_date = atr.get('ref_date', dateList[0])
    print('calculating phase delay on reference date: '+ref_date)
    ref_date_grib_file = None
    for fname in inps.grib_file_list:
        if ref_date in fname:
            ref_date_grib_file = fname
    phs_ref = get_delay(ref_date_grib_file, atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        grib_file = inps.grib_file_list[i] 
        date = re.findall(r'\d{8}', grib_file)[0]

        # Get phase delay
        if date != ref_date:
            print('calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file)))
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print('writing to HDF5 files ...')
        data = h5timeseries['timeseries'].get(date)[:]
        dset  = group_tropCor.create_dataset(date, data=data-phs, compression='gzip')
        dset  = group_trop.create_dataset(date, data=phs, compression='gzip')

    ## Write Attributes
    for key,value in atr.items():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value
    
    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm '+inps.dem_file+' '+inps.dem_file+'.rsc '
        print(rmCmd)
        os.system(rmCmd)
    
    print('Done.')

    return
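On the incidence angle used above: converting it to radians suggests the zenith delay from the weather model is mapped onto the radar line of sight by dividing by cos(incidence). A hedged sketch of that relation (assumed from the conversion step, not read from get_delay):

import numpy as np

def zenith2slant(delay_zenith, incidence_deg):
    """Map a zenith tropospheric delay to slant range (assumed cosine mapping)."""
    return delay_zenith / np.cos(np.deg2rad(incidence_deg))

print(zenith2slant(2.3, 34.0))   # a 2.3 m zenith delay at 34 deg incidence -> ~2.77 m slant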
Example #21
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROIPAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print('------------------------------------------------------')
    print('geocoding file: ' + fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print('reading lookup table file: ' + lut_file)
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if the input radar file has been subsetted.
    if 'subset_x0' in list(atr_rdr.keys()):
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print(
            '\tinput radar coord file has been subsetted, adjust lookup table value'
        )

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print('geocoding using scipy.interpolate.RegularGridInterpolator ...')
    data_geo = np.full((len_geo, wid_geo), fill_value)  # initialize output grid with fill value
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print('update attributes')
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
            prog_bar.close()
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print('reading ' + fname)
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print('update attributes')
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print('writing >>> ' + fname_out)
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print('Time used: %02d hours %02d mins %02d secs' % (h, m, s))
    return fname_out
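For reference, the interpolation pattern at the heart of this function in isolation: RegularGridInterpolator is built on the regular radar grid and then evaluated at the fractional (az, rg) positions read from the lookup table. A toy example with made-up grid values:

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

data = np.arange(12, dtype=np.float32).reshape(3, 4)   # toy radar-coord dataset
pts_rdr = (np.arange(3), np.arange(4))                 # row and column axes
func = RGI(pts_rdr, data, method='nearest', bounds_error=False, fill_value=np.nan)

pts_geo = np.array([[0.2, 1.7],                        # fractional (az, rg) pairs,
                    [1.9, 0.1]])                       # as a lookup table would provide
print(func(pts_geo))                                   # -> [2. 8.]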