Example #1
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    # multilooking
    if len(inps.file) == 1:
        multilook_file(inps.file[0], inps.lks_y, inps.lks_x, inps.outfile)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(multilook_file)(file, inps.lks_y, inps.lks_x)
            for file in inps.file)
    else:
        for File in inps.file:
            print '-------------------------------------------'
            multilook_file(File, inps.lks_y, inps.lks_x)

    print 'Done.'
    return
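The ut.check_parallel() call is a recurring pattern in these examples: it returns a worker count, a possibly-downgraded parallel flag, and joblib's Parallel/delayed pair. Below is a minimal sketch of what such a helper might look like; it is a hypothetical reimplementation for illustration, not the actual PySAR code.

# Hypothetical sketch of a check_parallel() helper (NOT the actual PySAR
# implementation): cap the worker count at the number of files and fall
# back to serial mode when joblib is missing or only one file is given.
import multiprocessing

def check_parallel(file_num):
    if file_num <= 1:
        print 'parallel processing is disabled for one input file'
        return 1, False, None, None
    try:
        from joblib import Parallel, delayed
    except ImportError:
        print 'Cannot import joblib, continue with serial processing'
        return 1, False, None, None
    num_cores = min(multiprocessing.cpu_count(), file_num)
    print 'parallel processing using %d cores ...' % num_cores
    return num_cores, True, Parallel, delayed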
Example #2
def main(argv):
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    lat = float(argv[0])
    lon = float(argv[1])

    try:
        trans_file = argv[2]
    except:
        trans_file = ut.get_file_list('geomap*.trans')[0]

    try:
        radar_file = argv[3]
    except:
        radar_file = 'unwrapIfgram.h5'

    atr_rdr = readfile.read_attribute(radar_file)

    print 'input geo coord: lat=%.4f, lon=%.4f' % (lat, lon)

    y, x = ut.glob2radar(np.array(lat), np.array(lon), trans_file,
                         atr_rdr)[0:2]
    print 'corresponding radar coord: y=%d, x=%d' % (y, x)

    return
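A hypothetical invocation of the function above; the script name is an assumption for illustration, the default file names come from the code.

# assuming the snippet above is saved as glob2radar.py and the default
# lookup table / radar file are present in the working directory:
#
#   python glob2radar.py 33.1000 131.1800 geomap_4rlks.trans unwrapIfgram.h5
#
# console output would look like (values illustrative):
#   input geo coord: lat=33.1000, lon=131.1800
#   corresponding radar coord: y=1024, x=512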
Example #3
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file, abspath=True)

    # Check input file type
    ext = os.path.splitext(inps.file[0])[1]
    if ext not in ['.unw', '.cor', '.int']:
        print 'No need to extract attributes for ROI_PAC ' + ext + ' file'
        return

    print 'number of files: ' + str(len(inps.file))

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    if len(inps.file) == 1:
        extract_attribute(inps.file[0])
    elif inps.parallel:
        Parallel(n_jobs=num_cores)(delayed(extract_attribute)(file)
                                   for file in inps.file)
    else:
        for File in inps.file:
            extract_attribute(File)

    return
Example #4
def main(argv): 

    inps = cmdLineParse()
    #print '\n****************** mask *********************'
    inps.file = ut.get_file_list(inps.file)
    print 'number of files to mask: '+str(len(inps.file))
    print inps.file

    # check outfile and parallel option
    if len(inps.file) > 1:
        inps.outfile = None
    elif len(inps.file) == 1 and inps.parallel:
        inps.parallel = False
        print 'parallel processing is disabled for one input file'

    # masking
    if inps.parallel:
        num_cores = multiprocessing.cpu_count()
        print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(mask_file)(File, inps.mask_file, inps_dict=vars(inps)) for File in inps.file)
    else:
        for File in inps.file:
            print '-------------------------------------------'
            mask_file(File, inps.mask_file, inps.outfile, vars(inps))

    print 'Done.'
    return
Example #5
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print 'number of files to geocode: ' + str(len(inps.file))
    print inps.file
    print 'interpolation method: ' + inps.method
    print 'fill_value: ' + str(inps.fill_value)

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    #####
    if len(inps.file) == 1:
        geocode_file_with_geo_lut(inps.file[0], inps.lookup_file, inps.method,
                                  inps.fill_value, inps.outfile)
    elif inps.parallel:
        Parallel(n_jobs=num_cores)(delayed(geocode_file_with_geo_lut)\
                                   (fname, inps.lookup_file, inps.method, inps.fill_value) for fname in inps.file)
    else:
        for fname in inps.file:
            geocode_file_with_geo_lut(fname, inps.lookup_file, inps.method,
                                      inps.fill_value)

    print 'Done.'
    return
Example #6
def main(argv):

    inps = cmdLineParse()
    #print '\n**************** Multilook *********************'
    fileList = get_file_list(inps.file)

    # check outfile and parallel option
    if len(fileList) > 1:
        inps.outfile = None
    elif len(fileList) == 1 and inps.parallel:
        inps.parallel = False
        print 'parallel processing is disabled for one input file'

    # multilooking
    if inps.parallel:
        num_cores = multiprocessing.cpu_count()
        print 'parallel processing using %d cores ...' % (num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(multilook_file)(file, inps.lks_y, inps.lks_x)
            for file in fileList)
    else:
        for File in fileList:
            print '-------------------------------------------'
            multilook_file(File, inps.lks_y, inps.lks_x, inps.outfile)

    print 'Done.'
    return
Example #7
def main(argv):

    inps = cmdLineParse()
    #print '\n****************** mask *********************'
    inps.file = ut.get_file_list(inps.file)
    print 'number of files to mask: ' + str(len(inps.file))
    print inps.file

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    # masking
    if len(inps.file) == 1:
        mask_file(inps.file[0], inps.mask_file, inps.outfile, vars(inps))

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(mask_file)(File, inps.mask_file, inps_dict=vars(inps))
            for File in inps.file)
    else:
        for File in inps.file:
            print '-------------------------------------------'
            mask_file(File, inps.mask_file, inps_dict=vars(inps))

    print 'Done.'
    return
Example #8
def main(argv):
    inps = cmdLineParse()
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    inps.file = ut.get_file_list(inps.file)
    print 'number of files to geocode: '+str(len(inps.file))
    print inps.file
    if len(inps.file) > 1:
        inps.outfile = None
    print 'fill_value: '+str(inps.fill_value)

    ##Check Lookup table
    inps.lookup_file = ut.get_lookup_file(inps.lookup_file)
    if not inps.lookup_file:
        sys.exit('No lookup table found! Can not geocode without it.')

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(len(inps.file))

    #####
    if inps.parallel:
        Parallel(n_jobs=num_cores)(delayed(geocode_file)\
                                   (fname, inps.lookup_file, inps.outfile, inps) for fname in inps.file)
    else:
        for fname in inps.file:
            geocode_file(fname, inps.lookup_file, inps.outfile, inps)

    print 'Done.'
    return
Example #9
def dload_grib(date_list, hour, grib_source='ECMWF', weather_dir='./'):
    '''Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        grib_source : string, grib data source: ECMWF, ERA, MERRA or NARR
        weather_dir : string, directory to save the downloaded grib files
    Output:
        grib_file_list : list of string
    '''

    ## Grib data directory
    weather_dir = os.path.abspath(weather_dir)
    grib_dir = weather_dir+'/'+grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: '+grib_dir
        os.makedirs(grib_dir)

    ## Date list to grib file list
    grib_file_list = []
    for d in date_list:
        if   grib_source == 'ECMWF':  grib_file = grib_dir+'/ERA-Int_'+d+'_'+hour+'.grb'
        elif grib_source == 'ERA'  :  grib_file = grib_dir+'/ERA_'+d+'_'+hour+'.grb'
        elif grib_source == 'MERRA':  grib_file = grib_dir+'/merra-'+d+'-'+hour+'.hdf'
        elif grib_source == 'NARR' :  grib_file = grib_dir+'/narr-a_221_'+d+'_'+hour+'00_000.grb'
        grib_file_list.append(grib_file)

    ## Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode([os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [i for i in grib_file_existed if os.path.getsize(i) != grib_filesize_mode]
        print 'number of existing grib files  : %d' % len(grib_file_existed)
        print 'file size mode: %d' % grib_filesize_mode
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Delete them and re-download...'
            print 'number of grib files corrupted  : %d' % len(grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm '+i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [str(re.findall('\d{8}', i)[0]) for i in grib_file2download]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if   grib_source == 'ECMWF':  pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif grib_source == 'ERA'  :  pa.ERAdload(  date_list2download, hour, grib_dir)
    elif grib_source == 'MERRA':  pa.MERRAdload(date_list2download, hour, grib_dir)
    elif grib_source == 'NARR' :  pa.NARRdload( date_list2download, hour, grib_dir)

    return grib_file_list
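A minimal usage sketch of dload_grib(), assuming PyAPS download credentials are configured; the dates, hour, and directory below are illustrative only.

# hypothetical call; date_list and hour follow the docstring formats
date_list = ['20101120', '20110216', '20110503']
grib_files = dload_grib(date_list, '06', grib_source='ECMWF',
                        weather_dir='./WEATHER')
print 'grib files:'
print grib_files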
Example #10
def main(argv):
    inps = cmdLineParse()
    inps.outfile = os.path.abspath(inps.outfile)
    atr = readfile.read_attribute(inps.file[0])
    k = atr['FILE_TYPE']

    if not inps.sensor:
        inps.sensor = get_mission_name(atr)
    print 'Sensor name: %s' % (inps.sensor)

    m_date_list = []
    s_date_list = []
    bperp_list = []

    inps.file = ut.get_file_list(inps.file, abspath=True)
    if os.path.splitext(inps.file[0])[1] not in ['.h5', '.he5']:
        ifgramNum = len(inps.file)
        print 'Number of interferograms: %d' % (ifgramNum)
        for fname in inps.file:
            try:
                date12 = str(
                    re.findall('\d{8}[-_]\d{8}',
                               os.path.basename(fname))[0]).replace('_', '-')
            except:
                date12 = str(
                    re.findall('\d{6}[-_]\d{6}',
                               os.path.basename(fname))[0]).replace('_', '-')
            m_date, s_date = date12.split('-')
            bperp = readfile.read_attribute(fname)['P_BASELINE_TOP_HDR']
            m_date_list.append(m_date)
            s_date_list.append(s_date)
            bperp_list.append(bperp)

    else:
        h5 = h5py.File(inps.file[0], 'r')
        ifgram_list = ut.check_drop_ifgram(h5)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        m_date_list = [date12.split('-')[0] for date12 in date12_list]
        s_date_list = [date12.split('-')[1] for date12 in date12_list]
        for ifgram in ifgram_list:
            bperp = h5[k][ifgram].attrs['P_BASELINE_TOP_HDR']
            bperp_list.append(bperp)
        ifgramNum = len(ifgram_list)

    fout = '{0} {1}     {2:<15}   {3}\n'
    fl = open(inps.outfile, 'w')
    for i in range(ifgramNum):
        fl.write(
            fout.format(m_date_list[i], s_date_list[i], bperp_list[i],
                        inps.sensor))
    fl.close()
    print 'write to %s' % (inps.outfile)
    return
Example #11
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    if not ut.which('geocode.pl'):
        sys.exit(
            "\nERROR: Can not find geocode.pl, it's needed for geocoding.\n")

    #print '\n***************** Geocoding *******************'
    if not inps.lookup_file.endswith('.trans'):
        print 'ERROR: Input lookup file is not a .trans file: ' + inps.lookup_file + '\n'
        sys.exit(1)
    print 'number of files to geocode: ' + str(len(inps.file))
    print inps.file

    # check outfile and parallel option
    if len(inps.file) > 1:
        inps.outfile = None
    elif len(inps.file) == 1 and inps.parallel:
        inps.parallel = False
        print 'parallel processing is disabled for one input file'

    # Check geomap file for previously subsetted radar coord file
    atr = readfile.read_attribute(inps.file[0])
    if 'subset_x0' in atr.keys():
        inps.lookup_file = geomap4subset_radar_file(atr, inps.lookup_file)

    # Geocoding
    if inps.parallel:
        num_cores = multiprocessing.cpu_count()
        print 'parallel processing using %d cores ...' % (num_cores)
        Parallel(n_jobs=num_cores)(
            delayed(geocode_file_roipac)(file, inps.lookup_file)
            for file in inps.file)
    else:
        for File in inps.file:
            print '----------------------------------------------------'
            geocode_file_roipac(File, inps.lookup_file, inps.outfile)

    # clean temporary geomap file for previously subsetted radar coord file
    if 'subset_x0' in atr.keys():
        rmCmd = 'rm ' + inps.lookup_file
        os.system(rmCmd)
        print rmCmd
        rmCmd = 'rm ' + inps.lookup_file + '.rsc'
        os.system(rmCmd)
        print rmCmd

    print 'Done.'
    return
Example #12
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    print 'read options from template file: ' + os.path.basename(template_file)
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = value

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'minCoherence'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.coh_thres = 0.7
        else:
            inps.coh_thres = float(value)

    return inps
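For reference, a template file consumed by read_template2inps() above could contain entries like the following; the values are illustrative, and only the three keys handled above matter here.

# hypothetical template file content, parsed by readfile.read_template();
# 'auto' triggers the default values assigned in the function above:
#
#   pysar.network.coherenceFile = auto
#   pysar.network.maskFile      = maskLand.h5
#   pysar.network.minCoherence  = 0.5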
Example #13
def main(argv):
    if len(sys.argv) < 3:
        usage(); sys.exit(1)

    y = int(argv[0])
    x = int(argv[1])

    try:    trans_file = argv[2]
    except: trans_file = ut.get_file_list('geomap*.trans')[0]

    try:    radar_file = argv[3]
    except: radar_file = 'unwrapIfgram.h5'

    atr_rdr = readfile.read_attribute(radar_file)
    
    print 'input radar coord: y/azimuth=%d, x/range=%d' % (y, x)
     
    lat, lon = ut.radar2glob(np.array(y), np.array(x), trans_file, atr_rdr)[0:2]
    print 'corresponding geo coord: lat=%.4f, lon=%.4f' % (lat, lon)

    return
Example #14
def extract_attribute_lookup_table(fname):
    '''Read/extract attributes for a .UTM_TO_RDC file from Gamma to ROI_PAC.
    For example, it reads the input file, sim_150911-150922.UTM_TO_RDC,
    finds its associated par file, sim_150911-150922.utm.dem.par, reads it,
    converts the attributes to ROI_PAC style and writes them to an rsc file,
    sim_150911-150922.UTM_TO_RDC.rsc'''

    ## Check for existing .rsc file
    rsc_file_list = ut.get_file_list(fname + '.rsc')
    if rsc_file_list:
        rsc_file = rsc_file_list[0]
        print rsc_file + ' already exists, no need to re-extract.'
        return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['INSAR_PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]
    atr['Y_UNIT'] = 'degrees'
    atr['X_UNIT'] = 'degrees'

    par_file = os.path.splitext(fname)[0] + '.utm.dem.par'

    print 'read ' + os.path.basename(par_file)
    par_dict = readfile.read_gamma_par(par_file)

    print 'convert Gamma attribute to ROI_PAC style'
    par_dict = readfile.attribute_gamma2roipac(par_dict)
    atr.update(par_dict)

    ## Write to .rsc file
    rsc_file = fname + '.rsc'
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except:
        atr_orig = None
    if atr_orig != atr:
        print 'writing >>> ' + os.path.basename(rsc_file)
        writefile.write_roipac_rsc(atr, rsc_file)
    return rsc_file
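A usage sketch, reusing the file name from the docstring; the lookup table and its .utm.dem.par file are assumed to sit in the working directory.

# hypothetical usage of the function above:
rsc_file = extract_attribute_lookup_table('sim_150911-150922.UTM_TO_RDC')
print 'ROI_PAC-style attribute file: ' + rsc_file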
Example #15
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file, abspath=True)
    print 'number of files: ' + str(len(inps.file))

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    ##### multiple datasets files
    ext = os.path.splitext(inps.file[0])[1]
    if ext in ['.unw', '.cor', '.int']:
        if len(inps.file) == 1:
            extract_attribute_interferogram(inps.file[0])
        elif inps.parallel:
            Parallel(n_jobs=num_cores)(
                delayed(extract_attribute_interferogram)(file)
                for file in inps.file)
        else:
            for File in inps.file:
                extract_attribute_interferogram(File)

    ##### Single dataset files
    elif inps.file[0].endswith('.utm.dem'):
        for File in inps.file:
            atr_file = extract_attribute_dem_geo(File)
    elif inps.file[0].endswith(('.rdc.dem', '.hgt_sim')):
        for File in inps.file:
            atr_file = extract_attribute_dem_radar(File)
    elif ext in ['.UTM_TO_RDC']:
        for File in inps.file:
            atr_file = extract_attribute_lookup_table(File)
    else:
        print 'No need to extract attributes for Gamma ' + ext + ' file'

    print 'Done.'
    return
Example #16
    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = view.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        inps.flip_lr = False

    # Mask file
    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']
        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except:
            inps.mask_file = None
    try:
        mask = readfile.read(inps.mask_file)[0]
        mask[mask != 0] = 1
        print 'load mask from file: ' + inps.mask_file
    except:
        mask = None
        print 'No mask used.'

    # Initial Map
    d_v = h5[k].get(dateList[inps.epoch_num])[:] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = h5[k].get(inps.ref_date)[:] * inps.unit_fac
        d_v -= inps.ref_d_v
Example #17
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROIPAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Can not geocode without it.')

    ## Original coordinates: row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print 'reading lookup table file: ' + lut_file
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # extract pixels only available in radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.full((len_geo, wid_geo), fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print 'update attributes'
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> ' + fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
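The per-dataset core of the function above is a single RegularGridInterpolator call; here is a self-contained sketch of that step on synthetic data (shapes and values are illustrative only).

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

# synthetic radar-coordinate data: 100 lines x 80 columns
len_rdr, wid_rdr = 100, 80
data = np.random.rand(len_rdr, wid_rdr).astype(np.float32)

# regular grid of the input data: row and column indices
pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

# fake lookup-table values: fractional radar coordinates of 4 geo pixels
pts_geo = np.array([[10.3, 20.7],
                    [50.0, 40.5],
                    [99.0, 79.0],
                    [75.2, 10.9]])

# nearest-neighbor resampling, NaN for points outside the radar grid
RGI_func = RGI(pts_rdr, data, method='nearest',
               bounds_error=False, fill_value=np.nan)
print RGI_func(pts_geo)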
Example #18
def geocode_file_with_geo_lookup_table(fname,
                                       lookup_file=None,
                                       interp_method='nearest',
                                       fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Inputs:
        fname         : string, file to be geocoded
        lookup_file   : string, optional, lookup table file generated by ROIPAC or Gamma
                        i.e. geomap_4rlks.trans           from ROI_PAC
                             sim_150911-150922.UTM_TO_RDC from Gamma
        interp_method : string, optional, interpolation/resampling method, supporting nearest, linear, cubic
        fname_out : string, optional, output geocoded filename
    Output:
        fname_out

    A faster way is as below:
    https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    '''
    atr_rdr = readfile.read_attribute(fname)
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default values:
    if not lookup_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lookup_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lookup_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    # Check lookup table file
    try:
        lookup_file = ut.get_file_list(lookup_file)[0]
    except:
        lookup_file = None
    if not lookup_file:
        sys.exit(
            'ERROR: No lookup table file found! Can not geocode without it.')

    ##### 1. Get Y/X coordinates in radar file
    print '------------------------------------------------------'
    print 'geocoding file: ' + fname
    print 'getting Y/X coordinates from file in radar coordinates'
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    yy, xx = np.mgrid[0:len_rdr - 1:len_rdr * 1j, 0:wid_rdr - 1:wid_rdr * 1j]
    yx_rdr = np.hstack((yy.reshape(-1, 1), xx.reshape(-1, 1)))

    ##### 2. Get Y/X coordinates in geo*trans file
    print 'reading ' + lookup_file
    rg, az, atr_lut = readfile.read(lookup_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if input radar file has been subsetted.
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust value read from lookup table file'

    # extract pixels only available in radar file (get rid of invalid corners)
    az = az.flatten()
    rg = rg.flatten()
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    yx_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))

    print 'interpolation method: ' + interp_method
    k = atr_rdr['FILE_TYPE']

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                dset = group.create_dataset(date,
                                            data=data_geo.reshape(
                                                (len_geo, wid_geo)),
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:].flatten()

                data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
                data_geo[idx] = griddata(yx_rdr,
                                         data,
                                         yx_geo,
                                         method=interp_method)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo.reshape(
                                             (len_geo, wid_geo)),
                                         compression='gzip')
                atr = geocode_attribute_with_geo_lookup_table(
                    h5[k][ifgram].attrs, atr_lut, print_message=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading ' + fname
        data = readfile.read(fname)[0].flatten()
        print 'geocoding'
        data_geo = np.zeros(len_geo * wid_geo, dtype=data.dtype)
        data_geo[idx] = griddata(yx_rdr, data, yx_geo, method=interp_method)
        print 'update attributes'
        atr = geocode_attribute_with_geo_lookup_table(atr_rdr, atr_lut)
        print 'writing >>> ' + fname_out
        writefile.write(data_geo.reshape((len_geo, wid_geo)), atr, fname_out)

    return fname_out
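The stackoverflow link in the docstring points to a known speedup: griddata() rebuilds the Delaunay triangulation of yx_rdr on every epoch, while the triangulation and barycentric weights depend only on the two grids and can be computed once. A sketch of that idea for the linear method, adapted from the linked answer; the helper names are hypothetical.

import numpy as np
from scipy.spatial import Delaunay

def build_interp_weights(xy_src, xy_dst):
    '''Triangulate the source grid once and cache barycentric weights.'''
    tri = Delaunay(xy_src)
    simplex = tri.find_simplex(xy_dst)   # -1 for points outside the hull
    vertices = np.take(tri.simplices, simplex, axis=0)
    temp = np.take(tri.transform, simplex, axis=0)
    delta = xy_dst - temp[:, 2]
    bary = np.einsum('njk,nk->nj', temp[:, :2, :], delta)
    weights = np.hstack((bary, 1.0 - bary.sum(axis=1, keepdims=True)))
    return vertices, weights

def interpolate_epoch(values, vertices, weights):
    '''Reuse the cached weights for each new epoch of values.'''
    return np.einsum('nj,nj->n', np.take(values, vertices), weights)

Each epoch then costs one einsum instead of a full re-triangulation, which is where the speedup over repeated griddata() calls comes from.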
Example #19
def main(argv):

    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print 'input file(s): ' + str(len(inps.file))
    print inps.file

    #print '\n*************** Phase Ramp Removal ***********************'
    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # check outfile and parallel option
    if len(inps.file) > 1:
        inps.outfile = None
    elif len(inps.file) == 1 and inps.parallel:
        inps.parallel = False
        print 'parallel processing is disabled for one input file'

    # Update mask for multiple surfaces
    if inps.ysub:
        # Read mask
        if inps.mask_file:
            Mask_temp = readfile.read(inps.mask_file)[0]
            Mask = np.zeros((length, width))
            Mask[Mask_temp != 0] = 1
        else:
            Mask = np.ones((length, width))

        # Update mask for multiple surface from inps.ysub
        mask_multiSurface = np.zeros((length, width))
        surfNum = len(inps.ysub) / 2
        if surfNum == 1:
            mask_multiSurface = Mask
        else:
            i = 0
            mask_multiSurface[inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] = Mask[
                inps.ysub[2 * i]:inps.ysub[2 * i + 1], :]
            for i in range(1, surfNum):
                if inps.ysub[2 * i] < inps.ysub[2 * i - 1]:
                    mask_multiSurface[
                        inps.ysub[2 * i]:inps.ysub[2 * i - 1], :] += Mask[
                            inps.ysub[2 * i]:inps.ysub[2 * i - 1], :] * (i + 1)
                    mask_multiSurface[inps.ysub[2 * i]:inps.ysub[2 * i -
                                                                 1], :] /= 2
                    mask_multiSurface[
                        inps.ysub[2 * i - 1]:inps.ysub[2 * i + 1], :] = Mask[
                            inps.ysub[2 * i - 1]:inps.ysub[2 * i +
                                                           1], :] * (i + 1)
                else:
                    mask_multiSurface[
                        inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] = Mask[
                            inps.ysub[2 * i]:inps.ysub[2 * i + 1], :] * (i + 1)

        # Write updated mask for multiple surfaces into file
        outFile = 'mask_' + str(surfNum) + inps.surface_type + '.h5'
        atr['FILE_TYPE'] = 'mask'
        writefile.write(mask_multiSurface, atr, outFile)
        print 'saved mask to ' + outFile

    ############################## Removing Phase Ramp #######################################
    if inps.parallel:
        num_cores = multiprocessing.cpu_count()
        print 'parallel processing using %d cores ...' % (num_cores)
        Parallel(n_jobs=num_cores)(delayed(rm.remove_surface)(file, inps.surface_type, inps.mask_file, ysub=inps.ysub)\
                                   for file in inps.file)
    else:
        for File in inps.file:
            print '------------------------------------------'
            rm.remove_surface(File, inps.surface_type, inps.mask_file,
                              inps.outfile, inps.ysub)

    print 'Done.'
    return
Example #20
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            remove_reference_pixel(file)
        return

    ##### Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print 'reading reference info from template: ' + inps.template_file
        inps = read_seed_template2inps(inps.template_file, inps)
    if inps.reference_file:
        print 'reading reference info from reference: ' + inps.reference_file
        inps = read_seed_reference2inps(inps.reference_file, inps)

    ## Do not use ref_lat/lon input for file in radar-coord
    #if not 'X_FIRST' in atr.keys() and (inps.ref_lat or inps.ref_lon):
    #    print 'Lat/lon reference input is disabled for file in radar coord.'
    #    inps.ref_lat = None
    #    inps.ref_lon = None

    # Convert ref_lat/lon to ref_y/x
    if inps.ref_lat and inps.ref_lon:
        if 'X_FIRST' in atr.keys():
            inps.ref_y = subset.coord_geo2radar(inps.ref_lat, atr, 'lat')
            inps.ref_x = subset.coord_geo2radar(inps.ref_lon, atr, 'lon')
        else:
            # Convert lat/lon to az/rg for radar coord file using geomap*.trans file
            inps.ref_y, inps.ref_x = ut.glob2radar(np.array(inps.ref_lat), np.array(inps.ref_lon),\
                                                   inps.trans_file, atr)[0:2]
        print 'Input reference point in lat/lon: ' + str(
            [inps.ref_lat, inps.ref_lon])
    print 'Input reference point in   y/x  : ' + str([inps.ref_y, inps.ref_x])

    # Do not use ref_y/x outside of data coverage
    if (inps.ref_y and inps.ref_x
            and not (0 <= inps.ref_y <= length and 0 <= inps.ref_x <= width)):
        inps.ref_y = None
        inps.ref_x = None
        print 'WARNING: input reference point is OUT of data coverage!'
        print 'Continue with other method to select reference point.'

    # Do not use ref_y/x in masked out area
    if inps.ref_y and inps.ref_x and inps.mask_file:
        print 'mask: ' + inps.mask_file
        mask = readfile.read(inps.mask_file)[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            print 'WARNING: input reference point is in masked OUT area!'
            print 'Continue with other method to select reference point.'

    ##### Select method
    if inps.ref_y and inps.ref_x:
        inps.method = 'input-coord'
    elif inps.coherence_file:
        if os.path.isfile(inps.coherence_file):
            inps.method = 'max-coherence'
        else:
            inps.coherence_file = None

    if inps.method == 'manual':
        inps.parallel = False
        print 'Parallel processing is disabled for manual seeding method.'

    ##### Seeding file by file
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file))

    if len(inps.file) == 1:
        seed_file_inps(inps.file[0], inps, inps.outfile)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(seed_file_inps)(file, inps)
                                   for file in inps.file)
    else:
        for File in inps.file:
            seed_file_inps(File, inps)

    print 'Done.'
    return
Example #21
def extract_attribute_interferogram(fname):
    '''Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Inputs:
        fname : str, Gamma interferogram filename or path, i.e. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Output:
        atr : dict, Attributes dictionary
    '''
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    rsc_file = fname + '.rsc'
    #if os.path.isfile(rsc_file):
    #    return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['INSAR_PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    ## Get info: date12, number of looks
    try:
        date12 = str(re.findall('\d{8}[-_]\d{8}', file_basename)[0])
    except:
        date12 = str(re.findall('\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('_', '-').split('-')
    atr['DATE12'] = ptime.yymmdd(m_date) + '-' + ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    ## Read .off and .par file
    off_file = file_dir + '/*' + date12 + lks + '.off'
    m_par_file = [
        file_dir + '/*' + m_date + lks + i for i in ['.amp.par', '.ramp.par']
    ]
    s_par_file = [
        file_dir + '/*' + s_date + lks + i for i in ['.amp.par', '.ramp.par']
    ]

    try:
        off_file = ut.get_file_list(off_file)[0]
    except:
        print '\nERROR: Can not find .off file, it is supposed to be like: ' + off_file
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except:
        print '\nERROR: Can not find master date .par file, it is supposed to be like: ' + str(m_par_file)
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except:
        print '\nERROR: Can not find slave date .par file, it is supposed to be like: ' + str(s_par_file)

    #print 'read '+m_par_file
    #print 'read '+off_file
    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)

    #print 'convert Gamma attribute to ROI_PAC style'
    atr.update(par_dict)
    atr.update(off_dict)
    atr = readfile.attribute_gamma2roipac(atr)

    ## Perp Baseline Info
    #print 'extract baseline info from %s, %s and %s file' % (m_par_file, s_par_file, off_file)
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    ## LAT/LON_REF1/2/3/4
    #print 'extract LAT/LON_REF1/2/3/4 from '+m_par_file
    atr = get_lalo_ref(m_par_file, atr)

    ## Write to .rsc file
    #print 'writing >>> '+rsc_file
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except:
        atr_orig = None
    if atr_orig != atr:
        print 'merge %s, %s and %s into %s' % (os.path.basename(m_par_file), os.path.basename(s_par_file),\
                                               os.path.basename(off_file), os.path.basename(rsc_file))
        writefile.write_roipac_rsc(atr, rsc_file)

    return rsc_file
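A usage sketch with the interferogram path from the docstring; the matching .off and .par files are assumed to sit next to it.

# hypothetical usage of the function above:
rsc = extract_attribute_interferogram(
    '/PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw')
print 'attributes written to: ' + rsc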
Example #22
def main(argv):

    inps = cmdLineParse()
    inps.dem_file = ut.get_file_list([inps.dem_file])[0]
    inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
    atr = readfile.read_attribute(inps.timeseries_file)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Get Acquisition time
    inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                             inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    if not os.path.isdir(inps.grib_source):
        print 'making directory: ' + inps.grib_source
        os.mkdir(inps.grib_source)

    ## Loop to download
    inps.grib_file_list = []
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5timeseries['timeseries'].keys())
    for d in dateList:
        print [d]
        if inps.grib_source == 'ECMWF':
            grib_file = './ECMWF/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = './ERA/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = './MERRA/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = './NARR/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

        if os.path.isfile(grib_file):
            print grib_file + ' already exists.'
        else:
            if inps.grib_source == 'ECMWF':
                pa.ECMWFdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'ERA':
                pa.ERAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'MERRA':
                pa.MERRAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'NARR':
                pa.NARRdload([d], inps.hour, './' + inps.grib_source + '/')

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data + phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()
    print 'Done.'

    return
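get_delay() itself is not shown in this listing. The incidence angle prepared above (converted to radians) maps the zenith delay onto the radar line of sight; a minimal sketch of that mapping, assuming the usual 1/cos(inc_angle) slant factor.

import numpy as np

# hypothetical zenith-to-slant mapping; inc_angle in radians, as prepared
# in main() above (degrees * pi / 180)
def zenith2slant(zenith_delay, inc_angle):
    return zenith_delay / np.cos(inc_angle)

print zenith2slant(2.3, 30.0 * np.pi / 180.0)   # -> ~2.656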
Example #23
def main(argv):

    global xsub, ysub, thr
    parallel = 'yes'     ## Use parallel by default for multiple input files

    ######################################
    try:    opts, args = getopt.getopt(argv,"h:f:m:t:x:y:o:",['no-parallel'])
    except getopt.GetoptError:    Usage() ; sys.exit(1)

    if len(sys.argv) > 3:
        for opt,arg in opts:
            if opt in ("-h","--help"):     Usage();  sys.exit()
            elif opt == '-f':        File     = arg.split(',')
            elif opt == '-m':        maskFile = arg
            elif opt == '-t':        thr  = float(arg)
            elif opt == '-y':        ysub = [int(i) for i in arg.split(':')];     ysub.sort()
            elif opt == '-x':        xsub = [int(i) for i in arg.split(':')];     xsub.sort()
            elif opt == '-o':        outFile = arg
            elif opt == '--no-parallel':   parallel = 'no'

    elif len(sys.argv)==3:
        if os.path.isfile(argv[0]) and os.path.isfile(argv[1]):
            File     = argv[0].split(',')
            maskFile = argv[1]
        else:  print 'Input file does not exist: '+argv[0]+' / '+argv[1];  sys.exit(1)
    else:   Usage();  sys.exit(1)

    try:
        File
        maskFile
    except:    Usage() ; sys.exit(1)

    ##### Check Input File List
    print '\n****************** Masking *********************'
    fileList = ut.get_file_list(File)
    print 'number of files to mask: '+str(len(fileList))
    print fileList

    if len(fileList) == 1:
        parallel = 'no'
        try: outFile          ## Customized output file name for one input file only
        except:
            ext     = os.path.splitext(fileList[0])[1]
            outFile = os.path.basename(fileList[0]).split('.')[0]+'_masked'+ext
    elif len(fileList) > 1:
        try:
            del outFile
            print 'Disabled customized output name for multiple input files, continue with automatic naming instead.'
        except: pass
    else: print 'ERROR: No input file!'; sys.exit(1)

    ##### Check parallel computing requirement
    if parallel == 'yes':
        try:
            from joblib import Parallel, delayed
            import multiprocessing
        except:
            parallel = 'no'
            print '\nCannot import joblib or multiprocessing!'
            print 'Disabled parallel masking.'
            print 'Continue with masking file by file ...'

    ###### Read Mask File
    atr_mask = readfile.read_attributes(maskFile)
    k_mask = atr_mask['FILE_TYPE']
    if not k_mask == 'coherence':    ## Read mask file once 
        M,Matr = readfile.read(maskFile)
        print 'mask file: '+maskFile

    ##### Masking - file by file
    if parallel == 'no':
        ##### Single Mask
        if not k_mask == 'coherence':
            for in_file in fileList:
                print '-------------------------------------------'
                print 'masking  : '+in_file
                try:    mask_file(in_file,M,outFile)
                except: mask_file(in_file,M)
        ##### Multiple Mask
        else:
            try:    mask_with_multi_masks(fileList[0],maskFile,outFile)
            except: mask_with_multi_masks(fileList[0],maskFile)

    ##### Masking - parallel
    else:
        print '-----------------------'
        print 'parallel masking ...'
        print '-----------------------'
        num_cores = multiprocessing.cpu_count()
        Parallel(n_jobs=num_cores)(delayed(mask_file)(in_file,M) for in_file in fileList)
Example #24
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.timeseries_file)[0]+suffix+os.path.splitext(inps.timeseries_file)[1]
    if inps.template_file:
        print 'read option from template file: '+inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    ##### Read Data
    atr = readfile.read_attribute(inps.timeseries_file)
    coordType = 'radar'
    if 'Y_FIRST' in atr.keys():
        coordType = 'geo'

    # 1. Incidence angle
    try:
        inps.inc_angle_file = ut.get_file_list(inps.inc_angle_file, coord=coordType)[0]
    except ValueError:
        print 'No incidence angle file found!\nRun incidence_angle.py to generate it.'
    print 'read incidence angle from file: '+str(inps.inc_angle_file)
    inps.inc_angle = readfile.read(inps.inc_angle_file, epoch='incidenceAngle')[0].flatten()
    inps.inc_angle *= np.pi/180.0

    # 2. Slant Range distance
    try:
        inps.range_dist_file = ut.get_file_list(inps.range_dist_file, coord=coordType)[0]
    except ValueError:
        print 'No range distance file found!\nRun range_distance.py to generate it.'
    print 'read slant range distance from file: '+str(inps.range_dist_file)
    inps.range_dist = readfile.read(inps.range_dist_file, epoch='slantRangeDistance')[0].flatten()

    # 3. Perp Baseline - 1D in time, 0D/1D in space (azimuth)
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=1)
        if inps.pbase.shape[1] > 1:
            print 'consider perp baseline variance in azimuth direction'
    except ValueError:
        print 'No P_BASELINE_TIMESERIES found in timeseries file.\n'+\
              'Can not correct for DEM residual without it!'

    # 4. Time Series - 1D in time, 1D in space (flattened)
    print "read time series file: " + inps.timeseries_file
    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(-1,1)

    #Mark dates used in the estimation
    inps.ex_date = check_exclude_date(inps.ex_date, date_list)
    inps.date_flag = np.array([i not in inps.ex_date for i in date_list], dtype=np.bool_)
    if inps.poly_order > np.sum(inps.date_flag):
        raise ValueError("ERROR: input polynomial order=%d is larger than number of acquisition=%d used in estimation!" %\
                         (inps.poly_order, np.sum(inps.date_flag)))

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length*width
    timeseries = np.zeros((date_num, pixel_num),np.float32)
    for i in range(date_num):
        timeseries[i] = h5['timeseries'].get(date_list[i])[:].flatten()
        sys.stdout.write('\rreading acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
    h5.close()
    print ''


    ##### Design matrix - temporal deformation model
    print '-------------------------------------------------'
    print 'Correct topographic phase residual using Fattahi and Amelung (2013, IEEE-TGRS)'
    msg = 'minimum-norm constraint on: phase'
    if inps.phase_velocity:
        msg += ' velocity'
    print msg

    # Heresh's original code for phase history approach
    #A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
    #A2 = inps.tbase**2 / 2.0
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))

    # 1. Polynomial - 2D matrix in size of (date_num, polyOrder+1)
    print "temporal deformation model: polynomial order = "+str(inps.poly_order)
    A_def = np.ones((date_num, 1), np.float32)
    for i in range(inps.poly_order):
        Ai = inps.tbase**(i+1) / gamma(i+2)
        Ai = np.array(Ai, np.float32).reshape(-1,1)
        A_def = np.hstack((A_def, Ai))

    # 2. Step function - 2D matrix in size of (date_num, stepNum)
    if inps.step_date:
        print "temporal deformation model: step functions at "+str(inps.step_date)
        yySteps = ptime.yyyymmdd2years(inps.step_date)
        yyList = np.array(ptime.yyyymmdd2years(date_list)).reshape(-1,1)
        for yyStep in yySteps:
            Ai = yyList > yyStep
            Ai = np.array(Ai, np.float32).reshape(-1,1)
            A_def = np.hstack((A_def, Ai))
    inps.step_num = len(inps.step_date)

    print '-------------------------------------------------'


    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    ## Output estimated steps 
    print 'ordinary least squares (OLS) inversion using L2-norm minimization'
    timeseriesCor = np.zeros((date_num, pixel_num), dtype=np.float32)
    timeseriesRes = np.zeros((date_num, pixel_num), dtype=np.float32)
    topoRes = np.zeros(pixel_num, dtype=np.float32)
    constC  = np.zeros(pixel_num, dtype=np.float32)
    if inps.step_num > 0:
        stepModel = np.zeros((inps.step_num, pixel_num), dtype=np.float32)

    print 'skip pixels with zero/nan value in geometry files - incidence angle and range distance'
    mask = np.multiply(~np.isnan(inps.inc_angle), ~np.isnan(inps.range_dist))
    mask[inps.inc_angle == 0.] = 0
    mask[inps.range_dist == 0.] = 0
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels in the file: %d' % (pixel_num)
    print 'number of pixels to invert  : %d' % (pixel_num2inv)

    if inps.pbase.shape[1] == 1:
        pbase = inps.pbase
    prog_bar = ptime.progress_bar(maxValue=pixel_num)
    for i in range(pixel_num2inv):
        prog_bar.update(i+1, every=1000, suffix='%s/%s pixels'%(str(i+1), str(pixel_num2inv)))
        idx = pixel_idx2inv[i]

        r = inps.range_dist[idx]
        inc_angle = inps.inc_angle[idx]
        if inps.pbase.shape[1] > 1:
            pbase = inps.pbase[:, int(idx/width)].reshape(-1,1)
        A_deltaZ = pbase / (r * np.sin(inc_angle))

        A = np.hstack((A_deltaZ, A_def))
        ts = timeseries[:,idx].reshape(date_num,-1)
        deltaZ, tsCor, tsRes, stepEst = topographic_residual_inversion(ts, A, inps)
        topoRes[idx:idx+1] = deltaZ
        timeseriesCor[:,idx:idx+1] = tsCor
        timeseriesRes[:,idx:idx+1] = tsRes
        if inps.step_num > 0:
            stepModel[:,idx:idx+1] = stepEst
    prog_bar.close()


    ##------------------------------------------------ Output  --------------------------------------------##
    # 1. DEM error file
    if 'Y_FIRST' in atr.keys():
        deltaZFile = 'demGeo_error.h5'
    else:
        deltaZFile = 'demRadar_error.h5'
    print 'writing >>> '+deltaZFile
    atrDeltaZ = atr.copy()
    atrDeltaZ['FILE_TYPE'] = 'dem'
    atrDeltaZ['UNIT'] = 'm'
    writefile.write(topoRes.reshape(length, width), atrDeltaZ, deltaZFile)

    # 2. Topo Residual Corrected Time Series
    print 'writing >>> '+inps.outfile
    h5 = h5py.File(inps.outfile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesCor[i].reshape(length, width), compression='gzip')
    print ''
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 3. Inversion residual Time Series
    tsResFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesResidual.h5')
    print 'writing >>> '+os.path.basename(tsResFile)
    h5 = h5py.File(tsResFile,'w')
    group = h5.create_group('timeseries')
    for i in range(date_num):
        sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, date_num))
        sys.stdout.flush()
        dset = group.create_dataset(date_list[i], data=timeseriesRes[i].reshape(length, width), compression='gzip')
    print ''
    # Attribute
    for key,value in atr.iteritems():
        group.attrs[key] = value
    h5.close()

    # 4. Step temporal Model estimation
    if inps.step_num > 0:
        stepFile = os.path.join(os.path.dirname(inps.outfile), 'timeseriesStepModel.h5')
        print 'writing >>> '+os.path.basename(stepFile)
        h5 = h5py.File(stepFile,'w')
        group = h5.create_group('timeseries')
        for i in range(inps.step_num):
            sys.stdout.write('\rwriting acquisition %3d/%3d ...' % (i+1, inps.step_num))
            sys.stdout.flush()
            dset = group.create_dataset(inps.step_date[i], data=stepModel[i].reshape(length, width), compression='gzip')
        print ''
        # Attribute
        for key,value in atr.iteritems():
            group.attrs[key] = value
        if 'ref_date' in group.attrs.keys():
            group.attrs.pop('ref_date')
        h5.close()

    print 'Done.'
    return
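
The example above estimates the DEM error jointly with a polynomial deformation model by solving one ordinary least-squares problem per pixel. Below is a minimal standalone sketch of that per-pixel inversion, assuming synthetic inputs (the baseline, range, and incidence values are illustrative, not taken from any real dataset):

import numpy as np
from scipy.special import gamma

# synthetic inputs for a single pixel (illustrative values only)
date_num = 20
tbase = np.linspace(0., 5., date_num).reshape(-1, 1)   # temporal baselines [years]
pbase = 100. * np.random.randn(date_num, 1)            # perpendicular baselines [m]
ts = 0.01 * np.random.randn(date_num, 1)               # phase history [m]
r, inc_angle = 850e3, np.deg2rad(34.)                  # slant range [m], incidence [rad]
poly_order = 2

# design matrix: DEM-error column + polynomial deformation columns
A_def = np.ones((date_num, 1), np.float32)
for i in range(poly_order):
    A_def = np.hstack((A_def, tbase**(i+1) / gamma(i+2)))   # gamma(i+2) == (i+1)!
A_deltaZ = pbase / (r * np.sin(inc_angle))
A = np.hstack((A_deltaZ, A_def))

# ordinary least squares; the first unknown is the DEM error deltaZ
X = np.linalg.lstsq(A, ts)[0]
deltaZ = X[0, 0]
ts_res = ts - np.dot(A, X)
print('estimated DEM error: %.2f m' % deltaZ)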
Example No. 25
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    #print '\n**************** Subset *********************'
    atr = readfile.read_attribute(inps.file[0])

    ##### Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > footprint
    if not inps.subset_x and not inps.subset_y and not inps.subset_lat and not inps.subset_lon:
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print 'using subset info from ' + inps.reference

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print 'using subset info from ' + inps.template_file

        # 3. Use subset from footprint info
        elif inps.footprint:
            if atr['FILE_TYPE'] == '.trans':
                # Non-zero area in geomap_*.trans file, accurate
                trans_rg, trans_atr = readfile.read(inps.file[0], (), 'range')
                idx_row, idx_col = np.nonzero(trans_rg)
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = box_pixel2geo(pix_box, trans_atr)
            else:
                print 'ERROR: --footprint option only works for geomap_*.trans file.\n'
                sys.exit(1)

            ## from LAT/LON_REF*, which is not accurate
            #lats = [atr['LAT_REF1'], atr['LAT_REF3'], atr['LAT_REF4'], atr['LAT_REF2']]
            #lons = [atr['LON_REF1'], atr['LON_REF3'], atr['LON_REF4'], atr['LON_REF2']]
            #lats = [float(i) for i in lats]
            #lons = [float(i) for i in lons]
            #lalo_buff = min([max(lats)-min(lats), max(lons)-min(lons)]) * 0.05
            #geo_box = (min(lons)-lalo_buff, max(lats)+lalo_buff, max(lons)+lalo_buff, min(lats)-lalo_buff)
            #pix_box = None
            #if not inps.fill_value: inps.fill_value = np.nan
            #print 'using subset info from scene footprint - LAT/LON_REF1/2/3/4'
        else:
            raise Exception('No subset inputs found!')
        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)

    ##### --bbox option
    if inps.trans_file:
        ## Separate files in radar and geo coord
        rdrFileList = []
        geoFileList = []
        for File in inps.file:
            atr = readfile.read_attribute(File)
            if 'X_FIRST' in atr.keys():
                geoFileList.append(File)
            else:
                rdrFileList.append(File)

        ## Calculate bbox
        rdrFile = rdrFileList[0]
        atr_rdr = readfile.read_attribute(rdrFile)
        if inps.subset_lat and inps.subset_lon:
            print 'use subset input in lat/lon'
            print 'calculate corresponding bounding box in radar coordinate.'
            geo_box = (inps.subset_lon[0], inps.subset_lat[1],
                       inps.subset_lon[1], inps.subset_lat[0])
            pix_box = bbox_geo2radar(geo_box, atr_rdr, inps.trans_file)
        else:
            print 'use subset input in y/x'
            print 'calculate corresponding bounding box in geo coordinate.'
            pix_box = (inps.subset_x[0], inps.subset_y[0], inps.subset_x[1],
                       inps.subset_y[1])
            geo_box = bbox_radar2geo(pix_box, atr_rdr, inps.trans_file)
        print 'geo   box: ' + str(geo_box)
        print 'pixel box: ' + str(pix_box)

        ## Subset files
        inps.fill_value = 0
        print '--------------------------------------------'
        print 'subsetting dataset in geo coord geo_box: ' + str(geo_box)
        inps = subset_box2inps(inps, None, geo_box)
        subset_file_list(geoFileList, inps)
        print '--------------------------------------------'
        print 'subsetting dataset in radar coord pix_box: ' + str(pix_box)
        inps = subset_box2inps(inps, pix_box, None)
        subset_file_list(rdrFileList, inps)

    else:
        ##### Subset files
        subset_file_list(inps.file, inps)

    print 'Done.'
    return
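
For files in geo coordinates, pix_box and geo_box are related through the X_FIRST/Y_FIRST origin and X_STEP/Y_STEP spacing attributes. Below is a minimal sketch of that conversion for a north-up grid, using the same (west, north, east, south) geo_box convention as above (the helper name and attribute values are illustrative, not the module's actual box_pixel2geo/bbox_geo2radar implementations):

def geo_box2pix_box(geo_box, atr):
    '''(lon_w, lat_n, lon_e, lat_s) -> (x0, y0, x1, y1); Y_STEP is negative.'''
    lon_w, lat_n, lon_e, lat_s = geo_box
    x_first, y_first = float(atr['X_FIRST']), float(atr['Y_FIRST'])
    x_step, y_step = float(atr['X_STEP']), float(atr['Y_STEP'])
    x0 = int((lon_w - x_first) / x_step)
    x1 = int((lon_e - x_first) / x_step)
    y0 = int((lat_n - y_first) / y_step)
    y1 = int((lat_s - y_first) / y_step)
    return (x0, y0, x1, y1)

atr = {'X_FIRST': '130.0', 'Y_FIRST': '33.0',
       'X_STEP': '0.000833', 'Y_STEP': '-0.000833'}   # illustrative attributes
print(geo_box2pix_box((130.2, 32.8, 130.5, 32.5), atr))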
Example No. 26
def auto_path_miami(inps, template={}):
    '''Auto File Path Setting for Geodesy Lab - University of Miami'''
    print 'Use auto path setting in University of Miami. '+\
          '(To turn it off, change miami_path value to False in pysar/__init__.py)'
    # PYSAR working directory
    if not inps.timeseries_dir:
        inps.timeseries_dir = os.getenv(
            'SCRATCHDIR') + '/' + inps.project_name + '/PYSAR'

    ##### .unw/.cor/.int files
    process_dir = os.getenv(
        'SCRATCHDIR') + '/' + inps.project_name + '/PROCESS'
    print "PROCESS directory: " + process_dir
    if inps.insarProcessor == 'roipac':
        if not inps.unw or inps.unw == 'auto':
            inps.unw = process_dir + '/DONE/IFGRAM*/filt_*.unw'
        if not inps.cor or inps.cor == 'auto':
            inps.cor = process_dir + '/DONE/IFGRAM*/filt_*rlks.cor'
        #if not inps.int or inps.int == 'auto':   inps.int = process_dir+'/DONE/IFGRAM*/filt_*rlks.int'
    elif inps.insarProcessor == 'gamma':
        if not inps.unw or inps.unw == 'auto':
            inps.unw = process_dir + '/DONE/IFGRAM*/diff_*rlks.unw'
        if not inps.cor or inps.cor == 'auto':
            inps.cor = process_dir + '/DONE/IFGRAM*/filt_*rlks.cor'
        #if not inps.int or inps.int == 'auto':   inps.int = process_dir+'/DONE/IFGRAM*/diff_*rlks.int'
    elif inps.insarProcessor == 'isce':
        process_dir = os.getenv('SCRATCHDIR') + '/' + inps.project_name
        if not inps.unw or inps.unw == 'auto':
            inps.unw = process_dir + '/merged/interferograms/*/filt*.unw'
        if not inps.cor or inps.cor == 'auto':
            inps.cor = process_dir + '/merged/interferograms/*/filt*.cor'
        if not inps.lut or inps.lut == 'auto':
            inps.lut = process_dir + '/merged/geom_master/l*.rdr'
        if not inps.dem_radar or inps.dem_radar == 'auto':
            inps.dem_radar = process_dir + '/merged/geom_master/hgt.rdr'
        if not inps.dem_geo or inps.dem_geo == 'auto': inps.dem_geo = ""
        #if not inps.int or inps.int == 'auto':   inps.int = process_dir+'/DONE/IFGRAM*/diff_*rlks.int'

    ##### master interferogram for lookup table and DEM in radar coord
    if all(fname and fname != 'auto'
           for fname in [inps.lut, inps.dem_radar, inps.dem_geo]):
        return inps

    try:
        m_date12 = np.loadtxt(process_dir + '/master_ifgram.txt',
                              dtype=str).tolist()
    except:
        try:
            m_date12 = os.walk(process_dir +
                               '/GEO').next()[1][0].split('geo_')[1]
        except:
            pass

    if not inps.lut or inps.lut == 'auto':
        try:
            if inps.insarProcessor == 'roipac':
                inps.lut = process_dir + '/GEO/*' + m_date12 + '*/geomap*.trans'
            elif inps.insarProcessor == 'gamma':
                inps.lut = process_dir + '/SIM/sim_' + m_date12 + '/sim_*.UTM_TO_RDC'
        except:
            warnings.warn(
                'No master interferogram found! Cannot locate mapping transformation file for geocoding!'
            )

    if not inps.dem_radar or inps.dem_radar == 'auto':
        try:
            if inps.insarProcessor == 'roipac':
                inps.dem_radar = process_dir + '/DONE/*' + m_date12 + '*/radar*.hgt'
            elif inps.insarProcessor == 'gamma':
                inps.dem_radar = process_dir + '/SIM/sim_' + m_date12 + '/sim_*.hgt_sim'
        except:
            warnings.warn(
                'No master interferogram found! Cannot locate DEM in radar coord!'
            )

    # Use DEMg/DEM option if dem_geo is not specified in pysar option
    dem_dir = os.getenv('SCRATCHDIR') + '/' + inps.project_name + '/DEM'
    if inps.dem_geo is None or inps.dem_geo == 'auto':
        inps.dem_geo = []
        if os.path.isdir(dem_dir):
            inps.dem_geo = [dem_dir + '/*.dem']
        elif inps.insarProcessor == 'gamma':
            inps.dem_geo = [
                process_dir + '/SIM/sim_' + m_date12 + '/sim_*.utm.dem'
            ]

        if 'DEMg' in template.keys(): inps.dem_geo.append(template['DEMg'])
        elif 'DEM' in template.keys(): inps.dem_geo.append(template['DEM'])
        try:
            inps.dem_geo = ut.get_file_list(inps.dem_geo)[0]
        except:
            inps.dem_geo = None

        if not inps.dem_geo:
            warnings.warn('Cannot locate DEM in geo coord!')

    return inps
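
The auto-path logic above reduces to trying an ordered list of glob patterns and keeping the first match. Below is a minimal standalone sketch of that fallback pattern (glob is used directly here in place of ut.get_file_list, and the patterns are illustrative):

import glob

def first_match(pattern_list):
    '''Return the first existing file among an ordered list of glob patterns.'''
    for pattern in pattern_list:
        matches = sorted(glob.glob(pattern))
        if matches:
            return matches[0]
    return None

dem_geo = first_match(['./DEM/*.dem', './SIM/sim_*/sim_*.utm.dem'])
print('DEM in geo coord: %s' % dem_geo)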
Example No. 27
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(
                inps.dem_file)[0] + '4pyaps' + atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Grib data directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(
                inps.dem_file)) + '/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: ' + inps.weather_dir
    grib_dir = inps.weather_dir + '/' + inps.grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: ' + grib_dir
        os.makedirs(grib_dir)

    ## Get Acquisition time
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                                 inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    ## Get grib file list and date list
    inps.grib_file_list = []
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: ' + inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    for d in dateList:
        if inps.grib_source == 'ECMWF':
            grib_file = grib_dir + '/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = grib_dir + '/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = grib_dir + '/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = grib_dir + '/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

    ## Get date list to download
    grib_file_existed = ut.get_file_list(inps.grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode(
            [os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if os.path.getsize(i) != grib_filesize_mode
        ]
        print 'number of grib files existing  : %d' % len(grib_file_existed)
        print 'file size mode: %d' % grib_filesize_mode
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Delete them and re-download...'
            print 'number of grib files corrupted  : %d' % len(
                grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(
        list(set(inps.grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall('\d{8}', i)[0]) for i in grib_file2download
    ]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if inps.grib_source == 'ECMWF':
        pa.ECMWFdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'ERA':
        pa.ERAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'MERRA':
        pa.MERRAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'NARR':
        pa.NARRdload(date_list2download, inps.hour, grib_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data - phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm ' + inps.dem_file + ' ' + inps.dem_file + '.rsc '
        print rmCmd
        os.system(rmCmd)

    print 'Done.'

    return
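
The incidence angle enters the correction only through the zenith-to-slant mapping: a zenith delay d_z projects to a slant delay d_z / cos(theta), and delays are then referenced to one epoch. Below is a minimal sketch with synthetic zenith delays (all values illustrative):

import numpy as np

inc_angle = np.deg2rad(34.)                      # incidence angle [rad], illustrative
zenith_delay = np.array([2.31, 2.35, 2.28])      # zenith delay per epoch [m], illustrative
slant_delay = zenith_delay / np.cos(inc_angle)   # map zenith to slant (line-of-sight) delay

# relative delay with respect to the reference epoch (index 0 here)
rel_delay = slant_delay - slant_delay[0]
print(rel_delay)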
Example No. 28
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        if 'ref_y' not in atr.keys() and inps.ref_yx:
            print 'No reference info found in input file, use input ref_yx: ' + str(
                inps.ref_yx)
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    #****reading incidence angle file***/
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle,
                                       epoch='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        inps.inc_angle = float(inps.inc_angle)
        print 'incidence angle: ' + str(inps.inc_angle)
    cinc = np.cos(inps.inc_angle * np.pi / 180.0)

    #****look up file****/
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list(
            [inps.lookup_file])[0]  #'geomap_32rlks_tight.trans'

    #****GACOS****/
    delay_source = 'GACOS'
    # Get weather directory
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(
                inps.lookup_file)) + '/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())

    print 'Store weather data into directory: ' + inps.GACOS_dir

    #source_dir = os.path.dirname(os.path.abspath('timeseries_file'))+'/Agung/GACOS/data'
    #os.makedirs(GACOS_dir)
    # TODO: add part to copy/download weather data
    #----get date list-----#
    if not inps.date_list_file:
        print 'read date list info from: ' + inps.timeseries_file
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in h5.keys():
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            ifgram_list = sorted(h5[k].keys())
            date12_list = ptime.list_ifgram2date12(ifgram_list)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Unsupported input file type: ' + k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    #****checking availability of delays****/
    print 'checking availability of delays'
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir + '/' + d + '.ztd'
        delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print 'no missing delay files'
    else:
        print 'no. of delay files found:', len(delay_file_existed)
        print 'no. of dates            :', len(date_list)

    #*****Calculating delays***/
    print 'calculating delays'

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    #initialise delay files
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    # read GACOS delay file for each epoch and compute the slant delay
    for i in range(date_num):
        delay_file = delay_file_list[i]   # index the full list to stay aligned with date_list
        date = date_list[i]
        print 'calculating delay for date', date
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)

    print 'Delays Calculated'
    # Convert relative phase delay on reference date
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]
    print 'convert to relative phase delay with reference date: ' + ref_date
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = 'GACOSdelays.h5'
    print 'writing >>> %s' % (tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print 'number of acquisitions: ' + str(date_num)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    # Write Attributes
    for key, value in atr.iteritems():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(
                inps.timeseries_file)[0] + '_GACOS.h5'
        print 'writing trop corrected timeseries file %s' % (inps.out_file)
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print 'number of acquisitions: ' + str(date_num)
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            print date
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date,
                                       data=ts - trop_ts[i],
                                       compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key, value in atr.iteritems():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print 'delays written to %s' % (inps.out_file)

    print 'finished'
    return inps.out_file
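
Re-referencing the delay cube to the reference date, done above with np.tile, can also be written with broadcasting. Below is a minimal sketch on a synthetic (date_num, length, width) cube:

import numpy as np

date_num, length, width = 5, 3, 4
trop_ts = np.random.rand(date_num, length, width).astype(np.float32)
ref_idx = 2

# equivalent to: trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))
trop_ts -= trop_ts[ref_idx].copy()        # copy() avoids overlap with the in-place update
print(np.allclose(trop_ts[ref_idx], 0.))  # True: reference epoch is now zero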
Example No. 29
def load_file(fileList, inps_dict=dict(), outfile=None, file_type=None):
    '''Load input file(s) into one HDF5 file 
    It supports ROI_PAC files only for now.
    Inputs:
        fileList  - string / list of string, path of files to load
        inps_dict - dict, including the following attributes
                    PROJECT_NAME   : KujuAlosAT422F650  (extra attribute dictionary to add to output file)
                    sensor         : (optional)
                    timeseries_dir : directory of time series analysis, e.g. KujuAlosAT422F650/PYSAR
                    insarProcessor: InSAR processor, roipac, isce, gamma, doris
        outfile   - string, output file name
        file_type - string, group name for output HDF5 file, interferograms, coherence, dem, etc.
    Output:
        outfile - string, output file name
    Example:
        unwrapIfgram.h5 = load_file('filt*.unw', inps_dict=vars(inps))
    '''
    # Get project_name from input template file
    if 'project_name' not in inps_dict.keys() and 'template_file' in inps_dict.keys():
        template_filename_list = [
            os.path.basename(i) for i in inps_dict['template_file']
        ]
        try:
            template_filename_list.remove('pysarApp_template.txt')
        except:
            pass
        if template_filename_list:
            inps_dict['project_name'] = os.path.splitext(
                template_filename_list[0])[0]

    # Sensor
    if 'project_name' in inps_dict.keys():
        inps_dict['PLATFORM'] = project_name2sensor(inps_dict['project_name'])

    # Input file(s) info
    fileList = ut.get_file_list(fileList, abspath=True)
    if not fileList:
        return None

    ##### Prepare attributes file
    processor = inps_dict['insarProcessor']
    print '--------------------------------------------'
    print 'preparing attributes files using prep_%s.py ...' % processor
    # prepare multiple files input for cmd calling
    files_input = ' '.join(fileList)
    # call prepare_*.py
    if processor == 'gamma':
        prepCmd = 'prep_gamma.py ' + files_input
        os.system(prepCmd)
    elif processor == 'roipac':
        prepCmd = 'prep_roipac.py ' + files_input
        os.system(prepCmd)
    elif processor == 'isce':
        prepCmd = 'prep_isce.py ' + files_input
        #os.system(prepCmd)
    else:
        print 'Unsupported InSAR processor: ' + processor
        print 'Skip preparing attributes files'

    print '----------------------------'
    print 'loading files ...'
    atr = readfile.read_attribute(fileList[0])
    k = atr['FILE_TYPE']
    print 'Input file(s): ' + atr['PROCESSOR'] + ' ' + k

    # Get output file type
    if not file_type:
        if k in ['.unw']: file_type = 'interferograms'
        elif k in ['.cor']: file_type = 'coherence'
        elif k in ['.int']: file_type = 'wrapped'
        elif k in ['.byt']: file_type = 'snaphu_connect_component'
        elif k in ['.msk']: file_type = 'mask'
        elif k in ['.hgt', '.dem', 'dem', '.hgt_sim']:
            file_type = 'dem'
        elif k in ['.trans', '.utm_to_rdc', 'geometry']:
            file_type = 'geometry'
        else:
            file_type = k

    # Get output file name
    if not outfile:
        # output file basename
        if file_type == 'interferograms': outfile = 'unwrapIfgram.h5'
        elif file_type == 'coherence': outfile = 'coherence.h5'
        elif file_type == 'wrapped': outfile = 'wrapIfgram.h5'
        elif file_type == 'snaphu_connect_component':
            outfile = 'snaphuConnectComponent.h5'
        elif file_type == 'mask':
            outfile = 'mask.h5'
        elif file_type == 'dem':
            if 'Y_FIRST' in atr.keys():
                outfile = 'demGeo.h5'
            else:
                outfile = 'demRadar.h5'

        # output directory
        if 'timeseries_dir' in inps_dict.keys(
        ) and inps_dict['timeseries_dir']:
            outdir = inps_dict['timeseries_dir']
        else:
            outdir = os.path.abspath(os.getcwd())
        if outfile:
            outfile = outdir + '/' + outfile
    if outfile:
        outfile = os.path.abspath(outfile)

    # Convert
    if file_type in multi_group_hdf5_file:
        outfile = load_multi_group_hdf5(file_type,
                                        fileList,
                                        outfile=outfile,
                                        exDict=inps_dict)[0]

    elif file_type in single_dataset_hdf5_file:
        outfile = load_single_dataset_hdf5(file_type,
                                           fileList[-1],
                                           outfile=outfile,
                                           exDict=inps_dict)

    elif file_type in ['geometry', '.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        outfile = load_geometry_hdf5(file_type,
                                     fileList,
                                     outfile=outfile,
                                     exDict=inps_dict)
    else:
        warnings.warn('Unsupported file type: ' + file_type)

    return outfile
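
Below is a hedged usage sketch for load_file as documented above; it assumes the module context of this example (readfile, ut, the load_* helpers) is importable, and every dictionary value is illustrative:

# illustrative inputs; inps_dict only needs the keys load_file actually reads
inps_dict = {
    'project_name'  : 'KujuAlosAT422F650',
    'insarProcessor': 'roipac',
    'timeseries_dir': './KujuAlosAT422F650/PYSAR',
    'template_file' : ['KujuAlosAT422F650.template'],
}
outfile = load_file('filt_*.unw', inps_dict=inps_dict)   # -> unwrapIfgram.h5
print('loaded interferograms into: %s' % outfile)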
Example No. 30
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(inps.template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            coh_file_idx = k_list.index('coherence')
            inps.coherence_file = inps.file[coh_file_idx]
        except ValueError:
            print 'No coherence file found! Cannot use coherence-based method without it.'
            inps.coherence_based = False

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
            # Check lookup file
            if not inps.lookup_file:
                print 'Warning: no lookup table file found! Cannot use ' + key + ' option without it.'
                print 'skip this option.'
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
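
Every option above follows the same lookup pattern: a missing or 'auto' value falls back to a default, and 'no' disables the option. Below is a minimal sketch of that convention as a helper (the helper name is hypothetical; the key and default are taken from the example above):

def get_template_value(template, key, auto_value=None, no_value=None):
    '''Resolve a template option using the 'auto'/'no' conventions.'''
    if key not in template.keys():
        return auto_value
    value = template[key]
    if value == 'auto':
        return auto_value
    elif value == 'no':
        return no_value
    return value

template = {'pysar.network.minCoherence': 'auto'}
print(get_template_value(template, 'pysar.network.minCoherence', auto_value=0.7))  # 0.7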
Example No. 31
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based,\
                           inps.start_date, inps.end_date, inps.reset]):
        # Display the network for manual modification when no other modification option is given.
        print 'No input option found to remove interferograms'
        if inps.template_file:
            print 'Keep all interferograms by enabling the --reset option'
            inps.reset = True
        else:
            print 'To manually modify the network, please use the --manual option'
            return

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)
        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)
        return

    # Convert index input (e.g. '3:5') to a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file,
                                              check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.lookup_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.lookup_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if abs(ifg_bperp_list[i]) > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print 'number of pairs to drop: %d' % (len(date12_to_rmv_temp))
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5, print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing marked input file; skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms' and inps.update_aux:
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)

            elif k == 'coherence' and inps.update_aux:
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch spatial average txt file of coherence if it's existed
                coh_spatialAverage_file = os.path.splitext(
                    Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
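
The exclude-index handling above expands entries like '3:5' into the full run of indices. Below is a minimal standalone sketch of that parsing (input format assumed from the example above):

def expand_index_list(index_list):
    '''Expand ['3:5', '8'] into [3, 4, 5, 8].'''
    out = []
    for index in index_list:
        parts = sorted(int(i) for i in index.split(':'))
        if len(parts) == 2:
            out += list(range(parts[0], parts[1] + 1))
        else:
            out.append(parts[0])
    return sorted(out)

print(expand_index_list(['3:5', '8']))  # [3, 4, 5, 8]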
Example No. 32
def main(argv):

    global outName
    parallel = 'no'

    ############### Check Inputs ###############
    if len(sys.argv)>3:
        try:
            opts, args = getopt.getopt(argv,'f:l:L:o:t:x:y:r:',['lat=','lon=','row=','col=',\
                                            'parallel','outfill=','outfill-nan','outfill-zero'])
        except getopt.GetoptError:
            print 'Error while getting args'
            Usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt == '-f':   File         = arg.split(',')
            elif opt == '-o':   outName      = arg
            elif opt == '-t':   templateFile = arg
            elif opt == '-r':   refFile      = arg
            elif opt in ['-x','--col']  :   sub_x   = [int(i)   for i in arg.split(':')];    sub_x.sort()
            elif opt in ['-y','--row']  :   sub_y   = [int(i)   for i in arg.split(':')];    sub_y.sort()
            elif opt in ['-l','--lat']  :   sub_lat = [float(i) for i in arg.split(':')];  sub_lat.sort()
            elif opt in ['-L','--lon']  :   sub_lon = [float(i) for i in arg.split(':')];  sub_lon.sort()
            elif opt == '--parallel'    :   parallel = 'yes'
            elif opt == '--outfill'     :   out_fill = float(arg)
            elif opt == '--outfill-nan' :   out_fill = np.nan
            elif opt == '--outfill-zero':   out_fill = 0.0

    elif len(sys.argv)==3:
        File         = argv[0].split(',')
        templateFile = argv[1]
    elif len(sys.argv)==2:
        if argv[0] in ['-h','--help']:  Usage(); sys.exit()
        else: print '\nERROR: A minimum of 3 inputs is needed.\n'; Usage(); sys.exit()
    else: Usage(); sys.exit(1)

    ##### Check Input file Info
    print '\n**************** Subset *********************'
    fileList = ut.get_file_list(File)
    print 'number of files: '+str(len(fileList))
    print fileList
    atr = readfile.read_attributes(fileList[0])

    if len(fileList) == 1 and parallel == 'yes':
        print 'parallel is disabled for one input file.'
        parallel = 'no'

    ################## Subset Setting ###########
    try:
        atr['X_FIRST']
        print 'geo coordinate'
    except:
        print 'radar coordinate'
    ## Read Subset Inputs
    try:
        templateFile
        template = readfile.read_template(templateFile)
    except: pass

    try:
        refFile
        atr_ref = readfile.read_attributes(refFile)
        box_ref = geo_box(atr_ref)
        lat_ref = [box_ref[3],box_ref[1]]
        lon_ref = [box_ref[0],box_ref[2]]
    except: pass

    try:
        sub_lat
        sub_lon
    except:
        try:
            sub_lat = lat_ref
            sub_lon = lon_ref
        except:
            try:
                sub = template['pysar.subset.lalo'].split(',')
                sub_lat = [float(i) for i in sub[0].split(':')];  sub_lat.sort()
                sub_lon = [float(i) for i in sub[1].split(':')];  sub_lon.sort()
            except: pass  #print 'No pysar.subset.lalo option found in template file!'

    try:
        sub_y
        sub_x
    except:
        try:
            sub = template['pysar.subset.yx'].split(',')
            sub_y = [int(i) for i in sub[0].split(':')];  sub_y.sort()
            sub_x = [int(i) for i in sub[1].split(':')];  sub_x.sort()
        except: pass  #print 'No pysar.subset.yx option found in template file!'

    ## Check Subset Inputs Existed or not
    try:     sub_y
    except:
        try: sub_x
        except:
            try: sub_lat
            except:
                try: sub_lon
                except: print 'ERROR: no subset is set.'; Usage(); sys.exit(1)

    ##### Subset range radar to geo
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    print 'input file length: '+str(length)
    print 'input file width : '+str(width)

    try: sub_y = coord_geo2radar(sub_lat,atr,'latitude')
    except:
        try:    sub_y
        except: sub_y = [0,length]
    try: sub_x = coord_geo2radar(sub_lon,atr,'longitude')
    except:
        try:    sub_x
        except: sub_x = [0,width]

    ##### Check subset range
    try:
        out_fill
    except:
        sub_y,sub_x = check_subset_range(sub_y,sub_x,atr)
        out_fill = np.nan
        if sub_y[1]-sub_y[0] == length and sub_x[1]-sub_x[0] == width:
            print 'Input subset range == data size, no need to subset.'
            sys.exit(0)

    ################### Subset #######################
    if parallel == 'no':
        for file in fileList:
            print '-------------------------------------------'
            print 'subsetting : '+file
            try:    subset_file(file,sub_x,sub_y,out_fill,outName)
            except: subset_file(file,sub_x,sub_y,out_fill)

    else:
        print '-------------------------'
        print 'parallel subsetting ...'
        print '-------------------------'
        from joblib import Parallel, delayed
        import multiprocessing
        num_cores = multiprocessing.cpu_count()
        Parallel(n_jobs=num_cores)(delayed(subset_file)(file,sub_x,sub_y,out_fill) for file in fileList)

    print 'Done.'
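
The parallel branch above is the joblib map pattern used throughout these examples: pick a core count, then fan the per-file worker out with Parallel/delayed. Below is a minimal self-contained sketch (process_one is a stand-in for subset_file, and the file names are illustrative):

import multiprocessing
from joblib import Parallel, delayed

def process_one(fname):
    '''Stand-in worker for subset_file / mask_file / multilook_file.'''
    return 'processed ' + fname

file_list = ['a.unw', 'b.unw', 'c.unw']   # illustrative file names
num_cores = min(multiprocessing.cpu_count(), len(file_list))
results = Parallel(n_jobs=num_cores)(delayed(process_one)(f) for f in file_list)
print(results)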