Example #1
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'file type: ' + k

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has different epoch number '+\
                  'from input file.'
            print 'Reference List epoch number: ' + str(len(refList))
            print 'Input file     epoch number: ' + str(epochNum)
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print 'number of acquisitions: ' + str(epochNum)
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(epochNum)
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print 'writing >>> ' + outName
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
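A hypothetical usage sketch (file name, dataset key, and reference pixel are assumptions): build refList from a chosen pixel of a timeseries file, then seed it.

import h5py

ref_y, ref_x = 300, 500                      # hypothetical reference pixel
h5 = h5py.File('timeseries.h5', 'r')         # hypothetical input file
epochList = sorted(h5['timeseries'].keys())
# reference value of every epoch at the chosen pixel
refList = [h5['timeseries'].get(epoch)[ref_y, ref_x] for epoch in epochList]
h5.close()
seed_file_reference_value('timeseries.h5', 'timeseries_seeded.h5',
                          refList, ref_y=ref_y, ref_x=ref_x)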
Example #2
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        if 'ref_y' not in atr.keys() and inps.ref_yx:
            print 'No reference info found in input file, use input ref_yx: '+str(inps.ref_yx)
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    ##Read Incidence angle: to map the zenith delay to the slant delay
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle, epoch='incidenceAngle')[0]
    else:
        inps.inc_angle = float(inps.inc_angle)
        print 'incidence angle: '+str(inps.inc_angle)
    inps.inc_angle = inps.inc_angle*np.pi/180.0

    ##Prepare DEM file in ROI_PAC format for PyAPS to read
    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file, epoch='height')
            if 'Y_FIRST' in atr.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if   inps.weather_model in ['ECMWF','ERA-Interim']:   inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA'  :                   inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':                   inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR' :                   inps.grib_source = 'NARR'
    else: raise Exception('Unrecognized weather model: '+inps.weather_model)
    print 'grib source: '+inps.grib_source

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: '+inps.weather_dir

    # Get date list to download
    if not inps.date_list_file:
        print 'read date list info from: '+inps.timeseries_file
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in h5.keys():
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms','coherence','wrapped']:
            ifgram_list = sorted(h5[k].keys())
            date12_list = ptime.list_ifgram2date12(ifgram_list)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Un-support input file type:'+k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print 'read date list info from: '+inps.date_list_file

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print 'Time of closest available product: '+inps.hour

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(date_list, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Calculate tropo delay using pyaps
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)
    for i in range(date_num):
        grib_file = inps.grib_file_list[i] 
        date = date_list[i]
        print 'calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file))
        trop_ts[i] = get_delay(grib_file, atr, vars(inps))

    ## Convert relative phase delay on reference date
    try:    ref_date = atr['ref_date']
    except: ref_date = date_list[0]
    print 'convert to relative phase delay with reference date: '+ref_date
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx,:,:], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = inps.grib_source+'.h5'
    print 'writing >>> %s' % (tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print 'number of acquisitions: '+str(date_num)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    # Write Attributes
    for key,value in atr.iteritems():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
        print 'writing >>> %s' % (inps.out_file)
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')    
        group_tsCor = h5tsCor.create_group('timeseries')
        print 'number of acquisitions: '+str(date_num)
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date, data=ts-trop_ts[i], compression='gzip')
            prog_bar.update(i+1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key,value in atr.iteritems():
            group_tsCor.attrs[key] = value
        h5tsCor.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm %s %s.rsc' % (inps.dem_file, inps.dem_file)
        print rmCmd
        os.system(rmCmd)
    print 'Done.'
    return inps.out_file
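For reference, the zenith-to-slant mapping enabled by the incidence angle above is a simple cosine projection; a standalone sketch with hypothetical numbers:

import numpy as np

zenith_delay = 0.05                          # zenith delay in meters (hypothetical)
inc_angle = 34.0 * np.pi / 180.0             # incidence angle in radians
slant_delay = zenith_delay / np.cos(inc_angle)
print 'slant delay: %.4f m' % slant_delay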
Example #3
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. filled value for area outside of data coverage. default=None
                                   None or absent to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print 'subset ' + k + ' file: ' + File + ' ...'

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = 'fill_value' in subset_dict.keys() and bool(subset_dict['fill_value'])
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print 'data   range in y/x: ' + str(data_box)
    print 'subset range in y/x: ' + str(pix_box)
    print 'data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict))
    print 'subset range in lat/lon: ' + str(geo_box)

    if pix_box == data_box:
        print 'Subset range == data coverage, no need to subset. Skip.'
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                outFile = os.path.splitext(File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: ' + str(epochNum)
        else:
            print 'number of interferograms: ' + str(epochNum)

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k in ['.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
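A hypothetical call: crop a file to a pixel box, padding pixels outside the data coverage (file names and ranges are assumptions):

subset_dict = {'subset_x': [200, 800],       # column range
               'subset_y': [100, 600],       # row range
               'fill_value': 0.0}            # pad area outside data coverage
subset_file('velocity.h5', subset_dict, outFile='subset_velocity.h5')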
Example #4
def main(argv):
    inps = cmdLineParse()

    atr = readfile.read_attribute(inps.velocity_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Check subset input
    if inps.subset_y:
        inps.subset_y = sorted(inps.subset_y)
        print 'subset in y/azimuth direction: ' + str(inps.subset_y)
    else:
        inps.subset_y = [0, length]

    if inps.subset_x:
        inps.subset_x = sorted(inps.subset_x)
        print 'subset in x/range direction: ' + str(inps.subset_x)
    else:
        inps.subset_x = [0, width]
    y0, y1 = inps.subset_y
    x0, x1 = inps.subset_x

    # Read velocity/rate
    velocity = readfile.read(inps.velocity_file)[0]
    print 'read velocity file: ' + inps.velocity_file

    k = 'interferograms'
    h5 = h5py.File(inps.ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    print 'number of interferograms: ' + str(ifgram_num)

    ##### Select interferograms with unwrapping error
    if inps.percentage > 0.0:
        mask = readfile.read(inps.mask_file)[0]
        print 'read mask for pixels with unwrapping error from file: ' + inps.mask_file

        unw_err_ifgram_num = int(np.rint(inps.percentage * ifgram_num))
        unw_err_ifgram_idx = random.sample(range(ifgram_num),
                                           unw_err_ifgram_num)
        unw_err_ifgram_list = [ifgram_list[i] for i in unw_err_ifgram_idx]
        unw_err_date12_list = [date12_list[i] for i in unw_err_ifgram_idx]
        print 'randomly choose the following %d interferograms with unwrapping error' % unw_err_ifgram_num
        print unw_err_date12_list

        unit_unw_err = 2.0 * np.pi * mask
    else:
        unw_err_ifgram_list = []

    ###### Generate simulated interferograms
    m_dates = ptime.yyyymmdd([i.split('-')[0] for i in date12_list])
    s_dates = ptime.yyyymmdd([i.split('-')[1] for i in date12_list])
    range2phase = -4.0 * np.pi / float(atr['WAVELENGTH'])

    print 'writing simulated interferograms file: ' + inps.outfile
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group('interferograms')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # Get temporal baseline in years
        t1 = datetime.datetime(*time.strptime(m_dates[i], "%Y%m%d")[0:5])
        t2 = datetime.datetime(*time.strptime(s_dates[i], "%Y%m%d")[0:5])
        dt = (t2 - t1)
        dt = float(dt.days) / 365.25

        # Simulated interferograms with unwrapping error
        unw = velocity * dt * range2phase
        if ifgram in unw_err_ifgram_list:
            rand_int = random.sample(range(1, 10), 1)[0]
            unw += rand_int * unit_unw_err
            print ifgram + '  - add unwrapping error of %d*2*pi' % rand_int
        else:
            print ifgram

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram,
                                 data=unw[y0:y1, x0:x1],
                                 compression='gzip')

        for key, value in h5[k][ifgram].attrs.iteritems():
            gg.attrs[key] = value
        if ifgram in unw_err_ifgram_list:
            gg.attrs['unwrap_error'] = 'yes'
        else:
            gg.attrs['unwrap_error'] = 'no'
        gg.attrs['FILE_LENGTH'] = y1 - y0
        gg.attrs['WIDTH'] = x1 - x0
    h5.close()
    h5out.close()
    print 'Done.'
    return inps.outfile
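The core of the simulation above is the rate-to-phase conversion; a standalone sketch with hypothetical values:

import numpy as np

wavelength = 0.0562                          # radar wavelength in meters (hypothetical)
velocity = 0.01                              # LOS velocity in m/yr (hypothetical)
dt = 0.5                                     # temporal baseline in years
range2phase = -4.0 * np.pi / wavelength      # meters -> radians (two-way path)
unw = velocity * dt * range2phase            # simulated unwrapped phase
print 'simulated phase: %.2f rad' % unw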
Example #5
def ifgram_inversion(ifgramFile='unwrapIfgram.h5',
                     coherenceFile='coherence.h5',
                     meta=None):
    '''Implementation of the SBAS algorithm.
    Modified from sbas.py written by Scott Baker, 2012.

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        meta          - dict, including the following options:
                        weight_function
                        chunk_size - float, max number of data (ifgram_num*row_num*col_num)
                                     to read per loop; to control the memory
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    Example:
        meta = dict()
        meta['weight_function'] = 'variance'
        meta['chunk_size'] = 0.5e9
        meta['timeseriesFile'] = 'timeseries_var.h5'
        meta['tempCohFile'] = 'temporalCoherence_var.h5'
        ifgram_inversion('unwrapIfgram.h5', 'coherence.h5', meta)
    '''
    if not meta:
        meta = vars(cmdLineParse())
    if 'tempCohFile' not in meta.keys():
        meta['tempCohFile'] = 'temporalCoherence.h5'
    total = time.time()

    if meta['update_mode'] and not ut.update_file(meta['timeseriesFile'],
                                                  ifgramFile):
        return meta['timeseriesFile'], meta['tempCohFile']

    ##### Basic Info
    # length/width
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    meta['length'] = length
    meta['width'] = width

    # ifgram_list
    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    #if meta['weight_function'] in ['no','uniform']:
    #    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    meta['ifgram_list'] = ifgram_list
    ifgram_num = len(ifgram_list)

    # date12_list/date8_list/tbase_diff
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    meta['date8_list'] = date8_list
    meta['date12_list'] = date12_list

    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((-1, 1))
    meta['tbase_diff'] = tbase_diff

    print 'number of interferograms: %d' % (ifgram_num)
    print 'number of acquisitions  : %d' % (date_num)
    print 'number of columns: %d' % (width)
    print 'number of lines  : %d' % (length)

    ##### ref_y/x/value
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
        ref_value = np.zeros((ifgram_num, 1), np.float32)
        for j in range(ifgram_num):
            ifgram = ifgram_list[j]
            dset = h5ifgram['interferograms'][ifgram].get(ifgram)
            ref_value[j] = dset[ref_y, ref_x]
        meta['ref_y'] = ref_y
        meta['ref_x'] = ref_x
        meta['ref_value'] = ref_value
    except:
        if meta['skip_ref']:
            meta['ref_value'] = 0.0
            print 'Skip checking reference pixel info - This is for SIMULATION ONLY.'
        else:
            print 'ERROR: No ref_x/y found! Cannot invert interferograms without a reference in space.'
            print 'Run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
            sys.exit(1)
    h5ifgram.close()

    ##### Rank of Design matrix for weighted inversion
    A, B = ut.design_matrix(ifgramFile, date12_list)
    print '-------------------------------------------------------------------------------'
    if meta['weight_function'] in ['no', 'uniform']:
        print 'ordinary least squares (OLS) inversion with min-norm phase velocity'
        print '    based on Berardino et al. (2002, IEEE-TGRS)'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'WARNING: singular design matrix! Inversion result can be biased!'
            print 'continue using its SVD solution'
    else:
        print 'weighted least squares (WLS) inversion with min-norm phase, pixelwise'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Cannot invert for the weighted least squares solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)
    print '-------------------------------------------------------------------------------'

    ##### Inverse time-series phase
    ##Check parallel environment
    if meta['weight_function'] in ['no', 'uniform']:
        meta['parallel'] = False
    if meta['parallel']:
        num_cores, meta['parallel'], Parallel, delayed = ut.check_parallel(
            1000, print_msg=False)

    ##Split into chunks to reduce memory usage
    r_step = meta['chunk_size'] / ifgram_num / width  #split in lines
    if meta['weight_function'] not in ['no', 'uniform']:  # more memory usage (coherence) for WLS
        r_step /= 2.0
        if meta['parallel']:
            r_step /= num_cores
    r_step = int(ceil_to_1(r_step))
    meta['row_step'] = r_step
    chunk_num = int((length - 1) / r_step) + 1

    if chunk_num > 1:
        print 'maximum chunk size: %.1E' % (meta['chunk_size'])
        print 'split %d lines into %d patches for processing' % (length,
                                                                 chunk_num)
        print '    with each patch up to %d lines' % (r_step)
        if meta['parallel']:
            print 'parallel processing using %d cores ...' % (min(
                [num_cores, chunk_num]))

    ##Computing the inversion
    box_list = []
    for i in range(chunk_num):
        r0 = i * r_step
        r1 = min([length, r0 + r_step])
        box = (0, r0, width, r1)
        box_list.append(box)
    box_num = len(box_list)

    if not meta['parallel']:
        timeseries = np.zeros((date_num, length, width), np.float32)
        temp_coh = np.zeros((length, width), np.float32)
        for i in range(box_num):
            if box_num > 1:
                print '\n------- Processing Patch %d out of %d --------------' % (
                    i + 1, box_num)
            box = box_list[i]
            ts, tcoh = ifgram_inversion_patch(ifgramFile, coherenceFile, meta,
                                              box)
            timeseries[:, box[1]:box[3], box[0]:box[2]] = ts
            temp_coh[box[1]:box[3], box[0]:box[2]] = tcoh

    else:
        ##Temp file list
        meta['ftemp_base'] = 'timeseries_temp_'
        temp_file_list = [
            meta['ftemp_base'] + str(i) + '.h5' for i in range(chunk_num)
        ]

        ##Computation
        Parallel(n_jobs=num_cores)(delayed(ifgram_inversion_patch)\
                                   (ifgramFile, coherenceFile, meta, box) for box in box_list)

        ##Concatenate temp files
        print 'concatenating temporary timeseries files ...'
        timeseries = np.zeros((date_num, length, width), np.float32)
        temp_coh = np.zeros((length, width), np.float32)
        rmCmd = 'rm'
        for i in range(chunk_num):
            fname = temp_file_list[i]
            box = box_list[i]
            print 'reading ' + fname
            h5temp = h5py.File(fname, 'r')
            dset = h5temp['timeseries'].get('timeseries')
            timeseries[:, box[1]:box[3], box[0]:box[2]] = dset[0:-1, :, :]
            temp_coh[box[1]:box[3], box[0]:box[2]] = dset[-1, :, :]
            h5temp.close()
            rmCmd += ' ' + fname
        print rmCmd
        os.system(rmCmd)

    ##### Calculate time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(
        ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(
        None, '[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    ##### Output
    ## 1. Write time-series file
    meta['timeseriesFile'] = write_timeseries_hdf5_file(timeseries, date8_list, atr,\
                                                timeseriesFile=meta['timeseriesFile'])

    ## 2. Write Temporal Coherence File
    print 'writing >>> ' + meta['tempCohFile']
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    meta['tempCohFile'] = writefile.write(temp_coh, atr, meta['tempCohFile'])

    print 'Time series inversion took ' + str(time.time() -
                                              total) + ' secs\nDone.'
    return meta['timeseriesFile'], meta['tempCohFile']
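A toy-sized, self-contained sketch of the min-norm phase-velocity inversion performed per patch (the real design matrix comes from ut.design_matrix; all numbers here are hypothetical):

import numpy as np

# 3 acquisitions -> 2 intervals, spanned by 3 interferograms.
# B[i, j] = length (in years) of interval j covered by interferogram i.
B = np.array([[0.1, 0.0],
              [0.0, 0.2],
              [0.1, 0.2]])
phase = np.array([[0.5], [1.0], [1.5]])      # unwrapped ifgram phase, radians
vel = np.dot(np.linalg.pinv(B), phase)       # SVD-based min-norm phase velocity
tbase_diff = np.array([[0.1], [0.2]])        # years between consecutive dates
ts = np.vstack(([[0.0]], np.cumsum(vel * tbase_diff).reshape(-1, 1)))
print ts                                     # phase time-series, first date = 0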
Example #6
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print 'multilooking ' + k + ' file ' + infile
    print 'number of looks in y / azimuth direction: %d' % lks_y
    print 'number of looks in x / range   direction: %d' % lks_x

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print 'writing >>> ' + outfile

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print 'number of interferograms: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.iteritems():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k == '.trans':
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
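multilook_matrix itself is not shown here; a minimal block-averaging sketch of what it is assumed to do:

import numpy as np

def multilook_sketch(data, lks_y, lks_x):
    '''Average lks_y x lks_x blocks of a 2D array (assumed behavior).'''
    rows, cols = data.shape
    rows_mli = rows // lks_y
    cols_mli = cols // lks_x
    data = data[:rows_mli * lks_y, :cols_mli * lks_x]   # drop ragged edges
    return data.reshape(rows_mli, lks_y, cols_mli, lks_x).mean(axis=(1, 3))

print multilook_sketch(np.arange(24.0).reshape(4, 6), 2, 3)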
Example #7
File: lod.py  Project: Chenjiajun01/PySAR
def correct_lod_file(File, rangeDistFile=None, outFile=None):
    # Check Sensor Type
    print 'correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)'
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if not platform.lower() in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    if not rangeDistFile:
        print 'calculate range distance from input file attributes'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        range_resolution = float(atr['RANGE_PIXEL_SIZE'])
        rangeDist1D = range_resolution * np.linspace(0, width - 1, width)
        rangeDist = np.tile(rangeDist1D, (length, 1))
    else:
        print 'read range distance from file: %s' % (rangeDistFile)
        rangeDist = readfile.read(rangeDistFile, epoch='slantRangeDistance')[0]

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    rangeDist -= rangeDist[yref][xref]
    Ramp = np.array(rangeDist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())
        epochNum = len(epochList)

        print 'writing >>> %s' % (outFile)
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        prog_bar = ptime.progress_bar(maxValue=epochNum)
        if k in ['interferograms', 'wrapped']:
            Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
            print 'number of interferograms: ' + str(epochNum)
            date12List = ptime.list_ifgram2date12(epochList)
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12List[i])

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(epochNum):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct for LOD for ' + k + ' file'
            sys.exit(1)
        prog_bar.close()
        h5.close()
        h5out.close()

    elif k in ['.unw']:
        data, atr = readfile.read(File)
        Ramp *= -4 * np.pi / float(atr['WAVELENGTH'])
        dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
        dates = ptime.yyyymmdd2years(dates)
        dt = dates[1] - dates[0]
        data -= Ramp * dt
        print 'writing >>> %s' % (outFile)
        writefile.write(data, atr, outFile)
    else:
        print 'No need to correct for LOD for %s file' % (k)

    return outFile
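The empirical model reduces to a per-pixel ramp proportional to range distance and elapsed time; a standalone sketch with hypothetical numbers:

import numpy as np

range_dist = 5.0e4                           # meters from reference pixel (hypothetical)
dt = 2.0                                     # years between dates (hypothetical)
wavelength = 0.0562                          # Envisat C-band wavelength in meters
ramp_range = range_dist * 3.87e-7 * dt               # range error in meters
ramp_phase = ramp_range * (-4 * np.pi / wavelength)  # as interferometric phase
print 'LOD ramp: %.4f m, %.2f rad' % (ramp_range, ramp_phase)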
Example #8
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print 'read mask file: ' + maskFile
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print 'use mask of the whole area'

    ##### Input File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'Input file is ' + k
    print 'remove ramp type: ' + surf_type

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print 'writing >>> ' + outFile

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)

        if k == 'interferograms':
            mask_bk = Mask.copy()
            print 'do not consider zero value pixels for interferograms'

        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]
            if k == 'interferograms':
                Mask = mask_bk.copy()
                Mask[data == 0.] = 0

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print 'Removing ' + surf_type + ' from ' + k

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print 'writing >>> ' + outFile
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print 'Remove ' + surf_type + ' took ' + str(time.time() - start) + ' secs'
    return outFile
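A hypothetical call, assuming 'plane' is an accepted surf_type and the file names exist:

remove_surface('velocity.h5', 'plane',
               maskFile='maskTempCoh.h5',
               outFile='velocity_plane.h5')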
Example #9
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)

        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)

        return

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based, inps.start_date, inps.end_date]):
        # Display network for manually modification when there is no other modification input.
        print 'No input option found to remove interferogram'
        print 'To manually modify network, please use --manual option '
        return

    # Convert index-range input (e.g. '3:5') into a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print date12

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.trans_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.trans_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                print date12

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) < min_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print date12

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) > max_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print date12

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'list   of interferograms to remove:'
    print date12_to_rmv

    ##### Compare calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5,
                                            atr,
                                            ifgram_list_all,
                                            print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing one marked in the input file, skip updating file attributes.'
        return

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)
            elif k == 'coherence':
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print '\nplot modified network and save to file.'
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            if inps.mask_file:
                plotCmd += ' --mask ' + inps.mask_file
            print plotCmd
            os.system(plotCmd)

        print 'Done.'
        return
    else:
        print 'No new interferograms to drop, skip update.'
        return
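The index-range expansion near the top of main can be distilled into a standalone helper; a sketch of that logic (the function name is hypothetical):

def expand_ifg_index(index_inputs):
    '''Expand ['3:5', '7'] style input into [3, 4, 5, 7].'''
    out = []
    for index in index_inputs:
        bounds = sorted(int(i) for i in index.split(':'))
        if len(bounds) == 2:
            out += range(bounds[0], bounds[1] + 1)
        else:
            out.append(bounds[0])
    return sorted(out)

print expand_ifg_index(['3:5', '7'])         # [3, 4, 5, 7]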
Example #10
def modify_file_date12_list(File,
                            date12_to_rmv,
                            mark_attribute=False,
                            outFile=None):
    '''Update multiple group hdf5 file using date12 to remove
    Inputs:
        File          - multi_group HDF5 file, i.e. unwrapIfgram.h5, coherence.h5
        date12_to_rmv - list of string indicating interferograms in YYMMDD-YYMMDD format
        mark_attribute- bool, if True, change 'drop_ifgram' attribute only; otherwise, write
                        result to a new file
        outFile       - string, output file name
    Output:
        outFile       - string, output file name, if mark_attribute=True, outFile = File
    '''
    k = readfile.read_attribute(File)['FILE_TYPE']
    print '----------------------------------------------------------------------------'
    print 'file: ' + File

    if mark_attribute:
        print "set drop_ifgram to 'yes' for all interferograms to remove, and 'no' for all the others."
        h5 = h5py.File(File, 'r+')
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            if h5[k][ifgram].attrs['DATE12'] in date12_to_rmv:
                h5[k][ifgram].attrs['drop_ifgram'] = 'yes'
            else:
                h5[k][ifgram].attrs['drop_ifgram'] = 'no'
        h5.close()
        outFile = File

    else:
        date12_orig = pnet.get_date12_list(File)
        date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
        print 'number of interferograms in file      : ' + str(
            len(date12_orig))
        print 'number of interferograms to keep/write: ' + str(
            len(date12_to_write))
        print 'list   of interferograms to keep/write: '
        print date12_to_write
        date12Num = len(date12_to_write)

        if not outFile:
            outFile = 'Modified_' + os.path.basename(File)
        print 'writing >>> ' + outFile
        h5out = h5py.File(outFile, 'w')
        gg = h5out.create_group(k)

        h5 = h5py.File(File, 'r')
        igramList = sorted(h5[k].keys())
        date12_list = ptime.list_ifgram2date12(igramList)
        prog_bar = ptime.progress_bar(maxValue=date12Num, prefix='writing: ')
        for i in range(date12Num):
            date12 = date12_to_write[i]
            idx = date12_orig.index(date12)
            igram = igramList[idx]

            data = h5[k][igram].get(igram)[:]
            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=data, compression='gzip')
            for key, value in h5[k][igram].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['drop_ifgram'] = 'no'
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        h5.close()
        h5out.close()
        print 'finished writing >>> ' + outFile

    return outFile
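A hypothetical call, marking two pairs as dropped in place via attributes only (file name and dates are assumptions):

modify_file_date12_list('unwrapIfgram.h5',
                        ['061020-061213', '070106-070219'],
                        mark_attribute=True)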
Example #11
def geocode_file_geo_lut(fname, lookup_file, fname_out, inps):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file generated by ROI_PAC or Gamma,
                      i.e. geomap_4rlks.trans           from ROI_PAC
                           sim_150911-150922.UTM_TO_RDC from Gamma
        fname_out   : string, optional, output geocoded filename
        inps        : namespace carrying the interpolation options:
                      interp_method : string, interpolation/resampling method, supporting nearest and linear
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out   : string, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_'+fname

    ##### Interpolate value on irregular radar coordinates (from lookup table file value)
    ##### with known value on regular radar coordinates (from radar file attribute)
    ## Grid/regular coordinates from row/column number in radar file
    print '------------------------------------------------------'
    print 'geocoding file: '+fname
    atr_rdr = readfile.read_attribute(fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_old = (np.arange(len_rdr), np.arange(wid_rdr))

    ## Irregular coordinates from data value in lookup table
    print 'reading lookup table file: '+lookup_file
    atr_lut = readfile.read_attribute(lookup_file)
    rg = readfile.read(lookup_file, epoch='range')[0]
    az = readfile.read(lookup_file, epoch='azimuth')[0]
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if the input radar file has been subsetted
    if 'subset_x0' in atr_rdr.keys():
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print '\tinput radar coord file has been subsetted, adjust lookup table value'

    # keep only pixels available in the radar file (get rid of invalid corners)
    idx = (az>0.0)*(az<=len_rdr)*(rg>0.0)*(rg<=wid_rdr)
    pts_new = np.hstack((az[idx].reshape(-1,1), rg[idx].reshape(-1,1)))
    del az, rg

    print 'geocoding using scipy.interpolate.RegularGridInterpolator ...'
    data_geo = np.empty((len_geo, wid_geo))
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of datasets: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_geo_lut(atr_rdr, atr_lut)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                               bounds_error=False, fill_value=inps.fill_value)
                data_geo[idx] = RGI_func(pts_new)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_geo_lut(h5[k][ifgram].attrs, atr_lut, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_old, data, method=inps.interp_method,\
                       bounds_error=False, fill_value=inps.fill_value)
        data_geo[idx] = RGI_func(pts_new)

        print 'update attributes'
        atr = update_attribute_geo_lut(atr_rdr, atr_lut)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
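
The core of the function above is sampling values known on a regular radar grid at the irregular (azimuth, range) coordinates read from the lookup table. A minimal, self-contained sketch of that step, with made-up sizes and coordinates:

import numpy as np
from scipy.interpolate import RegularGridInterpolator as RGI

length, width = 100, 200                           # hypothetical radar file size
data = np.random.rand(length, width)               # hypothetical dataset in radar coords
pts_old = (np.arange(length), np.arange(width))    # regular grid axes (row, col)

# irregular radar coordinates, e.g. from a ROI_PAC/Gamma lookup table
az = np.array([10.3, 55.7, 99.0])
rg = np.array([20.1, 150.4, 199.5])
pts_new = np.hstack((az.reshape(-1, 1), rg.reshape(-1, 1)))

RGI_func = RGI(pts_old, data, method='nearest', bounds_error=False, fill_value=0.0)
values = RGI_func(pts_new)                         # one value per lookup-table pixel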
Example #12
def geocode_file_radar_lut(fname, lookup_file, fname_out=None, inps=None):
    '''Geocode file using lookup table file in radar coordinates (ISCE).
    Two solutions:
    1) scipy.interpolate.griddata, with a speed up solution from Jaime and Jeff (Stack Overflow)
        https://stackoverflow.com/questions/20915502/speedup-scipy-griddata-for-multiple-interpolations-between-two-irregular-grids
    2) matplotlib.tri, interpolation from triangular grid to quad grid, which is much slower than 1).

    Inputs:
        fname       : string, file to be geocoded
        lookup_file : string, lookup table file, geometryRadar.h5
        fname_out   : string, optional, output geocoded filename
        inps        : namespace, object with the following items:
                      interp_method : string, interpolation/resampling method, supporting linear
                      fill_value    : value used for points outside of the interpolation domain
    Output:
        fname_out   : string, output geocoded filename
    '''
    start = time.time()
    ## Default Inputs and outputs
    if not inps:
        inps = cmdLineParse()

    if inps.interp_method != 'linear':
        print 'ERROR: only the linear interpolation method is supported'
        print 'Input method is: '+inps.interp_method
        sys.exit(-1)

    if not fname_out:
        fname_out = 'geo_'+fname

    ## Read lookup table file
    atr_rdr = readfile.read_attribute(fname)
    length = int(atr_rdr['FILE_LENGTH'])
    width = int(atr_rdr['WIDTH'])
    print 'reading lookup table file '+lookup_file
    lat = readfile.read(lookup_file, epoch='latitude')[0]
    lon = readfile.read(lookup_file, epoch='longitude')[0]

    #####Prepare output pixel grid: lat/lon range and step
    if os.path.isfile(inps.lalo_step):
        print 'use file %s as reference for output grid lat/lon range and step' % (inps.lalo_step)
        atr_ref = readfile.read_attribute(inps.lalo_step)
        inps.lat_step = float(atr_ref['Y_STEP'])
        inps.lon_step = float(atr_ref['X_STEP'])
        inps.lat_num = int(atr_ref['FILE_LENGTH'])
        inps.lon_num = int(atr_ref['WIDTH'])
        inps.lat0 = float(atr_ref['Y_FIRST'])
        inps.lon0 = float(atr_ref['X_FIRST'])
        inps.lat1 = inps.lat0 + inps.lat_step*inps.lat_num
        inps.lon1 = inps.lon0 + inps.lon_step*inps.lon_num
    else:
        try:
            inps.lat_step = -1*abs(float(inps.lalo_step))
            inps.lon_step = abs(float(inps.lalo_step))
            inps.lat0 = np.nanmax(lat)
            inps.lat1 = np.nanmin(lat)
            inps.lon0 = np.nanmin(lon)
            inps.lon1 = np.nanmax(lon)
            inps.lat_num = int((inps.lat1-inps.lat0)/inps.lat_step)
            inps.lon_num = int((inps.lon1-inps.lon0)/inps.lon_step)
            inps.lat_step = (inps.lat1 - inps.lat0)/inps.lat_num
            inps.lon_step = (inps.lon1 - inps.lon0)/inps.lon_num
        except ValueError:
            print 'Input lat/lon step is neither a float number nor a file in geo-coordinates, please try again.'
            sys.exit(-1)

    print 'output lat range: %f - %f' % (inps.lat0, inps.lat1)
    print 'output lon range: %f - %f' % (inps.lon0, inps.lon1)
    print 'output lat_step : %f' % (inps.lat_step)
    print 'output lon_step : %f' % (inps.lon_step)
    print 'input  file size in   y/x  : %d/%d' % (length, width)
    print 'output file size in lat/lon: %d/%d' % (inps.lat_num, inps.lon_num)

    grid_lat, grid_lon = np.mgrid[inps.lat0:inps.lat1:inps.lat_num*1j,\
                                  inps.lon0:inps.lon1:inps.lon_num*1j]
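    # note: a complex step in np.mgrid (lat_num*1j here) means "number of samples",
    # inclusive of both endpoints, rather than a step size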


    ##### Interpolate value on regular geo coordinates (from lookup table file attributes, 2D ndarray)
    ##### with known value on irregular geo coordinates (from lookup table file value, tuple of ndarray of float)

    ##Solution 1 - qhull
    print 'calculate triangulation and coordinates transformation using scipy.spatial.qhull.Delaunay ...'
    pts_old = np.hstack((lat.reshape(-1,1), lon.reshape(-1,1)))
    pts_new = np.hstack((grid_lat.reshape(-1,1), grid_lon.reshape(-1,1)))
    vtx, wts = interp_weights(pts_old, pts_new)
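    # vtx: indices of the Delaunay triangle vertices enclosing each output point
    # wts: matching barycentric weights, computed once and reused for every dataset below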
    del pts_old, pts_new, grid_lat, grid_lon

    ##Solution 2 - matplotlib.tri
    #triang = mtri.Triangulation(lat.flatten(),lon.flatten())

    data_geo = np.empty((inps.lat_num, inps.lon_num)).flatten()
    data_geo.fill(inps.fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+fname_out

        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: '+str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                dset = group.create_dataset(date, data=data_geo, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            prog_bar.close()

            print 'update attributes'
            atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)
            for key,value in atr.iteritems():
                group.attrs[key] = value

        elif k in multi_group_hdf5_file:
            print 'number of interferograms: '+str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_geo, compression='gzip')

                atr = update_attribute_radar_lut(h5[k][ifgram].attrs, inps, lat, lon, print_msg=False)
                for key, value in atr.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print 'reading '+fname
        data = readfile.read(fname)[0]

        ##Solution 1 - qhull
        data_geo = interpolate(data.flatten(), vtx, wts).reshape(inps.lat_num, inps.lon_num)

        ###Solution 2 - matplotlib.tri
        #interp_lin = mtri.LinearTriInterpolator(triang, data.flatten())
        #data_geo = interp_lin(grid_lat.flatten(), grid_lon.flatten())
        #interp_cubic = mtri.CubicTriInterpolator(triang, data, kind='geom')
        #data_geo = interp_cubic(grid_lat, grid_lon)

        print 'update attributes'
        atr = update_attribute_radar_lut(atr_rdr, inps, lat, lon)

        print 'writing >>> '+fname_out
        writefile.write(data_geo, atr, fname_out)

    del data_geo, vtx, wts
    s = time.time()-start;  m, s = divmod(s, 60);  h, m = divmod(m, 60)
    print 'Time used: %02d hours %02d mins %02d secs' % (h, m, s)
    return fname_out
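
The `interp_weights` and `interpolate` helpers called above are presumably defined elsewhere in the module; a minimal sketch following the Stack Overflow answer cited in the docstring (the actual implementation may differ) is:

import numpy as np
import scipy.spatial.qhull as qhull

def interp_weights(xy, uv, d=2):
    # triangulate the known points once, locate each output point,
    # and return enclosing vertex indices plus barycentric weights
    tri = qhull.Delaunay(xy)
    simplex = tri.find_simplex(uv)
    vertices = np.take(tri.simplices, simplex, axis=0)
    temp = np.take(tri.transform, simplex, axis=0)
    delta = uv - temp[:, d]
    bary = np.einsum('njk,nk->nj', temp[:, :d, :], delta)
    return vertices, np.hstack((bary, 1 - bary.sum(axis=1, keepdims=True)))

def interpolate(values, vtx, wts, fill_value=np.nan):
    # linear interpolation as a weighted sum over triangle vertices;
    # points outside the convex hull get negative weights -> fill_value
    ret = np.einsum('nj,nj->n', np.take(values, vtx), wts)
    ret[np.any(wts < 0, axis=1)] = fill_value
    return ret

The payoff is that the expensive Delaunay triangulation and point location run once, while each of the N datasets is interpolated with two cheap array operations.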