Example #1
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print('Reference value: ')
    print(refList)

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('file type: ' + k)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if epochNum != len(refList):
            print('\nERROR: Reference value has different epoch number '
                  'from input file.')
            print('Reference List epoch number: ' + str(len(refList)))
            print('Input file     epoch number: ' + str(epochNum))
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + outName)
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print('number of acquisitions: ' + str(epochNum))
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(epochNum))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = h5file[k][epoch].attrs

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.items():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print('writing >>> ' + outName)
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
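
A minimal usage sketch, with hypothetical file names and values (the epoch count of the input file must match len(refList), as the check above enforces):

# Hypothetical call: subtract one reference value per acquisition
ref_vals = [0.0, 0.012, -0.003]   # one value per epoch, in sorted-epoch order
seed_file_reference_value('timeseries.h5', 'timeseries_seeded.h5', ref_vals,
                          ref_y=300, ref_x=780)
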
Example #2
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print('estimate phase ramp during the correction')
    print('ramp type: '+ramp_type)

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i],x_list[i]] == 0:
            print('\nERROR: Connecting point (%d,%d) is out of masked area! Select them again!\n' % (y_list[i],x_list[i]))
            sys.exit(1)
    print('Number of bridges: '+str(len(x_list)//2))
    print('Bonding points coordinates:\nx: '+str(x_list)+'\ny: '+str(y_list))

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx  = ''
        n_bridge = len(x_list)//2
        for i in range(n_bridge):
            pair_yx = str(y_list[2*i])+','+str(x_list[2*i])+','+str(y_list[2*i+1])+','+str(x_list[2*i+1])
            if not i == n_bridge-1:
                point_yx += pair_yx+','
                line_yx  += pair_yx+';'
            else:
                point_yx += pair_yx
                line_yx  += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print(plot_cmd)
            os.system(plot_cmd)
        except: pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor'+ext
    ifgram_cor_deramp_file = os.path.splitext(ifgram_cor_file)[0]+'_'+ramp_type+ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file,'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file,'w')
        group = h5out.create_group(k)
        print('writing >>> '+ifgram_cor_file)

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file,'w')
            group_deramp = h5out_deramp.create_group(k)
            print('writing >>> '+ifgram_cor_deramp_file)

        ##### Loop
        print('Number of interferograms: '+str(ifgram_num))
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_derampCor-ramp, compression='gzip')
            for key, value in h5[k][ifgram].attrs.items():
                gg.attrs[key]=value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram, data=data_derampCor, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg_deramp.attrs[key]=value
            prog_bar.update(i+1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try: h5out_deramp.close()
        except: pass

    #### .unw file
    elif ext == '.unw':
        print('read '+ifgram_file)
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
        data_derampCor = bridging_data(data_deramp,mask,x_list,y_list)
        data_cor = data_derampCor - ramp

        print('writing >>> '+ifgram_cor_file)
        ifgram_cor_file        = writefile.write(data_cor,       atr, ifgram_cor_file)
        if save_cor_deramp_file:
            print('writing >>> '+ifgram_cor_deramp_file)
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr, ifgram_cor_deramp_file)

    else:
        sys.exit('Un-supported file type: '+ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
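
The helper bridging_data() is used above but not shown. A minimal sketch of what such a bridging step can look like, assuming mask labels each patch with a distinct integer value; the actual PySAR helper may differ:

import numpy as np

def bridging_data_sketch(data, mask, x_list, y_list):
    # For each bridge (a pair of bonding points), shift the patch containing
    # the second point by the nearest integer number of 2*pi cycles of the
    # phase difference between the two points.
    data_cor = np.array(data, copy=True)
    for i in range(len(x_list) // 2):
        y_ref, x_ref = y_list[2 * i], x_list[2 * i]
        y_cor, x_cor = y_list[2 * i + 1], x_list[2 * i + 1]
        diff = data_cor[y_ref, x_ref] - data_cor[y_cor, x_cor]
        num_jump = np.rint(diff / (2. * np.pi))
        # assumption: the mask value identifies the patch of the second point
        data_cor[mask == mask[y_cor, x_cor]] += 2. * np.pi * num_jump
    return data_cor
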
Example #3
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print('input file(s) to be modified: ' + str(inps.file))
    print('number of interferograms: ' + str(len(date12_orig)))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if inps.reset:
        print(
            '----------------------------------------------------------------------------'
        )
        for file in inps.file:
            reset_pairs(file)

        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print(rmCmd)
            os.system(rmCmd)

        return

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based, inps.start_date, inps.end_date]):
        # Display network for manual modification when there is no other modification input.
        print('No input option found to remove interferograms')
        print('To manually modify the network, please use the --manual option')
        return

    # Convert colon-separated index input (e.g. '3:5') to a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print('Unrecognized input: ' + index)
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print(
            '----------------------------------------------------------------------------'
        )
        print('use reference pairs info from file: ' + inps.reference_file)
        print('number of interferograms in reference: ' +
              str(len(date12_to_keep)))
        print('date12 not in reference file:')
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print(
            '----------------------------------------------------------------------------'
        )
        print(
            'use coherence-based network modification from coherence file: ' +
            inps.coherence_file)
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.trans_file:
            print('input AOI in (lon0, lat1, lon1, lat0): ' +
                  str(inps.aoi_geo_box))
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.trans_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print('input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box))

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print(
                'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            )
            print('date12 with 1) average coherence < ' +
                  str(inps.min_coherence) + ' AND 2) not in MST network: ')
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print('date12 with average coherence < ' + str(inps.min_coherence))
            mst_date12_list = []

        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                print(date12)

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with perpendicular spatial baseline > ' +
              str(inps.max_perp_baseline) + ' meters')
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with temporal baseline > ' +
              str(inps.max_temp_baseline) + ' days')
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print(
            '----------------------------------------------------------------------------'
        )
        print('drop date12/pair with the following index number:')
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print(str(index) + '    ' + date12)

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs including the following dates: \n' +
              str(inps.exclude_date))
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print(date12)

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date earlier than start-date: ' +
              inps.start_date)
        min_date = int(ptime.yyyymmdd(inps.start_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) < min_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print(
            '----------------------------------------------------------------------------'
        )
        print('Drop pairs with date later than end-date: ' + inps.end_date)
        max_date = int(ptime.yyyymmdd(inps.end_date))
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(
                    int(j) > max_date
                    for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                print(date12)

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print('date12 selected to remove:')
        print(date12_click)
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print(
        '----------------------------------------------------------------------------'
    )
    print('number of interferograms to remove: ' + str(len(date12_to_rmv)))
    print('list   of interferograms to remove:')
    print(date12_to_rmv)

    ##### Calculated date12_to_drop vs. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5,
                                            atr,
                                            ifgram_list_all,
                                            print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print('Calculated date12 to drop is the same as the existing marked '
              'input file, skip updating file attributes.')
        return

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print('update mask file for input ' + k + ' file based on ' +
                      Modified_File)
                inps.mask_file = 'mask.h5'
                print('writing >>> ' + inps.mask_file)
                ut.nonzero_mask(Modified_File, inps.mask_file)
            elif k == 'coherence':
                print('update average spatial coherence for input ' + k +
                      ' file based on: ' + Modified_File)
                outFile = 'averageSpatialCoherence.h5'
                print('writing >>> ' + outFile)
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print('\nplot modified network and save to file.')
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            if inps.mask_file:
                plotCmd += ' --mask ' + inps.mask_file
            print(plotCmd)
            os.system(plotCmd)

        print('Done.')
        return
    else:
        print('No new interferograms to drop, skip update.')
        return
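
The colon-range expansion near the top of main() (turning an input like '3:5' into indices 3, 4, 5) can be read in isolation; a standalone sketch of that step, with a hypothetical helper name:

def expand_index_list(index_inputs):
    '''Expand ['1', '3:5', '8'] into [1, 3, 4, 5, 8] (1-based indices).'''
    out = []
    for item in index_inputs:
        bounds = sorted(int(i) for i in item.split(':'))
        if len(bounds) == 2:
            out.extend(range(bounds[0], bounds[1] + 1))
        else:
            out.append(bounds[0])
    return sorted(out)
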
Example #4
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation of file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        else: raise ValueError('unsupported operator: '+operator)
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print('writing >>> '+fname_out)

        if k == 'timeseries':
            print('number of acquisitions: '+str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print('number of interferograms: '+str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(data_out, atr, fname_out)

    return fname_out
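
The data_operation() helper called above is not shown. A plausible element-wise sketch under the same operator aliases; the real implementation may differ:

def data_operation_sketch(data, operator, operand):
    # Element-wise arithmetic matching the operator aliases accepted above.
    if operator in ['+', 'plus', 'add', 'addition']:
        return data + operand
    elif operator in ['-', 'minus', 'substract', 'substraction']:
        return data - operand
    elif operator in ['*', 'times', 'multiply', 'multiplication']:
        return data * operand
    elif operator in ['/', 'obelus', 'divide', 'division']:
        return data / operand
    elif operator in ['^', 'pow', 'power']:
        return data ** operand
    raise ValueError('unsupported operator: ' + operator)
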
Example #5
File: subset.py  Project: Ovec8hkin/PySAR
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional, fill value for area outside of data coverage, default=None
                                   None/not set: subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print('subset ' + k + ' file: ' + File + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = bool(subset_dict.get('fill_value'))
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print('data   range in y/x: ' + str(data_box))
    print('subset range in y/x: ' + str(pix_box))
    print('data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict)))
    print('subset range in lat/lon: ' + str(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if 'tight' in list(subset_dict.keys()) and subset_dict['tight']:
                outFile = os.path.splitext(
                    File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print('number of acquisitions: ' + str(epochNum))
        else:
            print('number of interferograms: ' + str(epochNum))

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
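
A minimal usage sketch, with a hypothetical file name and pixel ranges (keys follow the subset_dict items documented above):

subset_dict = {'subset_x': [300, 800], 'subset_y': [500, 1500]}
subset_file('velocity.h5', subset_dict, outFile='subset_velocity.h5')
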
Example #6
def modify_file_date12_list(File,
                            date12_to_rmv,
                            mark_attribute=False,
                            outFile=None):
    '''Update multiple group hdf5 file using date12 to remove
    Inputs:
        File          - multi_group HDF5 file, i.e. unwrapIfgram.h5, coherence.h5
        date12_to_rmv - list of string indicating interferograms in YYMMDD-YYMMDD format
        mark_attribute- bool, if True, change 'drop_ifgram' attribute only; otherwise, write
                        result to a new file
        outFile       - string, output file name
    Output:
        outFile       - string, output file name, if mark_attribute=True, outFile = File
    '''
    k = readfile.read_attribute(File)['FILE_TYPE']
    print(
        '----------------------------------------------------------------------------'
    )
    print('file: ' + File)

    if mark_attribute:
        print(
            "set drop_ifgram to 'yes' for all interferograms to remove, and 'no' for all the others."
        )
        h5 = h5py.File(File, 'r+')
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            if h5[k][ifgram].attrs['DATE12'] in date12_to_rmv:
                h5[k][ifgram].attrs['drop_ifgram'] = 'yes'
            else:
                h5[k][ifgram].attrs['drop_ifgram'] = 'no'
        h5.close()
        outFile = File

    else:
        date12_orig = pnet.get_date12_list(File)
        date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
        print('number of interferograms in file      : ' +
              str(len(date12_orig)))
        print('number of interferograms to keep/write: ' +
              str(len(date12_to_write)))
        print('list   of interferograms to keep/write: ')
        print(date12_to_write)
        date12Num = len(date12_to_write)

        if not outFile:
            outFile = 'Modified_' + os.path.basename(File)
        print('writing >>> ' + outFile)
        h5out = h5py.File(outFile, 'w')
        gg = h5out.create_group(k)

        h5 = h5py.File(File, 'r')
        igramList = sorted(h5[k].keys())
        date12_list = ptime.list_ifgram2date12(igramList)
        prog_bar = ptime.progress_bar(maxValue=date12Num, prefix='writing: ')
        for i in range(date12Num):
            date12 = date12_to_write[i]
            idx = date12_orig.index(date12)
            igram = igramList[idx]

            data = h5[k][igram].get(igram)[:]
            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=data, compression='gzip')
            for key, value in h5[k][igram].attrs.items():
                group.attrs[key] = value
            group.attrs['drop_ifgram'] = 'no'
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        h5.close()
        h5out.close()
        print('finished writing >>> ' + outFile)

    return outFile
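
A minimal usage sketch with hypothetical date pairs in the YYMMDD-YYMMDD format noted above:

date12_to_rmv = ['960508-970730', '970919-971220']
# rewrite the kept interferograms to Modified_unwrapIfgram.h5:
modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv)
# or only mark them in place via the drop_ifgram attribute:
modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv, mark_attribute=True)
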
Example #7
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        fname_out = add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print('First input file is ' + atr['PROCESSOR'] + ' ' + k)

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file
                    and ki in multi_group_hdf5_file):
                pass
            else:
                print('Input file structures are not the same: ' + k +
                      ' vs. ' + ki)
                sys.exit(1)

        print('writing >>> ' + fname_out)
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print('number of acquisitions: %d' % epoch_num)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                h5file.close()
                data = add_matrix(data, d)

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print('number of interferograms: %d' % epoch_num)
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = list(h5file.keys())[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(
                    temp_epoch_list[i])[:]
                h5file.close()
                data = add_matrix(data, d)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print('loading ' + fname)
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print('writing >>> ' + fname_out)
        writefile.write(data, atr, fname_out)

    return fname_out
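
The add_matrix() helper used above is not shown. One plausible NaN-tolerant sketch (the actual helper may behave differently):

import numpy as np

def add_matrix_sketch(data1, data2):
    # Plain element-wise sum, except a NaN in one input falls back to the
    # other input's value, so masked-out pixels do not poison the total.
    data = data1 + data2
    data[np.isnan(data1)] = data2[np.isnan(data1)]
    data[np.isnan(data2)] = data1[np.isnan(data2)]
    return data
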
Example #8
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print('read mask file: ' + maskFile)
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    ##### Input File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('Input file is ' + k)
    print('remove ramp type: ' + surf_type)

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print('writing >>> ' + outFile)

    if k in ['timeseries']:
        print('number of acquisitions: ' + str(len(epochList)))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print('Removing ' + surf_type + ' from ' + k)

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print('writing >>> ' + outFile)
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print('Remove ' + surf_type + ' took ' + str(time.time() - start) +
          ' secs')
    return outFile
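
A minimal usage sketch with hypothetical file names ('quadratic' is one of the ramp types used elsewhere in this collection):

remove_surface('unwrapIfgram.h5', 'quadratic', maskFile='maskTempCoh.h5')
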
Example #9
def main(argv):
    inps = cmdLineParse()

    atr = readfile.read_attribute(inps.velocity_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Check subset input
    if inps.subset_y:
        inps.subset_y = sorted(inps.subset_y)
        print('subset in y/azimuth direction: ' + str(inps.subset_y))
    else:
        inps.subset_y = [0, length]

    if inps.subset_x:
        inps.subset_x = sorted(inps.subset_x)
        print('subset in x/range direction: ' + str(inps.subset_x))
    else:
        inps.subset_x = [0, width]
    y0, y1 = inps.subset_y
    x0, x1 = inps.subset_x

    # Read velocity/rate
    velocity = readfile.read(inps.velocity_file)[0]
    print('read velocity file: ' + inps.velocity_file)

    k = 'interferograms'
    h5 = h5py.File(inps.ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    print('number of interferograms: ' + str(ifgram_num))

    ##### Select interferograms with unwrapping error
    if inps.percentage > 0.0:
        mask = readfile.read(inps.mask_file)[0]
        print('read mask for pixels with unwrapping error from file: ' +
              inps.mask_file)

        unw_err_ifgram_num = int(np.rint(inps.percentage * ifgram_num))
        unw_err_ifgram_idx = random.sample(list(range(ifgram_num)),
                                           unw_err_ifgram_num)
        unw_err_ifgram_list = [ifgram_list[i] for i in unw_err_ifgram_idx]
        unw_err_date12_list = [date12_list[i] for i in unw_err_ifgram_idx]
        print(
            'randomly choose the following %d interferograms with unwrapping error'
            % unw_err_ifgram_num)
        print(unw_err_date12_list)

        unit_unw_err = 2.0 * np.pi * mask
    else:
        unw_err_ifgram_list = []

    ###### Generate simulated interferograms
    m_dates = ptime.yyyymmdd([i.split('-')[0] for i in date12_list])
    s_dates = ptime.yyyymmdd([i.split('-')[1] for i in date12_list])
    range2phase = -4.0 * np.pi / float(atr['WAVELENGTH'])

    print('writing simulated interferograms file: ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group('interferograms')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # Get temporal baseline in years
        t1 = datetime.datetime(*time.strptime(m_dates[i], "%Y%m%d")[0:5])
        t2 = datetime.datetime(*time.strptime(s_dates[i], "%Y%m%d")[0:5])
        dt = (t2 - t1)
        dt = float(dt.days) / 365.25

        # Simulated interferograms with unwrap error
        unw = velocity * dt * range2phase
        if ifgram in unw_err_ifgram_list:
            rand_int = random.sample(list(range(1, 10)), 1)[0]
            unw += rand_int * unit_unw_err
            print(ifgram + '  - add unwrapping error of %d*2*pi' % rand_int)
        else:
            print(ifgram)

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram,
                                 data=unw[y0:y1, x0:x1],
                                 compression='gzip')

        for key, value in h5[k][ifgram].attrs.items():
            gg.attrs[key] = value
        if ifgram in unw_err_ifgram_list:
            gg.attrs['unwrap_error'] = 'yes'
        else:
            gg.attrs['unwrap_error'] = 'no'
        gg.attrs['FILE_LENGTH'] = y1 - y0
        gg.attrs['WIDTH'] = x1 - x0
    h5.close()
    h5out.close()
    print('Done.')
    return inps.outfile
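
A worked numeric check of the phase simulation above, with hypothetical numbers:

import numpy as np
wavelength = 0.0562                      # meters (C-band, hypothetical)
range2phase = -4.0 * np.pi / wavelength  # LOS meters -> unwrapped radians
velocity = 0.01                          # 1 cm/yr LOS rate
dt = 350 / 365.25                        # temporal baseline in years
unw = velocity * dt * range2phase        # ~ -2.14 rad of simulated phase
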
Example #10
def main(argv):

    ##### Inputs
    try:
        ifgram_file = argv[0]
        timeseries_file = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        outfile = argv[2]
    except:
        outfile = 'reconstructed_' + ifgram_file

    atr = readfile.read_attribute(timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series file
    print('loading timeseries ...')
    h5ts = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5ts['timeseries'].keys())
    date_num = len(date_list)
    timeseries = np.zeros((date_num, length * width))

    print('number of acquisitions: ' + str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5ts['timeseries'].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5ts.close()
    del d

    range2phase = -4 * np.pi / float(atr['WAVELENGTH'])
    timeseries = range2phase * timeseries

    #####  Estimate interferograms from timeseries
    print(
        'estimating interferograms from timeseries using design matrix from input interferograms'
    )
    A, B = ut.design_matrix(ifgram_file)
    p = -1 * np.ones([A.shape[0], 1])
    Ap = np.hstack((p, A))
    estData = np.dot(Ap, timeseries)
    del timeseries

    ##### Write interferograms file
    print('writing >>> ' + outfile)
    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5['interferograms'].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)

    h5out = h5py.File(outfile, 'w')
    group = h5out.create_group('interferograms')

    print('number of interferograms: ' + str(ifgram_num))
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        data = np.reshape(estData[i, :], (length, width))

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=data, compression='gzip')
        for key, value in h5['interferograms'][ifgram].attrs.items():
            gg.attrs[key] = value
        prog_bar.update(i + 1, suffix=date12_list[i])
    prog_bar.close()
    h5.close()
    h5out.close()
    print('Done.')
    return outfile
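
A worked sketch of the design-matrix step above, assuming the usual SBAS convention (hypothetical 3 dates, 2 interferograms, timeseries relative to the first date):

import numpy as np
ts = np.array([[0.0], [1.5], [4.0]])   # one pixel: phase at dates d0, d1, d2
A = np.array([[ 1, 0],                 # ifgram d0-d1 -> ts[1] - ts[0]
              [-1, 1]])                # ifgram d1-d2 -> ts[2] - ts[1]
p = -1 * np.ones((A.shape[0], 1))      # column for the (zero) reference date
Ap = np.hstack((p, A))
est = np.dot(Ap, ts)                   # -> [[1.5], [2.5]]
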
Example #11
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking ' + k + ' file ' + infile)
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print('writing >>> ' + outfile)

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write single-dataset files
    elif k == '.trans':
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
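
The multilook_matrix() helper used above is not shown. A minimal block-averaging sketch consistent with the looks above (the real helper may, e.g., use nanmean):

import numpy as np

def multilook_matrix_sketch(data, lks_y, lks_x):
    # Average non-overlapping lks_y-by-lks_x blocks, cropping any partial
    # blocks at the bottom/right edges.
    length_mli = data.shape[0] // lks_y
    width_mli = data.shape[1] // lks_x
    data = data[:length_mli * lks_y, :width_mli * lks_x]
    return data.reshape(length_mli, lks_y, width_mli, lks_x).mean(axis=(1, 3))
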
Example #12
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    '''Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, output file name/path
    '''

    # Basic info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    try:
        ref_yx = [int(atr['ref_y']), int(atr['ref_x'])]
    except:
        ref_yx = None

    filter_type = filter_type.lower()
    MSG = 'filtering ' + k + ' file: ' + fname + ' using ' + filter_type + ' filter'
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        MSG += ' with kernel size of %d' % int(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        MSG += ' with sigma of %.1f' % filter_par
    print(MSG)

    if not fname_out:
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0] + '_' + filter_type + ext

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                dset = group.create_dataset(date,
                                            data=data_filt,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx and k in ['interferograms']:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_filt,
                                         compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_filt = filter_data(data, filter_type, filter_par)
        if ref_yx and k in ['.unw', 'velocity']:
            data_filt -= data_filt[ref_yx[0], ref_yx[1]]
        print('writing >>> ' + fname_out)
        writefile.write(data_filt, atr, fname_out)

    return fname_out
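
Minimal usage sketches with a hypothetical file name, using the filter types from the docstring:

filter_file('velocity.h5', 'lowpass_avg', filter_par=5)          # 5x5 mean kernel
filter_file('velocity.h5', 'highpass_gaussian', filter_par=3.0)  # sigma = 3.0
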
Example #13
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROI_PAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, optional, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except:
        lut_file = None
    if not lut_file:
        sys.exit(
            'ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print('------------------------------------------------------')
    print('geocoding file: ' + fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print('reading lookup table file: ' + lut_file)
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if the input radar file has been subsetted
    if 'subset_x0' in list(atr_rdr.keys()):
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print(
            '\tinput radar coord file has been subsetted, adjust lookup table value'
        )

    # extract pixels only available in the radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print('geocoding using scipy.interpolate.RegularGridInterpolator ...')
    data_geo = np.full((len_geo, wid_geo), fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print('update attributes')
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print('reading ' + fname)
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print('update attributes')
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print('writing >>> ' + fname_out)
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print('Time used: %02d hours %02d mins %02d secs' % (h, m, s))
    return fname_out
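
A minimal usage sketch, with a hypothetical radar-coded file and the ROI_PAC lookup table named in the docstring:

geocode_file_with_geo_lut('velocity.h5', lut_file='geomap_4rlks.trans',
                          method='nearest', fname_out='geo_velocity.h5')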