Example #1
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except Exception:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except IndexError:
        outFile = 'rangeDistance.h5'
    
    # Calculate range distance
    range_dis = ut.range_distance(atr, dimension=2)
    
    # Geo coord
    if 'Y_FIRST' in list(atr.keys()):
        print('Input file is geocoded, only the center range distance is calculated:')
        print(range_dis)
        return range_dis

    # Radar coord
    else:
        print('writing >>> '+outFile)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'm'
        writefile.write(range_dis, atr, outFile)
        return outFile
Example #2
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except Exception:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except IndexError:
        outFile = 'incidenceAngle.h5'

    # Calculate incidence angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in list(atr.keys()):
        print('Input file is geocoded, only the center incidence angle is calculated:')
        print(angle)
        return angle

    # Radar coord
    else:
        print('writing >>> ' + outFile)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'degree'
        writefile.write(angle, atr, outFile)
        return outFile
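
Examples #1 and #2 share one calling convention: argv[0] is the input file and an optional argv[1] overrides the default output name. A minimal usage sketch, assuming the surrounding package's readfile/ut/writefile helpers are importable; the input file name is hypothetical:

    # Radar-coded input: writes the .h5 output and returns its name.
    # Geocoded input: prints and returns the center value instead.
    main(['timeseries.h5'])
    main(['timeseries.h5', 'inc_angle.h5'])   # explicit output name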
Example #3
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
        epoch = argv[1]
    except Exception:
        usage()
        sys.exit(1)

    try:
        outFile = argv[2]
    except IndexError:
        outFile = 'perpBaseline.h5'

    # Calculate perpendicular baseline time series
    pbase = ut.perp_baseline_timeseries(atr, dimension=1)

    if pbase.shape[1] == 1:
        print(pbase)
        return pbase

    k = atr['FILE_TYPE']
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])

    h5 = h5py.File(File, 'r')
    epochList = sorted(h5[k].keys())
    epoch = ptime.yyyymmdd(epoch)
    epoch_idx = epochList.index(epoch)

    pbase_y = pbase[epoch_idx, :].reshape(length, 1)
    pbase_xy = np.tile(pbase_y, (1, width))

    print('writing >>> ' + outFile)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'm'
    writefile.write(pbase_xy, atr, outFile)
    return outFile
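
Example #3 also needs an acquisition date in argv[1], used to pick one epoch out of the perpendicular-baseline time series. A sketch with hypothetical arguments:

    # timeseries file, acquisition date, optional output name
    main(['timeseries.h5', '20100310', 'perpBaseline.h5'])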
Example #4
def match_two_files(File1, File2, outName=None, manual_match=False, disp_fig=False):
    '''Match two geocoded files by estimating their offset.
    Works best for two files with an overlapping common area.
    '''
    
    # Read Input Files
    V1, atr1 = readfile.read(File1)
    V2, atr2 = readfile.read(File2)
    k = atr1['FILE_TYPE']
    print('---------------------------')
    print('matching 2 '+k+' files:\n'+File1+'\n'+File2)
    
    # Get Coverage Info 
    # Boundary Info - 2 Input Files
    West1,East1,North1,South1,width1,length1 = corners(atr1)
    West2,East2,North2,South2,width2,length2 = corners(atr2)
    # Boundary Info - Output File
    print('finding the corners of the whole area')
    West  = min(West1, West2)
    East  = max(East1, East2)
    North = max(North1,North2)
    South = min(South1,South2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width  = int(round((East - West )/lon_step + 1.0))
    length = int(round((South - North)/lat_step + 1.0))

    # Get Index of Input Files in Output Files
    lon_seq = np.arange(West, West +width *lon_step, lon_step) 
    lat_seq = np.arange(North, North+length*lat_step, lat_step)
    indx1 = nearest(West1,  lon_seq)[0]
    indy1 = nearest(North1, lat_seq)[0]
    indx2 = nearest(West2,  lon_seq)[0]
    indy2 = nearest(North2, lat_seq)[0]

    # Estimate offset in the overlapping area
    VV1 = np.zeros([length,width])
    VV2 = np.zeros([length,width])
    VV1[:,:] = np.nan
    VV2[:,:] = np.nan
    VV1[indy1:indy1+length1, indx1:indx1+width1] = V1
    VV2[indy2:indy2+length2, indx2:indx2+width2] = V2

    VV_diff = VV2 - VV1
    offset = np.nan
    if not manual_match:
        offset = np.nansum(VV_diff) / np.sum(np.isfinite(VV_diff))

    if np.isnan(offset):
        print('**************************************************')
        print('WARNING:')
        print('')
        print('No common area found between the two velocity maps.')
        print('At least one common pixel is required.')
        print('No matching applied.')
        print('Continue with manual matching ...')
        print('    by selecting two lines, one from each dataset, to calculate the offset')
        print('**************************************************')
        manual_match = True
    if manual_match:
        offset = manual_offset_estimate(V1, V2)

    # Adjust File2 value using offset
    if np.isnan(offset):
        print('**************************************************')
        print('WARNING:')
        print('')
        print('No offset is estimated and no matching applied.')
        print('Continue to merge two input files without any adjustment.')
        print('**************************************************')   
    else:
        print('Average offset between the two velocity files in the common area: ' + str(offset))
        V2 = V2 - offset

    # Get merged data matrix value
    indv2 = np.isfinite(V2)
    VV = np.zeros([length,width])
    VV[:,:] = np.nan
    VV[indy1:indy1+length1, indx1:indx1+width1] = V1
    VV[indy2:indy2+length2, indx2:indx2+width2][indv2] = V2[indv2]
    
    # Write Output File
    if not outName:
        ext = os.path.splitext(File1)[1]
        outName = os.path.splitext(os.path.basename(File1))[0]+'_'+\
                  os.path.splitext(os.path.basename(File2))[0]+ext
    print('writing >>> '+outName)
    atr = atr1.copy()
    atr['WIDTH'] = width
    atr['FILE_LENGTH'] = length
    atr['X_FIRST'] = West
    atr['Y_FIRST'] = North
    writefile.write(VV, atr, outName)

    # Display
    fig_size = [16.0,16.0]
    fig = plt.figure(figsize=fig_size)
    print('plotting result ...')
    plt.subplot(2,2,1);  plt.imshow(VV1);      plt.title(File1);     plt.colorbar()
    plt.subplot(2,2,2);  plt.imshow(VV2);      plt.title(File2);     plt.colorbar()
    plt.subplot(2,2,3);  plt.imshow(VV);       plt.title(outName);   plt.colorbar()
    plt.subplot(2,2,4);  plt.imshow(VV_diff);  plt.title('Offset');  plt.colorbar()
    plt.tight_layout()
    plt.savefig(outName+'.png', bbox_inches='tight', transparent=True, dpi=150)
    print('save figure to '+outName+'.png')

    if disp_fig:
        print('showing ...')
        plt.show()

    return outName
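
A usage sketch for match_two_files() above, with two hypothetical geocoded velocity files that share an overlapping area:

    # Automatic offset estimate from the overlap, then merge; the merged
    # file (default: <name1>_<name2>.h5) and a .png preview are written.
    out = match_two_files('vel_track1.h5', 'vel_track2.h5')

    # Manual matching via manual_offset_estimate(), with figures shown
    out = match_two_files('vel_track1.h5', 'vel_track2.h5',
                          manual_match=True, disp_fig=True)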
Example #5
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print('convert DEM file to ROIPAC format')
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in list(atr_dem.keys()):
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print('*******************************************************************************')
    print('Downloading weather model data ...')

    ## Get Grib Source
    if inps.weather_model in ['ECMWF','ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise ValueError('Unrecognized weather model: '+inps.weather_model)
    print('grib source: '+inps.grib_source)

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print('Store weather data into directory: '+inps.weather_dir)

    # Get date list to download
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print('read date list info from: '+inps.timeseries_file)
    else:
        dateList = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print('read date list info from: '+inps.date_list_file)

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print('Time of closest available product: '+inps.hour)

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(dateList, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print('Download completed, exit as planned.')
        return

    print('*******************************************************************************')
    print('Calculating delay for each epoch.')

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print('incidence angle: '+str(inps.incidence_angle))
    else:
        print('calculating incidence angle ...')
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle*np.pi/180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source+'.h5'
    print('writing >>> '+tropFile)
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
    print('writing >>> '+inps.out_file)
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    try:
        ref_date = atr['ref_date']
    except KeyError:
        ref_date = dateList[0]
    print('calculating phase delay on reference date: '+ref_date)
    ref_date_grib_file = None
    for fname in inps.grib_file_list:
        if ref_date in fname:
            ref_date_grib_file = fname
    phs_ref = get_delay(ref_date_grib_file, atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        grib_file = inps.grib_file_list[i] 
        date = re.findall(r'\d{8}', grib_file)[0]

        # Get phase delay
        if date != ref_date:
            print('calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file)))
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print('writing to HDF5 files ...')
        data = h5timeseries['timeseries'].get(date)[:]
        dset  = group_tropCor.create_dataset(date, data=data-phs, compression='gzip')
        dset  = group_trop.create_dataset(date, data=phs, compression='gzip')

    ## Write Attributes
    for key,value in atr.items():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value
    
    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm '+inps.dem_file+' '+inps.dem_file+'.rsc '
        print(rmCmd)
        os.system(rmCmd)
    
    print('Done.')

    return
Example #6
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print('read mask file: ' + maskFile)
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    ##### Input File Info
    k = atr['FILE_TYPE']
    print('Input file is ' + k)
    print('remove ramp type: ' + surf_type)

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print('writing >>> ' + outFile)

    if k in ['timeseries']:
        print('number of acquisitions: ' + str(len(epochList)))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print('Removing ' + surf_type + ' from ' + k)

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print('writing >>> ' + outFile)
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except Exception:
        pass

    print('Remove ' + surf_type + ' took ' + str(time.time() - start) +
          ' secs')
    return outFile
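
A sketch of calling remove_surface() directly; 'plane' is assumed to be one of the surface types understood by remove_data_surface(), and the file names are hypothetical:

    # Remove a linear ramp from every epoch, estimated on masked pixels only
    out = remove_surface('timeseries.h5', 'plane', maskFile='maskTempCoh.h5')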
Example #7
def geocode_file_with_geo_lut(fname,
                              lut_file=None,
                              method='nearest',
                              fill_value=np.nan,
                              fname_out=None):
    '''Geocode file using ROI_PAC/Gamma lookup table file.
    Related module: scipy.interpolate.RegularGridInterpolator

    Inputs:
        fname      : string, file to be geocoded
        lut_file   : string, optional, lookup table file generated by ROI_PAC or Gamma
                     i.e. geomap_4rlks.trans           from ROI_PAC
                          sim_150911-150922.UTM_TO_RDC from Gamma
        method     : string, optional, interpolation/resampling method, supporting nearest, linear
        fill_value : value used for points outside of the interpolation domain.
                     If None, values outside the domain are extrapolated.
        fname_out  : string, optional, output geocoded filename
    Output:
        fname_out  : string, output geocoded filename
    '''

    start = time.time()
    ## Default Inputs and outputs
    if not fname_out:
        fname_out = 'geo_' + fname

    # Default lookup table file:
    atr_rdr = readfile.read_attribute(fname)
    if not lut_file:
        if atr_rdr['INSAR_PROCESSOR'] == 'roipac':
            lut_file = ['geomap*lks_tight.trans', 'geomap*lks.trans']
        elif atr_rdr['INSAR_PROCESSOR'] == 'gamma':
            lut_file = ['sim*_tight.UTM_TO_RDC', 'sim*.UTM_TO_RDC']

    try:
        lut_file = ut.get_file_list(lut_file)[0]
    except Exception:
        lut_file = None
    if not lut_file:
        sys.exit('ERROR: No lookup table file found! Cannot geocode without it.')

    ## Original coordinates: row/column number in radar file
    print('------------------------------------------------------')
    print('geocoding file: ' + fname)
    len_rdr = int(atr_rdr['FILE_LENGTH'])
    wid_rdr = int(atr_rdr['WIDTH'])
    pts_rdr = (np.arange(len_rdr), np.arange(wid_rdr))

    ## New coordinates: data value in lookup table
    print('reading lookup table file: ' + lut_file)
    rg, az, atr_lut = readfile.read(lut_file)
    len_geo = int(atr_lut['FILE_LENGTH'])
    wid_geo = int(atr_lut['WIDTH'])

    # adjustment if the input radar file has been subsetted
    if 'subset_x0' in list(atr_rdr.keys()):
        x0 = float(atr_rdr['subset_x0'])
        y0 = float(atr_rdr['subset_y0'])
        rg -= x0
        az -= y0
        print(
            '\tinput radar coord file has been subsetted, adjust lookup table value'
        )

    # extract pixels only available in the radar file (get rid of invalid corners)
    idx = (az > 0.0) * (az <= len_rdr) * (rg > 0.0) * (rg <= wid_rdr)
    pts_geo = np.hstack((az[idx].reshape(-1, 1), rg[idx].reshape(-1, 1)))
    del az, rg

    print('geocoding using scipy.interpolate.RegularGridInterpolator ...')
    data_geo = np.full((len_geo, wid_geo), fill_value)
    k = atr_rdr['FILE_TYPE']
    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                dset = group.create_dataset(date,
                                            data=data_geo,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            prog_bar.close()

            print('update attributes')
            atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]
                RGI_func = RGI(pts_rdr,
                               data,
                               method,
                               bounds_error=False,
                               fill_value=fill_value)

                data_geo.fill(fill_value)
                data_geo[idx] = RGI_func(pts_geo)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_geo,
                                         compression='gzip')

                atr = geocode_attribute_with_geo_lut(h5[k][ifgram].attrs,
                                                     atr_lut,
                                                     print_msg=False)
                for key, value in atr.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])
        h5.close()
        h5out.close()

    ##### Single Dataset File
    else:
        print('reading ' + fname)
        data = readfile.read(fname)[0]
        RGI_func = RGI(pts_rdr,
                       data,
                       method,
                       bounds_error=False,
                       fill_value=fill_value)

        data_geo.fill(fill_value)
        data_geo[idx] = RGI_func(pts_geo)

        print('update attributes')
        atr = geocode_attribute_with_geo_lut(atr_rdr, atr_lut)

        print('writing >>> ' + fname_out)
        writefile.write(data_geo, atr, fname_out)

    del data_geo
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print('Time used: %02d hours %02d mins %02d secs' % (h, m, s))
    return fname_out
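
A usage sketch for geocode_file_with_geo_lut(), following its docstring; the file names are hypothetical:

    # Geocode a radar-coded velocity file with a ROI_PAC lookup table
    geo_file = geocode_file_with_geo_lut('velocity.h5',
                                         lut_file='geomap_4rlks.trans',
                                         method='nearest',
                                         fname_out='geo_velocity.h5')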
Example #8
def mask_file(File, maskFile, outFile=None, inps_dict=None):
    ''' Mask input File with maskFile
    Inputs:
        File/maskFile - string, path of the input / mask file
        inps_dict - dictionary including the following options:
                    subset_x/y - list of 2 ints, subset in x/y direction
                    thr - float, threshold/minValue to generate mask
    Output:
        outFile - string, path of the masked output file
    '''

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('masking ' + k + ' file: ' + File + ' ...')

    # Read maskFile
    atrm = readfile.read_attribute(maskFile)
    km = atrm['FILE_TYPE']
    if km not in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print('reading mask file: ' + maskFile)
        mask = readfile.read(maskFile)[0]
        if inps_dict:
            mask = update_mask(mask, inps_dict)

    if not outFile:
        outFile = os.path.splitext(File)[0] + '_masked' + os.path.splitext(
            File)[1]

    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())

        h5out = h5py.File(outFile, 'w')
        print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k == 'timeseries':
        print('number of acquisitions: ' + str(len(epochList)))
        group = h5out.create_group(k)
        for d in epochList:
            print(d)
            unw = h5file[k].get(d)[:]

            unw = mask_matrix(unw, mask)

            dset = group.create_dataset(d, data=unw, compression='gzip')
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(len(epochList)))
        gg = h5out.create_group(k)

        # Mask multi group file with multi group coherence file
        if km == 'coherence':
            h5mask = h5py.File(maskFile, 'r')
            cohList = sorted(h5mask[km].keys())
            if len(cohList) != len(epochList):
                sys.exit('ERROR: coherence mask file has a different '
                         'number of interferograms than the input file!')

        for i in range(len(epochList)):
            igram = epochList[i]
            print(igram)
            unw = h5file[k][igram].get(igram)[:]

            if km == 'coherence':
                coh = cohList[i]
                print(coh)
                mask = h5mask[km][coh].get(coh)[:]
                if inps_dict:
                    mask = update_mask(mask, inps_dict)

            unw = mask_matrix(unw, mask)

            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=unw, compression='gzip')
            for key, value in h5file[k][igram].attrs.items():
                group.attrs[key] = value

    ##### Single Dataset File
    else:
        unw, atr = readfile.read(File)
        unw = mask_matrix(unw, mask)
        print('writing >>> ' + outFile)
        writefile.write(unw, atr, outFile)

    try:
        h5file.close()
    except Exception:
        pass
    try:
        h5out.close()
    except Exception:
        pass
    try:
        h5mask.close()
    except Exception:
        pass
    return outFile
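
A sketch for mask_file(); the inps_dict keys follow the docstring above and are assumed to be interpreted by update_mask() (file names hypothetical):

    # Mask a time series, keeping only pixels whose mask value exceeds 0.7
    out = mask_file('timeseries.h5', 'temporalCoherence.h5',
                    inps_dict={'thr': 0.7})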
Example #9
def file_operation(fname, operator, operand, fname_out=None):
    '''Mathematical operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not fname_out:
        if   operator in ['+','plus',  'add',      'addition']:        suffix = 'plus'
        elif operator in ['-','minus', 'substract','substraction']:    suffix = 'minus'
        elif operator in ['*','times', 'multiply', 'multiplication']:  suffix = 'multiply'
        elif operator in ['/','obelus','divide',   'division']:        suffix = 'divide'
        elif operator in ['^','pow','power']:                          suffix = 'pow'
        else: raise ValueError('Unrecognized operator: '+operator)
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0]+'_'+suffix+str(operand)+ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file+multi_dataset_hdf5_file:
        h5 = h5py.File(fname,'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out,'w')
        group = h5out.create_group(k)
        print('writing >>> '+fname_out)

        if k == 'timeseries':
            print('number of acquisitions: '+str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date, data=data_out, compression='gzip')
                prog_bar.update(i+1, suffix=date)
            for key,value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms','wrapped','coherence']:
            print('number of interferograms: '+str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram, data=data_out, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Dual-dataset non-HDF5 file
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print('writing >>> '+fname_out)
        writefile.write(data_out, atr, fname_out)

    return fname_out
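
A sketch for file_operation(); the operator aliases are the ones listed in the suffix table above (file name hypothetical):

    out = file_operation('velocity.h5', '-', 0.02)        # velocity_minus0.02.h5
    out = file_operation('velocity.h5', '*', 100.0,
                         fname_out='velocity_cm.h5')      # explicit output name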
Example #10
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    ## input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking ' + k + ' file ' + infile)
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    ## output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    print('writing >>> ' + outfile)

    ###############################################################################
    ## Read/Write multi-dataset files
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                data_mli = multilook_matrix(data, lks_y, lks_x)
                atr_mli = multilook_attribute(atr,
                                              lks_y,
                                              lks_x,
                                              print_msg=False)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch,
                                         data=data_mli,
                                         compression='gzip')
                for key, value in atr_mli.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: ' + str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_mli = multilook_matrix(data, lks_y, lks_x)

                dset = group.create_dataset(epoch,
                                            data=data_mli,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=epoch)
            atr = h5[k].attrs
            atr_mli = multilook_attribute(atr, lks_y, lks_x)
            for key, value in atr_mli.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    ## Read/Write dual- and single-dataset files
    elif k == '.trans':
        rg, az, atr = readfile.read(infile)
        rgmli = multilook_matrix(rg, lks_y, lks_x)
        #rgmli *= 1.0/lks_x
        azmli = multilook_matrix(az, lks_y, lks_x)
        #azmli *= 1.0/lks_y
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(rgmli, azmli, atr, outfile)
    else:
        data, atr = readfile.read(infile)
        data_mli = multilook_matrix(data, lks_y, lks_x)
        atr = multilook_attribute(atr, lks_y, lks_x)
        writefile.write(data_mli, atr, outfile)

    return outfile
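
A sketch for multilook_file() with hypothetical look numbers; note the derived output name (with the 'Nalks_Mrlks' suffix) is only used when run inside the input file's own directory:

    out = multilook_file('unwrapIfgram.h5', lks_y=4, lks_x=4)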
Example #11
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    '''Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, optional, output file name/path
    '''

    # Basic info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    try:
        ref_yx = [int(atr['ref_y']), int(atr['ref_x'])]
    except (KeyError, ValueError):
        ref_yx = None

    filter_type = filter_type.lower()
    MSG = 'filtering ' + k + ' file: ' + fname + ' using ' + filter_type + ' filter'
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        MSG += ' with kernel size of %d' % int(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        MSG += ' with sigma of %.1f' % filter_par
    print(MSG)

    if not fname_out:
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0] + '_' + filter_type + ext

    ##### Multiple Dataset File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + fname_out)

        if k == 'timeseries':
            print('number of acquisitions: ' + str(epoch_num))
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                dset = group.create_dataset(date,
                                            data=data_filt,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            for key, value in atr.items():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print('number of interferograms: ' + str(epoch_num))
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_filt = filter_data(data, filter_type, filter_par)
                if ref_yx and k in ['interferograms']:
                    data_filt -= data_filt[ref_yx[0], ref_yx[1]]

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_filt,
                                         compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_filt = filter_data(data, filter_type, filter_par)
        if ref_yx and k in ['.unw', 'velocity']:
            data_filt -= data_filt[ref_yx[0], ref_yx[1]]
        print('writing >>> ' + fname_out)
        writefile.write(data_filt, atr, fname_out)

    return fname_out
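
A sketch for filter_file(); 'lowpass_gaussian' is assumed to be a name accepted by filter_data(), consistent with the 'gaussian'/'avg' suffix checks above:

    # Gaussian low-pass with sigma 3.0 (also the default when filter_par=None)
    out = filter_file('unwrapIfgram.h5', 'lowpass_gaussian', filter_par=3.0)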
Example #12
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print('read option from template file: ' + inps.template_file)
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print("loading time series: " + inps.timeseries_file)
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file, 'r')
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print('number of acquisitions: ' + str(date_num))

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print('read perpendicular baseline')
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print('\tconsider P_BASELINE variation in azimuth direction')
        else:
            pbase = inps.pbase
    except Exception:
        print('\tCannot find P_BASELINE_TIMESERIES from timeseries file.')
        print('\tTrying to calculate it from interferograms file')
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Cannot correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print('read temporal baseline')
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print('reading incidence angle from file: ' + inps.incidence_angle)
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print('use input incidence angle : ' +
                      str(inps.incidence_angle))
            except Exception:
                raise ValueError('Cannot read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print('calculate incidence angle using attributes of time series file')
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print('reading range distance from file: ' + inps.range_dis)
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print('use input range distance : ' + str(inps.range_dis))
            except Exception:
                raise ValueError('Cannot read input range distance: ' +
                                 str(inps.range_dis))
    else:
        print(
            'calculate range distance using attributes from time series file')
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print('-------------------------------------------------')
    if inps.phase_velocity:
        print('using phase velocity history')
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print('using phase history')
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0

    # Polynomial order of model
    print("temporal deformation model's polynomial order = " +
          str(inps.poly_order))
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print("temporal deformation model's step function step at " +
              inps.step_date)
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print('-------------------------------------------------')

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width], dtype=np.float32)
    resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32)
    constC = np.zeros([length, width], dtype=np.float32)
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print('inverting using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width))
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i // length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width // 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print('inverting using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width))
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1]

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = X[0]
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width // 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print(
            'inverting using L2-norm minimization (unweighted least squares) for the whole area'
        )

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print(
            'ERROR: Script only supports the same dimension for both incidence angle and range distance matrices.'
        )
        print('dimension of incidence angle: ' +
              str(inps.incidence_angle.ndim))
        print('dimension of range distance: ' + str(inps.range_dis.ndim))
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in list(atr.keys()):
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print('writing >>> ' + dem_error_file)
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print('writing >>> ' + inps.outfile)
        print('number of dates: ' + str(len(date_list)))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print('writing >>> ' + outFile)
    print('number of dates: ' + str(A_def.shape[0]))
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.items():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
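
For reference, the forward model inverted above: the A_delta_z column maps a DEM error to range change via the standard small-baseline relation, written here with the variable names used in the loops:

    # delta_r = pbase / (range_dis * sin(inc_angle)) * delta_z
    # so A = [A_delta_z, A_def], the first unknown X[0] is delta_z,
    # and the remaining columns absorb the temporal deformation model.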
Example #13
def main(argv):
    
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    print('input file(s): '+str(len(inps.file)))
    print(inps.file)
    
    #print '\n*************** Phase Ramp Removal ***********************'
    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    # Read mask file if provided
    if inps.mask_file == 'no':  inps.mask_file = None
    if inps.mask_file:
        try:
            mask_atr = readfile.read_attribute(inps.mask_file)
        except Exception:
            print('Cannot open mask file: '+inps.mask_file)
            inps.mask_file = None

    # Update mask for multiple surfaces
    if inps.ysub:
        # Read mask
        if inps.mask_file:
            Mask_temp = readfile.read(inps.mask_file)[0]
            Mask = np.zeros((length, width), dtype=np.float32)
            Mask[Mask_temp!=0] = 1
        else:
            Mask = np.ones((length, width))
        
        # Update mask for multiple surface from inps.ysub
        mask_multiSurface = np.zeros((length,width), dtype=np.float32)
        surfNum = len(inps.ysub)//2
        if surfNum == 1:
            mask_multiSurface = Mask
        else:
            i = 0
            mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i+1],:] = Mask[inps.ysub[2*i]:inps.ysub[2*i+1],:]
            for i in range(1,surfNum):
                if inps.ysub[2*i] < inps.ysub[2*i-1]:
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i-1],:]  += Mask[inps.ysub[2*i]:inps.ysub[2*i-1],:]*(i+1)
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i-1],:]  /= 2
                    mask_multiSurface[inps.ysub[2*i-1]:inps.ysub[2*i+1],:] = Mask[inps.ysub[2*i-1]:inps.ysub[2*i+1],:]*(i+1)
                else:
                    mask_multiSurface[inps.ysub[2*i]:inps.ysub[2*i+1],:]   = Mask[inps.ysub[2*i]:inps.ysub[2*i+1],:]*(i+1)
         
        # Write updated mask for multiple surfaces into file
        outFile = 'mask_'+str(surfNum)+inps.surface_type+'.h5'
        atr['FILE_TYPE'] = 'mask'
        writefile.write(mask_multiSurface, atr, outFile)
        print('saved mask to '+outFile)

    ############################## Removing Phase Ramp #######################################
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(len(inps.file))

    if len(inps.file) == 1:
        rm.remove_surface(inps.file[0], inps.surface_type, inps.mask_file, inps.outfile, inps.ysub)

    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(rm.remove_surface)(file, inps.surface_type, inps.mask_file, ysub=inps.ysub)\
                                   for file in inps.file)

    else:
        for File in inps.file:
            print('------------------------------------------')
            rm.remove_surface(File, inps.surface_type, inps.mask_file, ysub=inps.ysub)
    
    print('Done.')
    return
Example #14
def main(argv):
    try:
        File = argv[0]
    except IndexError:
        usage()
        sys.exit(1)

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    atr['PROCESSOR'] = 'roipac'

    h5file = h5py.File(File, 'r')

    if k == 'velocity':
        dset = h5file['velocity'].get('velocity')
        data = dset[0:dset.shape[0], 0:dset.shape[1]]
        print("converting velocity to a 1 year interferogram.")
        wvl = float(h5file[k].attrs['WAVELENGTH'])
        data = (-4 * pi / wvl) * data

        outname = File.split('.')[0] + '.unw'
        print('writing >>> ' + outname)
        writefile.write(data, atr, outname)

    elif k == 'timeseries':
        dateList = list(h5file['timeseries'].keys())
        ## Input
        if len(sys.argv) == 2:
            print('No input date specified >>> continue with the last date')
            dateList = list(h5file['timeseries'].keys())
            d = dateList[-1]
        elif len(sys.argv) == 3:
            d = sys.argv[2]
        elif len(sys.argv) == 4:
            ds = sorted(sys.argv[2:4])
            d_ref = ds[0]
            d = ds[1]
        else:
            usage()
            sys.exit(1)
        d = ptime.yyyymmdd(d)
        try:
            d_ref = ptime.yyyymmdd(d_ref)
        except NameError:
            pass

        ## Data
        print('reading ' + d + ' ... ')
        data = h5file['timeseries'].get(d)[:]
        try:
            print('reading ' + d_ref + ' ... ')
            data_ref = h5file['timeseries'].get(d_ref)[:]
            data = data - data_ref
        except Exception:
            pass
        wvl = float(atr['WAVELENGTH'])
        data *= -4 * pi / wvl

        ## outName
        try:
            master_d = d_ref
        except NameError:
            try:
                master_d = atr['ref_date']
            except KeyError:
                master_d = dateList[0]
        if len(master_d) == 8: master_d = master_d[2:8]
        if len(d) == 8: d = d[2:8]
        outname = master_d + '_' + d + '.unw'

        ## Attributes
        atr['FILE_TYPE'] = '.unw'
        atr['P_BASELINE_TIMESERIES'] = '0.0'
        atr['UNIT'] = 'radian'
        atr['DATE'] = master_d
        atr['DATE12'] = master_d + '-' + d

        ## Writing
        print('writing >>> ' + outname)
        writefile.write(data, atr, outname)

    elif k in ['interferograms', 'coherence', 'wrapped']:
        ## Check input
        igramList = list(h5file[k].keys())
        try:
            d = sys.argv[2]
            for i in range(len(igramList)):
                if d in igramList[i]:
                    igram = igramList[i]
        except IndexError:
            igram = igramList[-1]
            print('No input date specified >>> continue with the last date')

        ## Read and Write
        print('reading ' + igram + ' ... ')
        data = h5file[k][igram].get(igram)[:]
        atr = dict(h5file[k][igram].attrs)
        atr['PROCESSOR'] = 'roipac'
        outname = igram

        print('writing >>> ' + outname)
        writefile.write(data, atr, outname)

    else:
        data = h5file[k].get(k)[:]
        if k in ['temporal_coherence']:
            outname = os.path.splitext(File)[0] + '.cor'
        elif k in ['dem', '.hgt', '.dem']:
            atr['FILE_TYPE'] = '.dem'
            outname = os.path.splitext(File)[0] + '.dem'
        else:
            outname = os.path.splitext(File)[0] + '.unw'
        print('writing >>> ' + outname)
        writefile.write(data, atr, outname)

    h5file.close()
    return
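A hedged invocation sketch for the converter above; the script name and dates are hypothetical, and main() also reads the extra dates from sys.argv as written:

import sys

# export the 20100101 epoch of a timeseries file as a ROI_PAC .unw
# interferogram referenced to the 20090101 epoch
sys.argv = ['save_roipac.py', 'timeseries.h5', '20090101', '20100101']
main(sys.argv[1:])                 # writes >>> 090101_100101.unw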
Example #15
def subset_file(File, subset_dict_input, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. fill value for area outside of data coverage. default=None
                                   None or absent to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        outFile :  str, path/name of output file; 
                   outFile = 'subset_'+File, if File is in current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print('subset ' + k + ' file: ' + File + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # If fill_value exists and is not None, subset data and fill the assigned value for areas outside its coverage.
    # Otherwise, re-check the subset to make sure it is within data coverage and initialize the matrix with np.nan.
    outfill = bool(subset_dict.get('fill_value'))
    if not outfill:
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print('data   range in y/x: ' + str(data_box))
    print('subset range in y/x: ' + str(pix_box))
    print('data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict)))
    print('subset range in lat/lon: ' + str(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            if 'tight' in list(subset_dict.keys()) and subset_dict['tight']:
                outFile = os.path.splitext(
                    File)[0] + '_tight' + os.path.splitext(File)[1]
            else:
                outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print('writing >>> ' + outFile)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print('number of acquisitions: ' + str(epochNum))
        else:
            print('number of interferograms: ' + str(epochNum))

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)
        prog_bar = ptime.progress_bar(maxValue=epochNum)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            dset = h5file[k][epoch].get(epoch)
            atr_dict = dict(h5file[k][epoch].attrs)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box, print_msg=False)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
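A minimal usage sketch for subset_file(); the file name and pixel ranges are hypothetical:

# crop columns 100-399 and rows 200-599 of a hypothetical velocity file;
# with no 'fill_value', the box is clipped to the data coverage first
subset_dict = {'subset_x': [100, 400], 'subset_y': [200, 600]}
out_file = subset_file('velocity.h5', subset_dict, outFile='subset_velocity.h5')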
Example #16
def main(argv):
    inps = cmdLineParse()

    ##### 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1,atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print('Input 1st file is '+k1)

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width  = int(round((east  - west )/lon_step))
    length = int(round((south - north)/lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incident angle
    u_los = np.zeros((2, width*length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print('---------------------')
        print('reading '+fname)
        atr = readfile.read_attribute(fname)

        [x0,x1] = subset.coord_geo2radar([west,east], atr, 'lon')
        [y0,y1] = subset.coord_geo2radar([north,south], atr, 'lat')
        V = readfile.read(fname, (x0,y0,x1,y1))[0]
        u_los[i,:] = V.flatten()

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print('heading angle: '+str(heading_angle))
        heading_angle *= np.pi/180.
        heading.append(heading_angle)
        
        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        #print 'incidence angle: '+str(inc_angle)
        inc_angle *= np.pi/180.
        incidence.append(inc_angle)


    ##### 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could easily be modified to support multiple viewing geometries (e.g. two adjacent tracks from asc & desc) to resolve 3D
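    # In matrix form, with theta_i = incidence and alpha_i = heading of track i
    # as implemented below:
    #     | cos(theta_1)  sin(theta_1)*sin(alpha_1 - az) |   | Uv |   | Ulos_1 |
    #     | cos(theta_2)  sin(theta_2)*sin(alpha_2 - az) | * | Uh | = | Ulos_2 |
    # i.e. A * u_vh = u_los, solved per pixel with the pseudoinverse of A.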

    # Design matrix
    A = np.zeros((2,2))
    for i in range(len(inps.file)):
        A[i,0] = np.cos(incidence[i])
        A[i,1] = np.sin(incidence[i]) * np.sin(heading[i]-inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0,:], (length, width))
    u_h = np.reshape(u_vh[1,:], (length, width))

    ##### 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['FILE_LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print('---------------------')
    outname = inps.outfile[0]
    print('writing   vertical component to file: '+outname)
    writefile.write(u_v, atr, outname)

    outname = inps.outfile[1]
    print('writing horizontal component to file: '+outname)
    writefile.write(u_h, atr, outname)

    print('Done.')
    return
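The inversion step above can be sanity-checked in isolation. A small synthetic sketch, with made-up angles and displacements (np.allclose should print True):

import numpy as np

incidence = [np.deg2rad(34.), np.deg2rad(39.)]    # hypothetical asc/desc geometry
heading   = [np.deg2rad(348.), np.deg2rad(192.)]
az = 0.0                                          # horizontal component along north

u_true = np.array([0.02, -0.01])                  # [vertical, horizontal] in meters
A = np.zeros((2, 2))
for i in range(2):
    A[i, 0] = np.cos(incidence[i])
    A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - az)

u_los = A.dot(u_true)                             # forward model: LOS of each track
u_est = np.linalg.pinv(A).dot(u_los)              # invert, as in main() above
print(np.allclose(u_est, u_true))                 # True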
Example #17
def main(argv):
    inps = cmdLineParse()

    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input ' + k + ' file: ' + inps.timeseries_file)
    if not k == 'timeseries':
        sys.exit('ERROR: input file is not timeseries!')
    h5file = h5py.File(inps.timeseries_file, 'r')

    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    print('--------------------------------------------')
    print('Dates from input file: ' + str(len(dateListAll)))
    print(dateListAll)

    inps.ex_date = get_exclude_date(inps, dateListAll)

    dateList = sorted(list(set(dateListAll) - set(inps.ex_date)))
    print('--------------------------------------------')
    if len(dateList) == len(dateListAll):
        print('using all dates to calculate the velocity')
    else:
        print('Dates used to estimate the velocity: ' + str(len(dateList)))
        print(dateList)
    print('--------------------------------------------')

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix
    B = np.ones([len(datevector), 2])
    B[:, 0] = datevector
    #B_inv = np.linalg.pinv(B)
    B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
    B_inv = np.array(B_inv, np.float32)
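    # (B^T B)^{-1} B^T is the least-squares pseudoinverse of B, so each column of
    # X = B_inv * timeseries below holds [slope, intercept] of the per-pixel fit;
    # with datevector in decimal years, the slope X[0, :] is the velocity per year.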

    # Loading timeseries
    print("Loading time series file: " + inps.timeseries_file + ' ...')
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum, length * width], np.float32)
    prog_bar = ptime.progress_bar(maxValue=dateNum, prefix='loading: ')
    for i in range(dateNum):
        date = dateList[i]
        timeseries[i, :] = h5file[k].get(date)[:].flatten()
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    h5file.close()

    # Velocity Inversion
    print('Calculating velocity ...')
    X = np.dot(B_inv, timeseries)
    velocity = np.reshape(X[0, :], [length, width])

    print('Calculating rmse ...')
    timeseries_linear = np.dot(B, X)
    timeseries_residual = timeseries - timeseries_linear
    rmse = np.reshape(np.sqrt((np.sum((timeseries_residual)**2, 0)) / dateNum),
                      [length, width])

    print('Calculating the standard deviation of the estimated velocity ...')
    s1 = np.sqrt(np.sum(timeseries_residual**2, 0) / (dateNum - 2))
    s2 = np.sqrt(np.sum((datevector - np.mean(datevector))**2))
    std = np.reshape(s1 / s2, [length, width])
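    # s1/s2 is the textbook standard error of the regression slope:
    # sigma_v = sqrt(sum(res**2)/(N-2)) / sqrt(sum((t_i - t_mean)**2))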

    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'

    inps.outfile_rmse = os.path.splitext(
        inps.outfile)[0] + 'Rmse' + os.path.splitext(inps.outfile)[1]
    inps.outfile_std = os.path.splitext(
        inps.outfile)[0] + 'Std' + os.path.splitext(inps.outfile)[1]
    # R-squared output is currently disabled, see the commented block above
    #inps.outfile_r2 = os.path.splitext(inps.outfile)[0] + 'R2' + os.path.splitext(inps.outfile)[1]

    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum - 1]

    # File Writing
    print('--------------------------------------')
    atr['FILE_TYPE'] = 'velocity'
    print('writing >>> ' + inps.outfile)
    writefile.write(velocity, atr, inps.outfile)

    #atr['FILE_TYPE'] = 'rmse'
    print('writing >>> ' + inps.outfile_rmse)
    writefile.write(rmse, atr, inps.outfile_rmse)

    #atr['FILE_TYPE'] = 'rmse'
    print('writing >>> ' + inps.outfile_std)
    writefile.write(std, atr, inps.outfile_std)

    print('Done.\n')
    return inps.outfile
Example #18
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print('Reference value: ')
    print(refList)

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print('file type: ' + k)

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Input File Info
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print('\nERROR: Reference value has a different epoch number '
                  'from the input file.')
            print('Reference list epoch number: ' + str(len(refList)))
            print('Input file     epoch number: ' + str(epochNum))
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print('writing >>> ' + outName)
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print('number of acquisitions: ' + str(epochNum))
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.items():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print('number of interferograms: ' + str(epochNum))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr = dict(h5file[k][epoch].attrs)

            data -= refList[i]
            atr = seed_attributes(atr, ref_x, ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.items():
                gg.attrs[key] = value

            prog_bar.update(i + 1, suffix=date12_list[i])

    ##### Single Dataset File
    else:
        print('writing >>> ' + outName)
        data, atr = readfile.read(File)
        data -= refList
        atr = seed_attributes(atr, ref_x, ref_y)
        writefile.write(data, atr, outName)

    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
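A hedged usage sketch (file names, pixel coordinates, and reference values are hypothetical):

# re-reference a 3-epoch timeseries so pixel (150, 230) becomes zero in every
# epoch; refList holds that pixel's value per epoch, in acquisition order
refList = [0.000, 0.012, -0.003]
seed_file_reference_value('timeseries.h5', 'timeseries_seeded.h5',
                          refList, ref_y=150, ref_x=230)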
Example #19
def main(argv):
    inps = cmdLineParse()

    # Input File Info
    atr = readfile.read_attribute(inps.file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    k = atr['FILE_TYPE']
    print('Input file is '+k+': '+inps.file)

    # default output filename
    if not inps.outfile:
        if k == 'temporal_coherence':
            inps.outfile = 'maskTempCoh.h5'
        else:
            inps.outfile = 'mask.h5'
        if inps.file.startswith('geo_'):
            inps.outfile = 'geo_'+inps.outfile

    ##### Mask: Non-zero
    if inps.nonzero:
        print('generate mask for all pixels with non-zero value')
        inps.outfile = ut.nonzero_mask(inps.file, inps.outfile)
        return inps.outfile

    ##### Mask: Threshold 
    print('create initial mask with the same size as the input file and all = 1')
    mask = np.ones((length, width), dtype=np.float32)

    data, atr = readfile.read(inps.file, inps.epoch)

    # min threshold
    if inps.vmin is not None:
        mask[data < inps.vmin] = 0
        print('all pixels with value < %s = 0' % str(inps.vmin))

    # max threshold
    if inps.vmax is not None:
        mask[data > inps.vmax] = 0
        print('all pixels with value > %s = 0' % str(inps.vmax))

    # nan value
    mask[np.isnan(data)] = 0
    print('all pixels with nan value = 0')

    # subset in Y
    if inps.subset_y:
        y0,y1 = sorted(inps.subset_y)
        mask[0:y0,:] = 0
        mask[y1:length,:] = 0
        print('all pixels with y OUT of [%d, %d] = 0' % (y0,y1))

    # subset in x
    if inps.subset_x:
        x0,x1 = sorted(inps.subset_x)
        mask[:,0:x0] = 0
        mask[:,x1:width] = 0
        print('all pixels with x OUT of [%d, %d] = 0' % (x0,x1))
  
    ## Write mask file
    print('writing >>> '+inps.outfile)
    atr['FILE_TYPE'] = 'mask'
    writefile.write(mask, atr, inps.outfile)
    return inps.outfile
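The thresholding core above is plain numpy; a self-contained sketch of the same masking logic on a made-up array:

import numpy as np

data = np.array([[0.2, 0.9, np.nan],
                 [0.7, 0.4, 0.95]], dtype=np.float32)
mask = np.ones(data.shape, dtype=np.float32)
mask[data < 0.5] = 0          # min threshold, cf. inps.vmin
mask[np.isnan(data)] = 0      # nan pixels are always masked out
print(mask)                   # [[0. 1. 0.]
                              #  [1. 0. 1.]]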
Example #20
def unwrap_error_correction_bridging(ifgram_file, mask_file, y_list, x_list, ramp_type='plane',\
                                     ifgram_cor_file=None, save_cor_deramp_file=False):
    '''Unwrapping error correction with bridging.
    Inputs:
        ifgram_file : string, name/path of interferogram(s) to be corrected
        mask_file   : string, name/path of mask file to mark different patches 
        y/x_list    : list of int, bonding points in y/x 
        ifgram_cor_file : string, optional, output file name
        save_cor_deramp_file : bool, optional
    Output:
        ifgram_cor_file
    Example:
        y_list = [235, 270, 350, 390]
        x_list = [880, 890, 1200, 1270]
        unwrap_error_correction_bridging('unwrapIfgram.h5', 'mask_all.h5', y_list, x_list, 'quadratic')
    '''
    ##### Mask and Ramp
    mask = readfile.read(mask_file)[0]
    ramp_mask = mask == 1
    print('estimate phase ramp during the correction')
    print('ramp type: '+ramp_type)

    ##### Bridge Info
    # Check
    for i in range(len(x_list)):
        if mask[y_list[i],x_list[i]] == 0:
            print('\nERROR: Connecting point (%d,%d) is outside the masked area! Select it again!\n' % (y_list[i],x_list[i]))
            sys.exit(1)
    print('Number of bridges: '+str(len(x_list)//2))
    print('Bonding points coordinates:\nx: '+str(x_list)+'\ny: '+str(y_list))

    # Plot Connecting Pair of Points
    plot_bonding_points = False
    if plot_bonding_points:
        point_yx = ''
        line_yx  = ''
        n_bridge = len(x_list)//2
        for i in range(n_bridge):
            pair_yx = str(y_list[2*i])+','+str(x_list[2*i])+','+str(y_list[2*i+1])+','+str(x_list[2*i+1])
            if not i == n_bridge-1:
                point_yx += pair_yx+','
                line_yx  += pair_yx+';'
            else:
                point_yx += pair_yx
                line_yx  += pair_yx

        try:
            plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                       '" --nodisplay -o bonding_points.png -f '+mask_file
            print(plot_cmd)
            os.system(plot_cmd)
        except: pass

    # Basic info
    ext = os.path.splitext(ifgram_file)[1]
    atr = readfile.read_attribute(ifgram_file)
    k = atr['FILE_TYPE']

    try:
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
    except:
        sys.exit('ERROR: Can not find ref_y/x value, input file is not referenced in space!')

    # output file name
    if not ifgram_cor_file:
        ifgram_cor_file = os.path.splitext(ifgram_file)[0]+'_unwCor'+ext
    ifgram_cor_deramp_file = os.path.splitext(ifgram_cor_file)[0]+'_'+ramp_type+ext

    ##### HDF5 file
    if ext == '.h5':
        ##### Read
        h5 = h5py.File(ifgram_file,'r')
        ifgram_list = sorted(h5[k].keys())
        ifgram_num = len(ifgram_list)

        h5out = h5py.File(ifgram_cor_file,'w')
        group = h5out.create_group(k)
        print('writing >>> '+ifgram_cor_file)

        if save_cor_deramp_file:
            h5out_deramp = h5py.File(ifgram_cor_deramp_file,'w')
            group_deramp = h5out_deramp.create_group(k)
            print('writing >>> '+ifgram_cor_deramp_file)

        ##### Loop
        print('Number of interferograms: '+str(ifgram_num))
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        date12_list = ptime.list_ifgram2date12(ifgram_list)
        for i in range(ifgram_num):
            ifgram = ifgram_list[i]
            data = h5[k][ifgram].get(ifgram)[:]
            data -= data[ref_y, ref_x]

            data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
            data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)

            gg = group.create_group(ifgram)
            dset = gg.create_dataset(ifgram, data=data_derampCor-ramp, compression='gzip')
            for key, value in h5[k][ifgram].attrs.items():
                gg.attrs[key]=value

            if save_cor_deramp_file:
                gg_deramp = group_deramp.create_group(ifgram)
                dset = gg_deramp.create_dataset(ifgram, data=data_derampCor, compression='gzip')
                for key, value in h5[k][ifgram].attrs.items():
                    gg_deramp.attrs[key]=value
            prog_bar.update(i+1, suffix=date12_list[i])

        prog_bar.close()
        h5.close()
        h5out.close()
        try: h5out_deramp.close()
        except: pass

    #### .unw file
    elif ext == '.unw':
        print('read '+ifgram_file)
        data = readfile.read(ifgram_file)[0]
        data -= data[ref_y, ref_x]

        data_deramp, ramp = rm.remove_data_surface(data, ramp_mask, ramp_type)
        data_derampCor = bridging_data(data_deramp, mask, x_list, y_list)
        data_cor = data_derampCor - ramp

        print('writing >>> '+ifgram_cor_file)
        ifgram_cor_file        = writefile.write(data_cor,       atr, ifgram_cor_file)
        if save_cor_deramp_file:
            print('writing >>> '+ifgram_cor_deramp_file)
            ifgram_cor_deramp_file = writefile.write(data_derampCor, atr, ifgram_cor_deramp_file)

    else:
        sys.exit('Unsupported file type: '+ext)

    return ifgram_cor_file, ifgram_cor_deramp_file
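bridging_data() is not defined in this excerpt. A minimal sketch of the usual bridging idea, assuming mask labels each patch with a distinct positive integer and each consecutive (y, x) pair forms one bridge; this is an illustration, not the codebase's implementation:

import numpy as np

def bridging_data_sketch(data, mask, x_list, y_list):
    '''Shift each bridged patch by an integer number of 2*pi cycles so its
    phase agrees with the reference patch at the bonding points.'''
    data = np.array(data, dtype=np.float32)
    for i in range(len(x_list)//2):
        y0, x0 = y_list[2*i],   x_list[2*i]      # point in the reference patch
        y1, x1 = y_list[2*i+1], x_list[2*i+1]    # point in the patch to adjust
        n_cycle = np.rint((data[y0, x0] - data[y1, x1]) / (2.*np.pi))
        data[mask == mask[y1, x1]] += 2.*np.pi*n_cycle
    return data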
Example #21
def add_files(fname_list, fname_out=None):
    '''Generate sum of all input files
    Inputs:
        fname_list - list of string, path/name of input files to be added
        fname_out  - string, optional, path/name of output file
    Output:
        fname_out  - string, path/name of output file
    Example:
        add_files(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    '''
    # Default output file name
    ext = os.path.splitext(fname_list[0])[1]
    if not fname_out:
        fname_out = os.path.splitext(fname_list[0])[0]
        for i in range(1, len(fname_list)):
            fname_out += '_plus_' + os.path.splitext(
                os.path.basename(fname_list[i]))[0]
        fname_out += ext

    # Basic Info
    atr = readfile.read_attribute(fname_list[0])
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    print('First input file is ' + atr['PROCESSOR'] + ' ' + k)

    ## Multi-dataset/group file
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        # File Type Check
        for i in range(1, len(fname_list)):
            ki = readfile.read_attribute(fname_list[i])['FILE_TYPE']
            if (k in multi_dataset_hdf5_file and ki in multi_dataset_hdf5_file
                    or k in multi_group_hdf5_file
                    and ki in multi_group_hdf5_file):
                pass
            else:
                print('Input file structures are not the same: ' + k +
                      ' vs. ' + ki)
                sys.exit(1)

        print('writing >>> ' + fname_out)
        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)

        h5 = h5py.File(fname_list[0], 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in multi_dataset_hdf5_file:
        print('number of acquisitions: %d' % epoch_num)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                d = h5file[k].get(epoch)[:]
                data = add_matrix(data, d)
                h5file.close()

            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)

        for key, value in atr.items():
            group.attrs[key] = value
        h5out.close()
        h5.close()
        prog_bar.close()

    elif k in multi_group_hdf5_file:
        print('number of interferograms: %d' % epoch_num)
        date12_list = ptime.list_ifgram2date12(epoch_list)
        for i in range(epoch_num):
            epoch = epoch_list[i]
            data = np.zeros((length, width))
            for fname in fname_list:
                h5file = h5py.File(fname, 'r')
                temp_k = list(h5file.keys())[0]
                temp_epoch_list = sorted(h5file[temp_k].keys())
                d = h5file[temp_k][temp_epoch_list[i]].get(temp_epoch_list[i])[:]
                data = add_matrix(data, d)
                h5file.close()

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in h5[k][epoch].attrs.items():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])
        h5out.close()
        h5.close()
        prog_bar.close()

    ## Single dataset files
    else:
        data = np.zeros((length, width))
        for fname in fname_list:
            print('loading ' + fname)
            d, r = readfile.read(fname)
            data = add_matrix(data, d)

        print('writing >>> ' + fname_out)
        writefile.write(data, atr, fname_out)

    return fname_out
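add_matrix() is referenced but not defined in this excerpt; a plausible nan-aware sketch consistent with how it is called here (an assumption, not necessarily the codebase's implementation):

import numpy as np

def add_matrix_sketch(data1, data2):
    '''Element-wise sum treating nan as "no data": a pixel that is nan in both
    inputs stays nan; a pixel that is nan in one input takes the other value.'''
    data1 = np.asarray(data1, dtype=np.float32)
    data2 = np.asarray(data2, dtype=np.float32)
    out = np.nansum(np.stack([data1, data2]), axis=0)  # nan counted as 0 here
    out[np.isnan(data1) & np.isnan(data2)] = np.nan    # restore all-nan pixels
    return out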