Example No. 1
def roipac_nonzero_mask(unwFileList, maskFile='Mask.h5'):
    '''Generate mask of non-zero amplitude pixels for a list of ROI_PAC .unw files.'''
    unwFileList, width, length = check_file_size(unwFileList)
    if unwFileList:
        # Initial mask value
        if os.path.isfile(maskFile):
            maskZero, atr = readfile.read(maskFile)
            print 'update existing mask file: '+maskFile
        else:
            maskZero = np.ones([int(length), int(width)])
            atr = None
            print 'create initial mask matrix'

        # Update mask from input .unw file list
        fileNum = len(unwFileList)
        for i in range(fileNum):
            fname = unwFileList[i]
            amp, unw, rsc = readfile.read_float32(fname)

            maskZero *= amp
            ut.print_progress(i+1, fileNum, prefix='loading', suffix=os.path.basename(fname))
        mask = np.ones([int(length), int(width)])
        mask[maskZero==0] = 0
        
        # write mask hdf5 file
        print 'writing >>> '+maskFile
        h5 = h5py.File(maskFile,'w')
        group = h5.create_group('mask')
        dset = group.create_dataset('mask', data=mask, compression='gzip')
        # Attribute - *.unw.rsc
        for key,value in rsc.iteritems():
            group.attrs[key] = value
        # Attribute - *baseline.rsc
        d1, d2 = rsc['DATE12'].split('-')
        baseline_file = os.path.dirname(fname)+'/'+d1+'_'+d2+'_baseline.rsc'
        baseline_rsc = readfile.read_roipac_rsc(baseline_file)
        for key,value in baseline_rsc.iteritems():
            group.attrs[key] = value
        # Attribute - existed file
        if atr:
            for key, value in atr.iteritems():
                group.attrs[key] = value
        h5.close()

    return maskFile, unwFileList
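
A minimal usage sketch for the function above; the glob pattern and file names are illustrative assumptions, not part of the original snippet:

import glob
# hypothetical list of ROI_PAC unwrapped interferograms; the pattern is an assumption
unwFileList = sorted(glob.glob('filt_*rlks.unw'))
maskFile, unwFileList = roipac_nonzero_mask(unwFileList, maskFile='Mask.h5')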
Example No. 2
def modify_file_date12_list(File, date12_to_rmv, outFile=None):
    '''Write a new multi-group HDF5 file, keeping only the date12 pairs not listed in date12_to_rmv.'''
    k = readfile.read_attribute(File)['FILE_TYPE']
    date12_orig = pnet.get_date12_list(File)
    date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'file: ' + File
    print 'number of interferograms in file      : ' + str(len(date12_orig))
    print 'number of interferograms to keep/write: ' + str(
        len(date12_to_write))
    print 'list   of interferograms to keep/write: '
    print date12_to_write
    date12Num = len(date12_to_write)

    if not outFile:
        outFile = 'Modified_' + os.path.basename(File)
    print 'writing >>> ' + outFile
    h5out = h5py.File(outFile, 'w')
    gg = h5out.create_group(k)

    h5 = h5py.File(File, 'r')
    igramList = sorted(h5[k].keys())
    for i in range(date12Num):
        date12 = date12_to_write[i]
        idx = date12_orig.index(date12)
        igram = igramList[idx]
        ut.print_progress(i + 1, date12Num, prefix='', suffix=igram)

        data = h5[k][igram].get(igram)[:]
        group = gg.create_group(igram)
        dset = group.create_dataset(igram, data=data, compression='gzip')
        for key, value in h5[k][igram].attrs.iteritems():
            group.attrs[key] = value
    h5.close()
    h5out.close()
    print 'finished writing >>> ' + outFile

    return outFile
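
A usage sketch: drop two pairs from a multi-group interferogram file (the file name and date12 strings below are hypothetical):

date12_to_rmv = ['061020-070115', '070115-070302']  # hypothetical YYMMDD-YYMMDD pairs
outFile = modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv)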
Example No. 3
def subset_file(File, subset_dict, outFile=None):
    '''Subset file with
    Inputs:
        File        : str, path/name of file
        outFile     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. Fill value for areas outside of the data coverage. default=None
                                   None/absent to subset within the data coverage only.
    Outputs:
        outFile :  str, path/name of output file;
                   outFile = 'subset_'+File, if File is in the current directory;
                   outFile = File, if File is not in the current directory.
    '''

    # Input File Info
    try:
        atr_dict = readfile.read_attribute(File)
    except:
        return None
    width = int(atr_dict['WIDTH'])
    length = int(atr_dict['FILE_LENGTH'])
    k = atr_dict['FILE_TYPE']
    print 'subset ' + k + ' file: ' + File + ' ...'

    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr_dict)

    # If fill_value exists and is not None, subset data and fill the assigned value
    # for areas outside of the data coverage;
    # otherwise, re-check the subset to make sure it's within the data coverage,
    # and initialize the matrix with np.nan
    if not subset_dict.get('fill_value', None):
        pix_box = check_box_within_data_coverage(pix_box, atr_dict)
        subset_dict['fill_value'] = np.nan

    geo_box = box_pixel2geo(pix_box, atr_dict)
    data_box = (0, 0, width, length)
    print 'data   range in y/x: ' + str(data_box)
    print 'subset range in y/x: ' + str(pix_box)
    print 'data   range in lat/lon: ' + str(box_pixel2geo(data_box, atr_dict))
    print 'subset range in lat/lon: ' + str(geo_box)

    if pix_box == data_box:
        print 'Subset range == data coverage, no need to subset. Skip.'
        return File

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)
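    # pix_box4data  : the overlap box in the input data's own pixel coordinates
    # pix_box4subset: the same overlap box in the output (subset) pixel coordinates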

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not outFile:
        if os.getcwd() == os.path.dirname(os.path.abspath(File)):
            outFile = 'subset_' + os.path.basename(File)
        else:
            outFile = os.path.basename(File)
    print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        ##### Open Input File
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epochNum = len(epochList)
        if k in multi_dataset_hdf5_file:
            print 'number of acquisitions: ' + str(epochNum)
        else:
            print 'number of interferograms: ' + str(epochNum)

        ##### Open Output File
        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k].get(epoch)
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr_dict = subset_attribute(atr_dict, pix_box)
        for key, value in atr_dict.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            ut.print_progress(i + 1, epochNum, prefix='', suffix=epoch)

            dset = h5file[k][epoch].get(epoch)
            atr_dict = h5file[k][epoch].attrs
            data_overlap = dset[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]

            data = np.ones(
                (pix_box[3] - pix_box[1],
                 pix_box[2] - pix_box[0])) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

            atr_dict = subset_attribute(atr_dict, pix_box)
            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr_dict.iteritems():
                gg.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.jpeg', '.jpg', '.png', '.ras', '.bmp']:
        data, atr_dict = readfile.read(File, pix_box)
        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    elif k == '.trans':
        rg_overlap, az_overlap, atr_dict = readfile.read(File, pix_box4data)

        rg = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        rg[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = rg_overlap

        az = np.ones((pix_box[3] - pix_box[1],
                      pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        az[pix_box4subset[1]:pix_box4subset[3],
           pix_box4subset[0]:pix_box4subset[2]] = az_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(rg, az, atr_dict, outFile)
    else:
        data_overlap, atr_dict = readfile.read(File, pix_box4data)

        data = np.ones((pix_box[3] - pix_box[1],
                        pix_box[2] - pix_box[0])) * subset_dict['fill_value']
        data[pix_box4subset[1]:pix_box4subset[3],
             pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        atr_dict = subset_attribute(atr_dict, pix_box)
        writefile.write(data, atr_dict, outFile)

    ##### End Cleaning
    try:
        h5file.close()
        h5out.close()
    except:
        pass

    return outFile
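
A usage sketch for subset_file(); the file name, pixel ranges and fill value are illustrative assumptions:

subset_dict = {'subset_x': [200, 800],   # hypothetical column range
               'subset_y': [100, 600],   # hypothetical row range
               'fill_value': 0.0}        # pad area outside the data coverage with 0
outFile = subset_file('velocity.h5', subset_dict)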
Example No. 4
def main(argv):
    try:
        igramsFile = argv[0]
        timeSeriesFile = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        tempCohFile = argv[2]
    except:
        tempCohFile = 'temporal_coherence.h5'

    ########################################################
    #print '\n********** Temporal Coherence ****************'
    print "load time series: " + timeSeriesFile
    atr_ts = readfile.read_attribute(timeSeriesFile)
    h5timeseries = h5py.File(timeSeriesFile, 'r')
    dateList = h5timeseries['timeseries'].keys()
    numDates = len(dateList)

    print 'number of epochs: ' + str(numDates)
    dateIndex = {}
    for ni in range(numDates):
        dateIndex[dateList[ni]] = ni

    dset = h5timeseries['timeseries'].get(dateList[0])
    nrows, ncols = np.shape(dset)
    timeseries = np.zeros((numDates, nrows * ncols), np.float32)

    for i in range(numDates):
        date = dateList[i]
        dset = h5timeseries['timeseries'].get(date)
        d = dset[:]
        timeseries[dateIndex[date]][:] = d.flatten()
        ut.print_progress(i + 1, numDates, 'loading:', date)
    del d
    h5timeseries.close()

    lt, numpixels = np.shape(timeseries)
    range2phase = -4 * np.pi / float(atr_ts['WAVELENGTH'])
    timeseries = range2phase * timeseries

    ######################################################
    print "load interferograms: " + igramsFile
    h5igrams = h5py.File(igramsFile, 'r')
    ifgramList = h5igrams['interferograms'].keys()
    numIfgrams = len(ifgramList)
    print 'number of interferograms: ' + str(numIfgrams)
    A, B = design_matrix(h5igrams)
    p = -1 * np.ones([A.shape[0], 1])
    Ap = np.hstack((p, A))

    print 'calculating temporal coherence ...'
    #data = np.zeros((numIfgrams,numpixels),np.float32)
    qq = np.zeros(numpixels) + 0j
    for ni in range(numIfgrams):
        ## read interferogram
        igram = ifgramList[ni]
        data = h5igrams['interferograms'][igram].get(igram)[:].flatten()

        ## calculate difference between observed and estimated data
        ## interferogram by interferogram, less memory, Yunjun - 2016.06.10
        dataEst = np.dot(Ap[ni, :], timeseries)
        dataDiff = data - dataEst
        qq += np.exp(1j * dataDiff)

        ## progress bar
        ut.print_progress(ni + 1, numIfgrams, 'calculating:', igram)
    del timeseries, data, dataEst, dataDiff
    h5igrams.close()

    #qq=np.absolute(np.sum(np.exp(1j*dataDiff),0))/numIfgrams
    qq = np.absolute(qq) / numIfgrams
    Temp_Coh = np.reshape(qq, [nrows, ncols])

    ##### write temporal coherence file ####################
    print 'writing >>> ' + tempCohFile
    h5TempCoh = h5py.File(tempCohFile, 'w')
    group = h5TempCoh.create_group('temporal_coherence')
    dset = group.create_dataset('temporal_coherence',
                                data=Temp_Coh,
                                compression='gzip')
    for key, value in atr_ts.iteritems():
        group.attrs[key] = value
    group.attrs['UNIT'] = '1'
    h5TempCoh.close()
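
The loop above accumulates the temporal coherence gamma = |sum_i exp(1j*(phi_obs_i - phi_est_i))| / N over the N interferograms; a self-contained NumPy sketch of that core step, with made-up phase residuals:

import numpy as np
N, numpixels = 5, 100                           # stand-in sizes
dataDiff = 0.1 * np.random.randn(N, numpixels)  # made-up observed-minus-reconstructed phase
temp_coh = np.abs(np.sum(np.exp(1j * dataDiff), axis=0)) / N
assert temp_coh.max() <= 1.0 + 1e-9             # coherence is bounded by 1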
Example No. 5
def main(argv):
    inps = cmdLineParse()
    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print 'input '+k+' file: '+inps.timeseries_file
    if k != 'timeseries':
        sys.exit('ERROR: input file is not timeseries!')
    h5file = h5py.File(inps.timeseries_file, 'r')
  
    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    dateListAll = ptime.yyyymmdd(dateListAll)
    yyListAll = ptime.yyyymmdd2years(dateListAll)
    print '--------------------------------------------'
    print 'Dates from input file: '+str(len(dateListAll))
    print dateListAll
    
    # Extract dates to exclude from the input arguments
    inps.datesNot2include = []
    # 1. template_file
    if inps.template_file:
        inps = update_inps_from_template(inps, inps.template_file)
    
    # 2. ex_date 
    if inps.ex_date:
        for ex_date in inps.ex_date:
            if os.path.isfile(ex_date):
                ex_date = ptime.read_date_list(ex_date)
            else:
                ex_date = [ptime.yyyymmdd(ex_date)]
            inps.datesNot2include += list(set(ex_date) - set(inps.datesNot2include))
        # drop dates that do not exist in the input file
        inps.datesNot2include = list(set(inps.datesNot2include).intersection(dateListAll))
        print 'dates excluded: '+str(inps.datesNot2include)
    
    # 3. min_date
    if inps.min_date:
        inps.min_date = ptime.yyyymmdd(inps.min_date)
        print 'minimum date: '+inps.min_date
        yy_min = ptime.yyyymmdd2years(inps.min_date)
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yyListAll[i] < yy_min and date not in inps.datesNot2include:
                print '  remove date: '+date
                inps.datesNot2include.append(date)
    
    # 4. max_date
    if inps.max_date:
        inps.max_date = ptime.yyyymmdd(inps.max_date)
        print 'maximum date: '+inps.max_date
        yy_max = ptime.yyyymmdd2years(inps.max_date)
        for i in range(len(dateListAll)):
            date = dateListAll[i]
            if yyListAll[i] > yy_max and date not in inps.datesNot2include:
                print '  remove date: '+date
                inps.datesNot2include.append(date)
    
    # Summary
    dateList = sorted(list(set(dateListAll) - set(inps.datesNot2include)))
    print '--------------------------------------------'
    if len(dateList) == len(dateListAll):
        print 'using all dates to calculate the velocity'
    else:
        print 'Dates used to estimate the velocity: '+str(len(dateList))
        print dateList
    print '--------------------------------------------'

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix for the linear model: ts = v * t + c
    B = np.ones([len(datevector), 2])
    B[:, 0] = datevector
    #B1 = np.linalg.pinv(B)
    # least-squares estimator via the normal equations: B1 = (B^T B)^-1 B^T
    B1 = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
    B1 = np.array(B1, np.float32)
    
    # Loading timeseries
    print "Loading time series file: "+inps.timeseries_file+' ...'
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum,length*width],np.float32)
    start_time = time.time()
    for i in range(dateNum):
        date = dateList[i]
        ut.print_progress(i+1, dateNum, prefix='loading:', suffix=date, elapsed_time=time.time()-start_time)
        timeseries[i,:] = h5file[k].get(date)[:].flatten()
    h5file.close()

    # Velocity Inversion
    print 'Calculating velocity ...'
    x = np.dot(B1,timeseries)
    velocity = np.reshape(x[0,:],[length,width])
    
    print 'Calculating rmse ...'
    timeseries_linear = np.dot(B,x)
    rmse = np.reshape(np.sqrt((np.sum((timeseries_linear-timeseries)**2,0))/dateNum),[length,width])
    
    print 'Calculating the standard deviation of the estimated velocity ...'
    residual = timeseries_linear - timeseries
    s1 = np.sqrt(np.sum(residual**2,0)/(dateNum-2))
    s2 = np.sqrt(np.sum((np.array(datevector)-np.mean(datevector))**2))
    std = np.reshape(s1/s2,[length,width])
     
    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################  
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'
        if inps.datesNot2include:
            inps.outfile = os.path.splitext(inps.outfile)[0]+'_ex'+os.path.splitext(inps.outfile)[1]

    inps.outfile_rmse = 'rmse_'+inps.outfile
    inps.outfile_std = 'std_'+inps.outfile
    inps.outfile_r2 = 'R2_'+inps.outfile
    
    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum-1]
    
    # File Writing
    print '--------------------------------------'
    atr['FILE_TYPE'] = 'velocity'
    print 'writing >>> '+inps.outfile
    writefile.write(velocity, atr, inps.outfile)
    
    atr['FILE_TYPE'] = 'rmse'
    print 'writing >>> '+inps.outfile_rmse
    writefile.write(rmse, atr, inps.outfile_rmse)
    
    atr['FILE_TYPE'] = 'std'
    print 'writing >>> '+inps.outfile_std
    writefile.write(std, atr, inps.outfile_std)
    
    print 'Done.'
    return inps.outfile
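
The inversion above fits ts = v*t + c per pixel via the normal equations; an equivalent toy sketch with a synthetic 2 cm/yr series (all numbers made up):

import numpy as np
t = np.array([2006.5, 2007.0, 2007.5, 2008.0])  # decimal years, made up
ts = 0.02 * (t - t[0])                          # synthetic displacement in m, 2 cm/yr
B = np.ones([len(t), 2])
B[:, 0] = t
x = np.dot(np.linalg.pinv(B), ts)               # pinv(B) equals (B^T B)^-1 B^T here
print 'estimated velocity (m/yr):', x[0]        # ~0.02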
Example No. 6
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = sorted(h5file[k].keys())
        epochNum  = len(epochList)
        print 'number of epochs: '+str(epochNum)
        
        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has different epoch number '+\
                  'from input file.'
            print 'Reference list epoch number: '+str(len(refList))
            print 'Input file     epoch number: '+str(epochNum)
            sys.exit(1)
  
        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName

    ## Loop
    if k == 'timeseries':
        for i in range(epochNum):
            epoch = epochList[i]
            print epoch
            data = h5file[k].get(epoch)[:]
            
            data -= refList[i]
  
            dset = group.create_dataset(epoch, data=data, compression='gzip')

        atr = seed_attributes(atr, ref_x, ref_y)
        for key, value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr  = h5file[k][epoch].attrs

            data -= refList[i]
            atr  = seed_attributes(atr,ref_x,ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():
                gg.attrs[key] = value

            ut.print_progress(i+1,epochNum,'seeding:',epoch)
  
    ##### Single Dataset File
    else:
        print 'writing >>> '+outName
        data,atr = readfile.read(File)
        data -= refList
        atr  = seed_attributes(atr,ref_x,ref_y)
        writefile.write(data,atr,outName)
  
    ##### End & Cleaning
    try:
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
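
A usage sketch for seed_file_reference_value(); the file names, reference values and pixel location are illustrative assumptions:

refList = [0.12, -0.34, 0.05]   # hypothetical reference phase, one value per epoch
outName = seed_file_reference_value('unwrapIfgram.h5', 'Seeded_unwrapIfgram.h5',
                                    refList, ref_y=150, ref_x=300)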
Example No. 7
def main(argv):

    ## Default value
    phase_velocity = 'no'  # 'no' means use 'phase history'
    update_timeseries = 'yes'

    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(
                argv, 'hf:F:o:v:', ['phase-velocity', 'no-timeseries-update'])
        except getopt.GetoptError:
            print 'Error in reading input options!'
            usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                usage()
                sys.exit()
            elif opt == '-f':
                timeSeriesFile = arg
            elif opt == '-F':
                igramsFile = arg
            elif opt == '-o':
                outname = arg
            elif opt == '--phase-velocity':
                phase_velocity = 'yes'
            elif opt == '--no-timeseries-update':
                update_timeseries = 'no'

    elif len(sys.argv) == 2:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit(1)
        else:
            timeSeriesFile = argv[0]
    else:
        usage()
        sys.exit(1)

    try:
        outname
    except:
        outname = timeSeriesFile.replace('.h5', '') + '_demCor.h5'

    ##### Read Time Series
    #print '\n*************** Topographic Error Correction ****************'
    print "Loading time series: " + timeSeriesFile
    atr = readfile.read_attribute(timeSeriesFile)
    h5timeseries = h5py.File(timeSeriesFile, 'r')
    dateList = sorted(h5timeseries['timeseries'].keys())
    lt = len(dateList)
    print 'number of epochs: ' + str(lt)

    dateIndex = {}
    for ni in range(len(dateList)):
        dateIndex[dateList[ni]] = ni

    nrows = int(atr['FILE_LENGTH'])
    ncols = int(atr['WIDTH'])
    timeseries = np.zeros((len(dateList), nrows * ncols), np.float32)
    for i in range(lt):
        date = dateList[i]
        ut.print_progress(i + 1, lt, prefix='loading:', suffix=date)
        d = h5timeseries['timeseries'].get(date)[:]
        timeseries[dateIndex[date]][:] = d.flatten('F')
    del d
    h5timeseries.close()
    print '**************************************'

    ##### Temporal Baseline
    print 'read temporal baseline'
    tbase, date_dict = ptime.date_list2tbase(dateList)
    tbase = np.array(tbase).reshape(lt, 1)

    ##### Perpendicular Baseline
    try:
        Bp = [float(i) for i in atr['P_BASELINE_TIMESERIES'].split()]
        Bp = np.array(Bp).reshape(lt, 1)
    except:
        print 'Cannot find P_BASELINE_TIMESERIES from timeseries file.'
        print 'Trying to calculate it from interferograms file'
        try:
            Bp = ut.Baseline_timeseries(igramsFile)
            Bp = np.array(Bp).reshape(lt, 1)
        except:
            print 'Error in calculating baseline time series!'
            sys.exit(1)
    Bp_v = (Bp[1:lt] - Bp[0:lt - 1]) / (tbase[1:lt] - tbase[0:lt - 1])

    ##### Cubic Temporal Deformation Model
    ## Formula (10) in (Fattahi and Amelung, 2013, TGRS)
    if phase_velocity == 'yes':
        print 'using phase velocity history'
        M1 = np.ones((lt - 1, 1))
        M2 = (tbase[1:lt] + tbase[0:lt - 1]) / 2
        M3 = (tbase[1:lt]**2 + tbase[1:lt] * tbase[0:lt - 1] +
              tbase[0:lt - 1]**2) / 6
        M = np.hstack((M1, M2, M3))
    else:
        print 'using phase history'
        M = np.hstack((.5 * tbase**2, tbase, np.ones((lt, 1))))

    ## Testing
    #teta = (tetaN+tetaF)/2
    #r = (rN+rF)/2
    #teta=19.658799999999999*np.pi/180
    #r=846848.2
    #Bperp=1000*np.random.random((lt,1))

    ##### Range and Look Angle
    near_range = float(atr['STARTING_RANGE1'])
    dR = float(atr['RANGE_PIXEL_SIZE'])
    r = float(atr['EARTH_RADIUS'])
    H = float(atr['HEIGHT'])
    far_range = near_range + dR * (ncols - 1)
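    # Law of cosines in the (satellite, Earth center, target) triangle:
    #   cos(pi - look_angle) = (r**2 + R**2 - (r+H)**2) / (2*r*R)
    # with r = Earth radius, H = satellite altitude, R = slant range,
    # hence the pi - arccos(...) form below.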
    incidence_n = np.pi - np.arccos(
        (r**2 + near_range**2 - (r + H)**2) / (2 * r * near_range))
    incidence_f = np.pi - np.arccos(
        (r**2 + far_range**2 - (r + H)**2) / (2 * r * far_range))

    various_range = 'yes'
    if various_range == 'yes':
        range_x = np.linspace(near_range,
                              far_range,
                              num=ncols,
                              endpoint=False)
        look_angle_x = np.linspace(incidence_n,
                                   incidence_f,
                                   num=ncols,
                                   endpoint=False)
    else:
        print 'using center range and look angle to represent the whole area'
        center_range = (near_range + far_range) / 2
        center_look_angle = np.pi - np.arccos(
            (r**2 + center_range**2 - (r + H)**2) / (2 * r * center_range))
        range_x = np.tile(center_range, ncols)
        look_angle_x = np.tile(center_look_angle, ncols)
    #C1_v = Bp_v / (center_range * np.sin(center_look_angle))
    #C1   = Bp   / (center_range * np.sin(center_look_angle))
    #timeseries_v = (timeseries[1:lt,:] - timeseries[0:lt-1,:]) / (tbase[1:lt] - tbase[0:lt-1])

    ##### Inversion column by column
    print 'inverting with L2-norm minimization (unweighted least squares)...'
    dz = np.zeros([1, nrows * ncols])

    for i in range(ncols):
        ## Design Matrix Inversion
        C1_v = Bp_v / (range_x[i] * np.sin(look_angle_x[i]))
        C1 = Bp / (range_x[i] * np.sin(look_angle_x[i]))
        if phase_velocity == 'yes':
            C = np.hstack((M, C1_v))
        else:
            C = np.hstack((M, C1))

        #print '    rank of the design matrix : '+str(np.linalg.matrix_rank(C))
        #if np.linalg.matrix_rank(C) == 4:  print '    design matrix has full rank'
        Cinv = np.linalg.pinv(C)

        ## (Phase) Velocity History
        ts_x = timeseries[:, i * nrows:(i + 1) * nrows]
        ts_xv = (ts_x[1:lt, :] - ts_x[0:lt - 1, :]) / (tbase[1:lt] -
                                                       tbase[0:lt - 1])

        ## DEM error
        if phase_velocity == 'yes':
            par = np.dot(Cinv, ts_xv)
        else:
            par = np.dot(Cinv, ts_x)
        dz_x = par[3].reshape((1, nrows))

        ## Update DEM error matrix and timeseries matrix
        dz[0][i * nrows:(i + 1) * nrows] = dz_x
        timeseries[:, i * nrows:(i + 1) * nrows] -= np.dot(C1, dz_x)

        ut.print_progress(i + 1, ncols)

    #dz[0][:] = par[3][:]
    dz = np.reshape(dz, [nrows, ncols], order='F')

    ########## Output - DEM error #######################
    #print '**************************************'
    #print 'writing DEM_error.hgt'
    #writefile.write_float32(dz,'DEM_error.hgt')
    #f = open('DEM_error.hgt.rsc','w')
    #f.write('FILE_LENGTH       '+str(int(nrows))+'\n')
    #f.write('WIDTH             '+str(int(ncols))+'\n')
    #print '**************************************'

    h5fileDEM = 'DEM_error.h5'
    print 'writing >>> ' + h5fileDEM
    h5rmse = h5py.File(h5fileDEM, 'w')
    group = h5rmse.create_group('dem')
    dset = group.create_dataset('dem',
                                data=dz,
                                compression='gzip')
    for key, value in atr.iteritems():
        group.attrs[key] = value
    group.attrs['UNIT'] = 'm'
    print '**************************************'

    ########### Output - Corrected Time Series ##########
    if update_timeseries == 'yes':
        print 'writing >>> ' + outname
        print 'number of dates: ' + str(len(dateList))

        h5timeseriesDEMcor = h5py.File(outname, 'w')
        group = h5timeseriesDEMcor.create_group('timeseries')
        for i in range(lt):
            date = dateList[i]
            ut.print_progress(i + 1, lt, prefix='writing:', suffix=date)
            d = np.reshape(timeseries[i][:], [nrows, ncols], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
        #for date in dateList:
        #    print date
        #    if not date in h5timeseriesDEMcor['timeseries']:
        #        d = np.reshape(timeseries[dateIndex[date]][:],[nrows,ncols],order='F')
        #        dset = group.create_dataset(date, data=d, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5timeseriesDEMcor.close()
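
The function above is a script-style entry point; a typical invocation sketch (the script name is an assumption):

import sys
# e.g.:  dem_error.py -f timeseries.h5 -F Seeded_unwrapIfgram.h5 --phase-velocity
if __name__ == '__main__':
    main(sys.argv[1:])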