Code example #1
0
File: variance.py  Project: ymcmrs/MintPy
def structure_function(data,
                       lat,
                       lon,
                       step=5e3,
                       min_pair_num=100e3,
                       print_msg=True):
    """Compute the empirical structure function of *data*.

    For every pair of samples the squared difference and the inter-sample
    distance are collected, then binned by distance via bin_variance().

    Parameters
    ----------
    data : 1D np.ndarray of sample values.
    lat / lon : coordinate arrays passed through to get_distance().
    step : float, distance bin width forwarded to bin_variance().
    min_pair_num : number, minimum pair count per bin, forwarded on.
    print_msg : bool, show a progress bar while looping over samples.

    Returns
    -------
    (bin_dist, bin_struct_func, bin_struct_func_std) from bin_variance().
    """
    n = len(data)
    dist_all = np.zeros(n * n)
    var_all = np.zeros(n * n)

    pbar = ptime.progress_bar(maxValue=n) if print_msg else None
    for row in range(n):
        # one row of the full n x n pair matrix at a time
        sl = slice(row * n, (row + 1) * n)
        dist_all[sl] = get_distance(lat, lon, row)
        var_all[sl] = (data - data[row]) ** 2
        if pbar is not None:
            pbar.update(row + 1, every=10)
    if pbar is not None:
        pbar.close()

    return bin_variance(dist_all,
                        var_all,
                        step=step,
                        min_pair_num=min_pair_num,
                        print_msg=print_msg)
Code example #2
0
File: variance.py  Project: ymcmrs/MintPy
def bin_variance(distance,
                 variance,
                 step=5e3,
                 min_pair_num=100e3,
                 print_msg=True):
    """Bin pairwise variances by distance.

    Parameters
    ----------
    distance : 1D np.ndarray, pairwise distances (same length as variance).
    variance : 1D np.ndarray, squared difference for each pair.
    step : float, bin width in the same unit as distance; each bin is the
        open window (x - step/2, x + step/2) around bin center x.
    min_pair_num : number, minimum pair count for a bin to be trusted;
        output is truncated just before the last bin exceeding this count
        (same slicing as the original implementation).
    print_msg : bool, show a progress bar.

    Returns
    -------
    (bin_dist, bin_var, bin_var_std) : three 1D np.ndarray.  Empty arrays
        when no bin exceeds min_pair_num.
    """
    x_steps = np.arange(0, np.max(distance), step)
    num_step = len(x_steps)
    # NaN marks empty bins (same value np.mean([]) used to produce,
    # but without the RuntimeWarning)
    var = np.full(num_step, np.nan)
    var_std = np.full(num_step, np.nan)
    p_num = np.zeros(num_step)

    if print_msg:
        prog_bar = ptime.progress_bar(maxValue=num_step)
    for i, x in enumerate(x_steps):
        # open window of width `step` centered on x (both ends exclusive)
        idx = (distance > max(0, x - step / 2.)) * (distance < x + step / 2.)
        p_num[i] = np.sum(idx)
        if p_num[i] > 0:
            var[i] = np.mean(variance[idx])
            var_std[i] = np.std(variance[idx])
        if print_msg:
            prog_bar.update(i + 1, every=10)
    if print_msg:
        prog_bar.close()

    # BUGFIX: the original `int(max(np.argwhere(...)))` raised ValueError
    # (max of empty sequence) when NO bin reached min_pair_num; return
    # empty arrays in that case instead.
    reliable = np.flatnonzero(p_num > min_pair_num)
    max_step_idx = int(reliable.max()) if reliable.size else 0
    return (x_steps[0:max_step_idx],
            var[0:max_step_idx],
            var_std[0:max_step_idx])
Code example #3
0
File: variance.py  Project: hfattahi/PySAR
def structure_function(data, lat, lon, step=5e3, min_pair_num=100e3, print_msg=True):
    """Empirical structure function: pairwise squared differences binned by distance.

    Builds the full distance / squared-difference cloud over all sample pairs
    (via get_distance) and hands it to bin_variance() for distance binning.
    Returns the (bin_dist, bin_struct_func, bin_struct_func_std) tuple that
    bin_variance() produces.
    """
    num = len(data)
    dist = np.zeros(num * num)
    sq_diff = np.zeros(num * num)
    if print_msg:
        pbar = ptime.progress_bar(maxValue=num)
    row = 0
    while row < num:
        # fill one row of the pair matrix per iteration
        start, stop = row * num, (row + 1) * num
        dist[start:stop] = get_distance(lat, lon, row)
        sq_diff[start:stop] = (data - data[row]) ** 2
        if print_msg:
            pbar.update(row + 1, every=10)
        row += 1
    if print_msg:
        pbar.close()

    return bin_variance(dist, sq_diff,
                        step=step, min_pair_num=min_pair_num, print_msg=print_msg)
Code example #4
0
File: variance.py  Project: hfattahi/PySAR
def bin_variance(distance, variance, step=5e3, min_pair_num=100e3, print_msg=True):
    """Average pairwise variance within distance bins of width `step`.

    distance/variance are flat 1D arrays of equal length (one entry per
    sample pair).  Bin centers run from 0 to max(distance) in `step`
    increments; each bin collects pairs whose distance lies strictly inside
    (x - step/2, x + step/2).  The result is truncated just before the last
    bin whose pair count exceeds `min_pair_num`.  Returns the tuple
    (bin_dist, bin_var, bin_var_std) of 1D arrays (empty when no bin
    qualifies).
    """
    x_steps = np.arange(0, np.max(distance), step)
    num_step = len(x_steps)
    var = np.zeros(x_steps.shape)
    var_std = np.zeros(var.shape)
    p_num = np.zeros(x_steps.shape)

    if print_msg:
        prog_bar = ptime.progress_bar(maxValue=num_step)
    half = step / 2.
    for i in range(num_step):
        x = x_steps[i]
        idx = (distance > max(0, x - half)) * (distance < x + half)
        p_num[i] = np.sum(idx)
        if p_num[i]:
            var[i] = np.mean(variance[idx])
            var_std[i] = np.std(variance[idx])
        else:
            # empty bin: keep NaN (what np.mean([]) returned), but without
            # triggering RuntimeWarnings
            var[i] = var_std[i] = np.nan
        if print_msg:
            prog_bar.update(i+1, every=10)
    if print_msg:
        prog_bar.close()

    # BUGFIX: max() over an empty argwhere() result raised ValueError when
    # no bin reached min_pair_num; fall back to an empty result instead.
    good = np.argwhere(p_num > min_pair_num)
    max_step_idx = int(good.max()) if good.size > 0 else 0
    return x_steps[0:max_step_idx], var[0:max_step_idx], var_std[0:max_step_idx]
Code example #5
0
def main(argv):
    """Correct a time-series file for tropospheric delay using GACOS products.

    Workflow: read attributes from the input file, load/convert the incidence
    angle, locate per-date GACOS ``.ztd`` delay files, project the zenith
    delays into the radar line of sight, reference them to ``ref_date``,
    write them to ``GACOSdelays.h5`` and, for 'timeseries' input, write the
    corrected series to ``<file>_GACOS.h5``.

    Parameters
    ----------
    argv : sequence of command-line arguments (re-parsed via cmdLineParse()).

    Returns
    -------
    str : path of the corrected time-series file (``inps.out_file``).
    """
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        # fall back to the reference pixel given on the command line when the
        # file itself carries no reference info
        if 'ref_y' not in list(atr.keys()) and inps.ref_yx:
            print('No reference info found in input file, use input ref_yx: ' +
                  str(inps.ref_yx))
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    #****reading incidence angle file***/
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle,
                                       datasetName='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        # BUGFIX: original assigned to `inps.inps.inc_angle`, which raised
        # AttributeError and would have left inc_angle a string for np.cos().
        inps.inc_angle = float(inps.inc_angle)
        print('incidence angle: ' + str(inps.inc_angle))
    # cosine of the incidence angle maps zenith delay -> slant (LOS) delay
    cinc = np.cos(inps.inc_angle * np.pi / 180.0)

    #****look up file****/
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list(
            [inps.lookup_file])[0]  #'geomap_32rlks_tight.trans'

    #****GACOS****/
    delay_source = 'GACOS'
    # Get weather directory: default to ../WEATHER/GACOS next to the input
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(
                inps.lookup_file)) + '/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())

    print('Store weather data into directory: ' + inps.GACOS_dir)

    #----get date list-----#
    if not inps.date_list_file:
        print('read date list info from: ' + inps.timeseries_file)
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in list(h5.keys()):
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            # dates come from the DATE12 pairs of the interferogram stack
            date12_list = pnet.get_date12_list(inps.timeseries_file)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Un-support input file type:' + k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print('read date list info from: ' + inps.date_list_file)

    #****checking availability of delays****/
    print('checking availability of delays')
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir + '/' + d + '.ztd'
        delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print('no missing files')
    else:
        print('no. of date files found:', len(delay_file_existed))
        print('no. of dates:', len(date_list))

    #*****Calculating delays***/
    print('calculating delays')

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    # initialise delay cube: one (length x width) layer per acquisition
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    # reading delay files for each epoch and getting delay
    # NOTE(review): if delay files are missing, delay_file_existed is shorter
    # than date_list and the indices below mis-align / raise IndexError --
    # confirm upstream guarantees one delay file per date.
    for i in range(date_num):
        delay_file = delay_file_existed[i]
        date = date_list[i]
        print('calculating delay for date', date)
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)

    print('Delays Calculated')
    # Convert to relative phase delay with respect to the reference date
    try:
        ref_date = atr['ref_date']
    except KeyError:
        # no reference date recorded: default to the first acquisition
        ref_date = date_list[0]
    print('convert to relative phase delay with reference date: ' + ref_date)
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = 'GACOSdelays' + '.h5'
    print('writing >>> %s' % (tropFile))
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print('number of acquisitions: ' + str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    # Write Attributes
    for key, value in atr.items():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(
                inps.timeseries_file)[0] + '_' + 'GACOS' + '.h5'
        print('writing trop corrected timeseries file %s' % (inps.out_file))
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print('number of acquisitions: ' + str(date_num))
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            print(date)
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date,
                                       data=ts - trop_ts[i],
                                       compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key, value in atr.items():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print('delays written to %s' % (inps.out_file))

    print('finished')
    return inps.out_file
Code example #6
0
File: tropo_gacos.py  Project: hfattahi/PySAR
def main(argv):
    """Apply GACOS tropospheric delay correction to a PySAR time-series.

    Computes slant-range tropospheric delays from per-date GACOS ``.ztd``
    files, writes them to ``GACOSdelays.h5`` and, for 'timeseries' input,
    writes the corrected series to ``<file>_GACOS.h5``.
    Returns the path of the output file (``inps.out_file``).
    """
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        # use the command-line reference pixel when the file has none
        if 'ref_y' not in list(atr.keys()) and inps.ref_yx:
            print('No reference info found in input file, use input ref_yx: '+str(inps.ref_yx))
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    #****reading incidence angle file***/
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle, datasetName='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        # BUGFIX: original wrote `inps.inps.inc_angle = ...` (AttributeError)
        inps.inc_angle = float(inps.inc_angle)
        print('incidence angle: '+str(inps.inc_angle))
    # cosine of incidence angle: zenith delay -> slant (LOS) delay
    cinc = np.cos(inps.inc_angle*np.pi/180.0)

    #****look up file****/
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list([inps.lookup_file])[0] #'geomap_32rlks_tight.trans'

    #****GACOS****/
    delay_source = 'GACOS'
    # Get weather directory: default to ../WEATHER/GACOS next to the input
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(inps.lookup_file))+'/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())

    print('Store weather data into directory: '+inps.GACOS_dir)

    #----get date list-----#
    if not inps.date_list_file:
        print('read date list info from: '+inps.timeseries_file)
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in list(h5.keys()):
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            # derive the date list from the DATE12 pairs of the stack
            date12_list = pnet.get_date12_list(inps.timeseries_file)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Un-support input file type:'+k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print('read date list info from: '+inps.date_list_file)

    #****checking availability of delays****/
    print('checking availability of delays')
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir+'/'+d+'.ztd'
        delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print('no missing files')
    else:
        print('no. of date files found:', len(delay_file_existed))
        print('no. of dates:', len(date_list))

    #*****Calculating delays***/
    print('calculating delays')

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    # initialise delay cube: one layer per acquisition date
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    # reading delay files for each epoch and getting delay
    # NOTE(review): when delay files are missing, delay_file_existed is
    # shorter than date_list -- indices below would mis-align; confirm
    # upstream guarantees one file per date.
    for i in range(date_num):
        delay_file = delay_file_existed[i]
        date = date_list[i]
        print('calculating delay for date', date)
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)

    print('Delays Calculated')
    # Convert to relative phase delay w.r.t. the reference date
    try:
        ref_date = atr['ref_date']
    except KeyError:  # no reference date in attributes: use the first date
        ref_date = date_list[0]
    print('convert to relative phase delay with reference date: '+ref_date)
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = 'GACOSdelays'+'.h5'
    print('writing >>> %s' % (tropFile))
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print('number of acquisitions: '+str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()

    # Write Attributes
    for key, value in atr.items():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+'GACOS'+'.h5'
        print('writing trop corrected timeseries file %s' % (inps.out_file))
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print('number of acquisitions: '+str(date_num))
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            print(date)
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date, data=ts-trop_ts[i], compression='gzip')
            prog_bar.update(i+1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key, value in atr.items():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print('delays written to %s' % (inps.out_file))

    print('finished')
    return inps.out_file