Example 1
def run_or_skip(iono_file, grib_files, dis_file, geom_file):
    print('update mode: ON')
    print('output file: {}'.format(iono_file))
    flag = 'skip'

    # check existence and modification time
    if ut.run_or_skip(out_file=iono_file, in_file=grib_files, print_msg=False) == 'run':
        flag = 'run'
        print('1) output file either does NOT exist or is NOT newer than all IONEX files.')

    else:
        print('1) output file exists and is newer than all IONEX files.')

        # check dataset size in space / time
        ds_size_dis = get_dataset_size(dis_file)
        ds_size_ion = get_dataset_size(iono_file)
        date_list_dis = timeseries(dis_file).get_date_list()
        date_list_ion = timeseries(iono_file).get_date_list()
        if ds_size_ion != ds_size_dis or any(x not in date_list_ion for x in date_list_dis):
            flag = 'run'
            print(f'2) output file does NOT have the same len/wid as the geometry file {geom_file} or does NOT contain all dates')
        else:
            print('2) output file has the same len/wid as the geometry file and contains all dates')

            # check if output file is fully written
            with h5py.File(iono_file, 'r') as f:
                if np.all(f['timeseries'][-1,:,:] == 0):
                    flag = 'run'
                    print('3) output file is NOT fully written.')
                else:
                    print('3) output file is fully written.')

    # result
    print('run or skip: {}'.format(flag))
    return flag
def read_date_info(inps):
    """Read dates used in the estimation and its related info.
    Parameters: inps - Namespace
    Returns:    inps - Namespace
    """
    if inps.key == 'timeseries':
        tsobj = timeseries(inps.timeseries_file)
    elif inps.key == 'giantTimeseries':
        tsobj = giantTimeseries(inps.timeseries_file)
    elif inps.key == 'HDFEOS':
        tsobj = HDFEOS(inps.timeseries_file)
    tsobj.open()
    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)

    # exclude dates without obs data [for offset time-series only for now]
    if os.path.basename(inps.timeseries_file).startswith('timeseriesRg'):
        date_list = timeseries(inps.timeseries_file).get_date_list()
        data, atr = readfile.read(inps.timeseries_file)
        flag = np.nansum(data, axis=(1, 2)) == 0
        flag[date_list.index(atr['REF_DATE'])] = 0
        if np.sum(flag) > 0:
            print('number of empty dates to exclude: {}'.format(np.sum(flag)))
            inps.excludeDate += np.array(date_list)[flag].tolist()
            inps.excludeDate = sorted(list(set(inps.excludeDate)))

    # Date used for estimation inps.dateList
    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]
    inps.numDate = len(inps.dateList)
    inps.startDate = inps.dateList[0]
    inps.endDate = inps.dateList[-1]
    print('-' * 50)
    print('dates from input file: {}\n{}'.format(tsobj.numDate,
                                                 tsobj.dateList))
    print('-' * 50)
    if len(inps.dateList) == len(tsobj.dateList):
        print('using all dates to calculate the velocity')
    else:
        print('dates used to estimate the velocity: {}\n{}'.format(
            inps.numDate, inps.dateList))
    print('-' * 50)

    # flag array for ts data reading
    inps.dropDate = np.array(
        [i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)

    # output file name
    if not inps.outfile:
        fbase = os.path.splitext(os.path.basename(inps.timeseries_file))[0]
        outname = 'velocity'
        if inps.key == 'giantTimeseries':
            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
            outname = prefix + outname
        elif fbase in ['timeseriesRg', 'timeseriesAz']:
            suffix = fbase.split('timeseries')[-1]
            outname = outname + suffix
        outname += '.h5'
        inps.outfile = outname

    return inps
Example 3
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
    print('-'*50)
    print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
    print('-'*50)
    atr = readfile.read_attribute(fname)

    # Check Sensor Type
    platform = atr['PLATFORM']
    print('platform: '+platform)
    if platform.lower() not in ['env', 'envisat']:
        print('No need to correct LOD for '+platform)
        return

    # output file name
    if not out_file:
        out_file = '{}_LODcor{}'.format(os.path.splitext(fname)[0], os.path.splitext(fname)[1])

    # Get LOD ramp rate from empirical model
    if not rg_dist_file:
        print('calculate range distance from file metadata')
        rg_dist = get_relative_range_distance(atr)
    else:
        print('read range distance from file: %s' % (rg_dist_file))
        rg_dist = readfile.read(rg_dist_file, datasetName='slantRangeDistance', print_msg=False)[0]
        rg_dist -= rg_dist[int(atr['REF_Y']), int(atr['REF_X'])]
    ramp_rate = np.array(rg_dist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input fname
    range2phase = -4*np.pi / float(atr['WAVELENGTH'])
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        # read
        obj = timeseries(fname)
        obj.open()
        data = obj.read()

        # correct LOD
        diff_year = np.array(obj.yearList)
        diff_year -= diff_year[obj.refIndex]
        for i in range(data.shape[0]):
            data[i, :, :] -= ramp_rate * diff_year[i]

        # write
        obj_out = timeseries(out_file)
        obj_out.write2hdf5(data, refFile=fname)

    elif k in ['.unw']:
        data, atr = readfile.read(fname)

        dates = ptime.yyyymmdd2years(ptime.yyyymmdd(atr['DATE12'].split('-')))
        dt = dates[1] - dates[0]
        data -= ramp_rate * range2phase * dt

        writefile.write(data, out_file=out_file, metadata=atr)
    else:
        print('No need to correct for LOD for %s file' % (k))
    return out_file
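# A toy illustration of the empirical LOD ramp used above, with made-up
# range distances; the 3.87e-7 /yr constant comes from the function itself
# (Marinkovic and Larsen, 2013).
import numpy as np

rg_dist = np.array([0., 10000., 20000.], np.float32)  # m, relative to the reference pixel
ramp_rate = rg_dist * 3.87e-7                          # range drift rate in m/yr
print(ramp_rate * 2.5)                                 # ramp after 2.5 years: ~[0, 0.0097, 0.0194] m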
Example 4
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries data
    obj = timeseries(inps.timeseries_file)
    obj.open()
    ts_data = obj.read()
    inps.date_list = list(obj.dateList)

    # read topographic data (DEM)
    dem = read_topographic_data(inps.geom_file, obj.metadata)

    # estimate phase/elevation ratio parameters
    X = estimate_phase_elevation_ratio(dem, ts_data, inps)

    # correct trop delay in timeseries
    trop_data = estimate_tropospheric_delay(dem, X, obj.metadata)
    mask = ts_data == 0.
    ts_data -= trop_data
    ts_data[mask] = 0.

    # write time-series file
    metadata = dict(obj.metadata)
    metadata['mintpy.troposphericDelay.polyOrder'] = str(inps.poly_order)
    if not inps.outfile:
        inps.outfile = '{}_tropHgt.h5'.format(os.path.splitext(inps.timeseries_file)[0])
    writefile.write(ts_data, out_file=inps.outfile, metadata=metadata, ref_file=inps.timeseries_file)
    return inps.outfile
Example 5
def read_timeseries_yx(timeseries_file, y, x, ref_yx=None):
    '''Read time-series displacement on point (y,x) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        y/x : int, row/column number of point of interest
    Output:
        dis_ts : list of float, displacement time-series of point of interest
    '''
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    dis_ts = []

    if k in ['GIANT_TS']:
        h5 = h5py.File(timeseries_file, 'r')
        date_list = [dt.fromordinal(int(i)).strftime('%Y%m%d') for i in h5['dates'][:].tolist()]
        dname = [i for i in ['rawts','recons'] if i in list(h5.keys())][0]
        dis_ts = h5[dname][:,y,x]
        if ref_yx is not None:
            dis_ts -= h5[dname][:,ref_yx[0],ref_yx[1]]
        h5.close()
    else:
        box = [x, y, x+1, y+1]
        dis_ts = timeseries(timeseries_file).read(box=box, print_msg=False)
        #date_list = list(h5[k].keys())
        #for date in date_list:
        #    dis = h5[k].get(date)[y,x]
        #    if inps.ref_yx:
        #        dis -= h5[k].get(date)[ref_yx[0], ref_yx[1]]
        #    dis_ts.append(dis)
        #dis_ts = np.array(dis_ts)

    return dis_ts
Example 6
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # --update option
    if inps.update_mode and run_or_skip(inps) == 'skip':
        return inps.outfile

    start_time = time.time()
    inps = read_geometry(inps)

    ## model setup info
    msg = '-'*80
    msg += '\ncorrect topographic phase residual (DEM error) (Fattahi & Amelung, 2013, IEEE-TGRS)'
    msg += '\nordinary least squares (OLS) inversion with L2-norm minimization on: phase'
    if inps.phaseVelocity:
        msg += ' velocity'
    if inps.rangeDist.size != 1:
        msg += ' (pixel-wise)'
    msg += "\ntemporal deformation model: polynomial order = {}".format(inps.polyOrder)
    if inps.stepFuncDate:
        msg += "\ntemporal deformation model: step functions at {}".format(inps.stepFuncDate)
    msg += '\n' + '-'*80
    print(msg)

    A_def = get_design_matrix4time_func(date_list=timeseries(inps.timeseries_file).get_date_list(),
                                        poly_order=inps.polyOrder,
                                        step_func_dates=inps.stepFuncDate)

    inps = correct_dem_error(inps, A_def)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
    return inps.outfile
Example 7
    def open(self):
        atr = readfile.read_attribute(self.insar_file)
        k = atr['FILE_TYPE']
        if k == 'timeseries':
            ts_obj = timeseries(self.insar_file)
        elif k == 'giantTimeseries':
            ts_obj = giantTimeseries(self.insar_file)
        else:
            raise ValueError('Un-supported time-series file: {}'.format(k))
        ts_obj.open(print_msg=False)
        self.metadata = dict(ts_obj.metadata)
        self.num_date = ts_obj.numDate
        # remove time info from insar_datetime to be consistent with gps_datetime
        self.insar_datetime = np.array([i.replace(hour=0, minute=0, second=0, microsecond=0)
                                        for i in ts_obj.times])

        # default start/end
        if self.start_date is None:
            self.start_date = (ts_obj.times[0] - relativedelta(months=1)).strftime('%Y%m%d')
        if self.end_date is None:
            self.end_date = (ts_obj.times[-1] + relativedelta(months=1)).strftime('%Y%m%d')

        # default min_ref_date
        if self.min_ref_date is None:
            self.min_ref_date = ts_obj.times[5].strftime('%Y%m%d')
        elif self.min_ref_date not in ts_obj.dateList:
            raise ValueError('input min_ref_date {} does not exist in insar file {}'.format(
                self.min_ref_date, self.insar_file))

        self.read_gps()
        self.read_insar()
        self.calculate_rmse()
        return
Example 8
    def run_or_skip(grib_files, tropo_file, geom_file):
        print('update mode: ON')
        print('output file: {}'.format(tropo_file))
        flag = 'skip'

        # check existence and modification time
        if ut.run_or_skip(out_file=tropo_file, in_file=grib_files, print_msg=False) == 'run':
            flag = 'run'
            print('1) output file either does NOT exist or is NOT newer than all GRIB files.')

        else:
            print('1) output file exists and is newer than all GRIB files.')

            # check dataset size in space / time
            date_list = [str(re.findall(r'\d{8}', os.path.basename(i))[0]) for i in grib_files]
            if (get_dataset_size(tropo_file) != get_dataset_size(geom_file)
                    or any(i not in timeseries(tropo_file).get_date_list() for i in date_list)):
                flag = 'run'
                print('2) output file does NOT have the same len/wid as the geometry file {} or does NOT contain all dates'.format(geom_file))
            else:
                print('2) output file has the same len/wid as the geometry file and contains all dates')

                # check if output file is fully written
                with h5py.File(tropo_file, 'r') as f:
                    if np.all(f['timeseries'][-1,:,:] == 0):
                        flag = 'run'
                        print('3) output file is NOT fully written.')
                    else:
                        print('3) output file is fully written.')

        # result
        print('run or skip: {}'.format(flag))
        return flag
Example 9
def design_matrix4deformation(inps):
    # Date Info
    ts_obj = timeseries(inps.timeseries_file)
    ts_obj.open()

    # Design matrix - temporal deformation model
    print('-'*80)
    print('correct topographic phase residual (DEM error) (Fattahi & Amelung, 2013, IEEE-TGRS)')
    msg = 'ordinary least squares (OLS) inversion with L2-norm minimization on: phase'
    if inps.phaseVelocity:
        msg += ' velocity'
    if inps.rangeDist.size != 1:
        msg += ' (pixel-wise)'
    print(msg)

    tbase = np.array(ts_obj.tbase, np.float32) / 365.25

    # 1. Polynomial - 2D matrix in size of (numDate, polyOrder+1)
    print("temporal deformation model: polynomial order = "+str(inps.polyOrder))
    A_def = np.ones((ts_obj.numDate, 1), np.float32)
    for i in range(inps.polyOrder):
        Ai = np.array(tbase**(i+1) / gamma(i+2), np.float32).reshape(-1, 1)
        A_def = np.hstack((A_def, Ai))

    # 2. Step function - 2D matrix in size of (numDate, len(inps.stepFuncDate))
    if inps.stepFuncDate:
        print("temporal deformation model: step functions at "+str(inps.stepFuncDate))
        t_steps = ptime.yyyymmdd2years(inps.stepFuncDate)
        t = np.array(ptime.yyyymmdd2years(ts_obj.dateList))
        for t_step in t_steps:
            Ai = np.array(t > t_step, np.float32).reshape(-1, 1)
            A_def = np.hstack((A_def, Ai))
    print('-'*80)
    return A_def
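# Stand-alone sketch of the polynomial block above, with a toy time vector:
# scipy's gamma(i+2) equals (i+1)!, so column i+1 holds tbase**(i+1) / (i+1)!.
import numpy as np
from scipy.special import gamma

tbase = np.array([0.0, 0.1, 0.2, 0.4], np.float32)  # years since the first date
poly_order = 2
A_def = np.ones((tbase.size, 1), np.float32)
for i in range(poly_order):
    Ai = (tbase**(i+1) / gamma(i+2)).astype(np.float32).reshape(-1, 1)
    A_def = np.hstack((A_def, Ai))
print(A_def)  # columns: [1, t, t**2 / 2]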
Example 10
def read_timeseries_yx(y, x, ts_file, ref_y=None, ref_x=None, win_size=1):
    """ Read time-series of one pixel with input y/x
    Parameters: y/x         : int, row/column number of interest
                ts_file     : string, filename of time-series HDF5 file
                ref_y/x     : int, row/column number of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects, i.e. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # read date
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    print('input y / x: {} / {}'.format(y, x))
    box = (x, y, x + 1, y + 1)
    dis = readfile.read(ts_file, box=box)[0]
    if win_size != 1:
        buf = int(win_size / 2)
        box_win = (x - buf, y - buf, x + buf + 1, y + buf + 1)
        dis_win = readfile.read(ts_file, box=box_win)[0]
        dis = np.nanmean(dis_win.reshape((obj.numDate, -1)), axis=1)

    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
        dis -= readfile.read(ts_file, box=ref_box)[0]

    #start at zero
    dis -= dis[0]
    return dates, dis
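# Hypothetical call of read_timeseries_yx() above; the file name and the
# pixel / reference coordinates are placeholders.
dates, dis = read_timeseries_yx(y=150, x=200, ts_file='timeseries.h5',
                                ref_y=100, ref_x=100, win_size=3)
for d, v in zip(dates, dis):
    print('{}: {:.4f} m'.format(d.strftime('%Y-%m-%d'), v))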
Example 11
def read_geometry(inps):
    ts_obj = timeseries(inps.timeseries_file)
    ts_obj.open(print_msg=False)

    # 2D / 3D geometry
    if inps.geom_file:
        geom_obj = geometry(inps.geom_file)
        geom_obj.open()
        if 'incidenceAngle' not in geom_obj.datasetNames:
            inps.incAngle = ut.incidence_angle(ts_obj.metadata, dimension=0)
            inps.rangeDist = ut.range_distance(ts_obj.metadata, dimension=0)
        else:
            print(('read 2D incidenceAngle,slantRangeDistance from {} file:'
                   ' {}').format(geom_obj.name, os.path.basename(geom_obj.file)))
            inps.incAngle = geom_obj.read(datasetName='incidenceAngle', print_msg=False).flatten()
            inps.rangeDist = geom_obj.read(datasetName='slantRangeDistance', print_msg=False).flatten()
        if 'bperp' in geom_obj.datasetNames:
            print('read 3D bperp from {} file: {} ...'.format(geom_obj.name, os.path.basename(geom_obj.file)))
            dset_list = ['bperp-{}'.format(d) for d in ts_obj.dateList]
            inps.pbase = geom_obj.read(datasetName=dset_list, print_msg=False).reshape((ts_obj.numDate, -1))
            inps.pbase -= np.tile(inps.pbase[ts_obj.refIndex, :].reshape(1, -1), (ts_obj.numDate, 1))
        else:
            print('read mean bperp from {} file'.format(ts_obj.name))
            inps.pbase = ts_obj.pbase.reshape((-1, 1))

    # 0D geometry
    else:
        print('read mean incidenceAngle,slantRangeDistance,bperp value from {} file'.format(ts_obj.name))
        inps.incAngle = ut.incidence_angle(ts_obj.metadata, dimension=0)
        inps.rangeDist = ut.range_distance(ts_obj.metadata, dimension=0)
        inps.pbase = ts_obj.pbase.reshape((-1, 1))

    inps.sinIncAngle = np.sin(inps.incAngle * np.pi / 180.)
    return inps
Example 12
def prep_metadata(ts_file, template=None, print_msg=True):
    """Prepare metadata for HDF-EOS5 file."""
    # read metadata from ts_file
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    meta = dict(ts_obj.metadata)

    # read metadata from template_file, if given
    if template:
        for key, value in template.items():
            if not key.startswith(('mintpy', 'isce')):
                meta[key] = value

    # grab unavco metadata
    unavco_meta = metadata_mintpy2unavco(meta, ts_obj.dateList)
    if print_msg:
        print('## UNAVCO Metadata:')
        print('-----------------------------------------')
        info.print_attributes(unavco_meta)
        print('-----------------------------------------')

    # update metadata from unavco metadata
    meta.update(unavco_meta)
    meta['FILE_TYPE'] = 'HDFEOS'

    return meta
Example 13
def run_2to3_timeseries(py2_file, py3_file):
    """Convert timeseries file from py2-MintPy format to py3-MintPy format"""
    # read data from py2_file
    atr = readfile.read_attribute(py2_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    with h5py.File(py2_file, 'r') as f:
        date_list = list(f['timeseries'].keys())
        num_date = len(date_list)
        ts_data = np.zeros((num_date, length, width), np.float32)
        print('reading time-series ...')
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            ts_data[i, :, :] = f['timeseries/{}'.format(date_list[i])][:]
            prog_bar.update(i + 1, suffix=date_list[i])
        prog_bar.close()

    # prepare metadata
    bperp = np.array([float(i) for i in atr['P_BASELINE_TIMESERIES'].split()],
                     dtype=np.float32)
    dates = np.array(date_list, np.bytes_)
    atr['REF_DATE'] = date_list[0]
    for key in ['P_BASELINE_TIMESERIES', 'P_BASELINE_TOP_TIMESERIES',
                'P_BASELINE_BOTTOM_TIMESERIES']:
        atr.pop(key, None)

    # write to py3_file
    ts_obj = timeseries(py3_file)
    ts_obj.write2hdf5(data=ts_data, dates=dates, bperp=bperp, metadata=atr)
    return py3_file
Example 14
def get_design_matrix4defo(inps):
    """Get the design matrix for ground surface deformation
    Parameters: inps   - namespace
    Returns:    G_defo - 2D np.ndarray in float32 in size of [num_date, num_param]
    """

    # key msg
    msg = '-' * 80
    msg += '\ncorrect topographic phase residual (DEM error) (Fattahi & Amelung, 2013, IEEE-TGRS)'
    msg += '\nordinary least squares (OLS) inversion with L2-norm minimization on: phase'
    if inps.phaseVelocity:
        msg += ' velocity'
    msg += "\ntemporal deformation model: polynomial order = {}".format(
        inps.polyOrder)
    if inps.stepFuncDate:
        msg += "\ntemporal deformation model: step functions at {}".format(
            inps.stepFuncDate)
    msg += '\n' + '-' * 80
    print(msg)

    # get design matrix for temporal deformation model
    model = dict()
    model['polynomial'] = inps.polyOrder
    model['step'] = inps.stepFuncDate
    date_list = timeseries(inps.timeseries_file).get_date_list()
    G_defo = timeseries.get_design_matrix4time_func(date_list, model)

    return G_defo
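# Sketch of the `model` dict consumed by timeseries.get_design_matrix4time_func()
# above, with a made-up 3-date list and one step date; mirrors the call in the
# function body.
date_list = ['20141213', '20141225', '20150118']
model = {'polynomial': 1, 'step': ['20150101']}
G_defo = timeseries.get_design_matrix4time_func(date_list, model)
print(G_defo.shape)  # (num_date, num_param); columns: [1, t, step(20150101)]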
Example 15
def read_timeseries_yx(timeseries_file, y, x, ref_yx=None):
    '''Read time-series displacement on point (y,x) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        y/x : int, row/column number of point of interest
    Output:
        dis_ts : list of float, displacement time-series of point of interest
    '''
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    dis_ts = []

    if k in ['GIANT_TS']:
        h5 = h5py.File(timeseries_file, 'r')
        date_list = [
            dt.fromordinal(int(i)).strftime('%Y%m%d')
            for i in h5['dates'][:].tolist()
        ]
        dname = [i for i in ['rawts', 'recons'] if i in list(h5.keys())][0]
        dis_ts = h5[dname][:, y, x]
        if ref_yx is not None:
            dis_ts -= h5[dname][:, ref_yx[0], ref_yx[1]]
        h5.close()
    else:
        box = [x, y, x + 1, y + 1]
        dis_ts = timeseries(timeseries_file).read(box=box, print_msg=False)
        #date_list = list(h5[k].keys())
        #for date in date_list:
        #    dis = h5[k].get(date)[y,x]
        #    if inps.ref_yx:
        #        dis -= h5[k].get(date)[ref_yx[0], ref_yx[1]]
        #    dis_ts.append(dis)
        #dis_ts = np.array(dis_ts)

    return dis_ts
Example 16
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries data
    obj = timeseries(inps.timeseries_file)
    obj.open()
    ts_data = obj.read()
    inps.date_list = list(obj.dateList)

    # read topographic data (DEM)
    dem = read_topographic_data(inps.geom_file, obj.metadata)

    # estimate phase/elevation ratio parameters
    X = estimate_phase_elevation_ratio(dem, ts_data, inps)

    # correct trop delay in timeseries
    trop_data = estimate_tropospheric_delay(dem, X, obj.metadata)
    mask = ts_data == 0.
    ts_data -= trop_data
    ts_data[mask] = 0.

    # write time-series file
    metadata = dict(obj.metadata)
    metadata['mintpy.troposphericDelay.polyOrder'] = str(inps.poly_order)
    if not inps.outfile:
        inps.outfile = '{}_tropHgt.h5'.format(
            os.path.splitext(inps.timeseries_file)[0])
    writefile.write(ts_data,
                    out_file=inps.outfile,
                    metadata=metadata,
                    ref_file=inps.timeseries_file)
    return inps.outfile
Example 17
def read_ref_date(inps):
    # check input reference date
    if not inps.refDate:
        print('No reference date input, skip this step.')
        return None
    # string of digits, i.e. a date
    elif inps.refDate.isdigit():
        pass
    else:
        if os.path.isfile(inps.refDate):
            print('read reference date from file: ' + inps.refDate)
            if inps.refDate.endswith('.h5'):
                # HDF5 file
                atr = readfile.read_attribute(inps.refDate)
                inps.refDate = atr['REF_DATE']
            else:
                # txt file
                inps.refDate = ptime.read_date_txt(inps.refDate)[0]
        else:
            print('input file {} does not exist, skip this step'.format(inps.refDate))
            return None
    inps.refDate = ptime.yyyymmdd(inps.refDate)
    print('input reference date: {}'.format(inps.refDate))

    # check available dates
    date_list = timeseries(inps.timeseries_file[0]).get_date_list()
    if inps.refDate not in date_list:
        msg = 'input reference date: {} is not found.'.format(inps.refDate)
        msg += '\nAll available dates:\n{}'.format(date_list)
        raise Exception(msg)
    return inps.refDate
Example 18
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    start_time = time.time()

    # download
    date_list = timeseries(inps.dis_file).get_date_list()
    tec_files = download_igs_tec(date_list,
                                 tec_dir=inps.tec_dir,
                                 tec_sol=inps.tec_sol)

    # calculate
    if run_or_skip(inps.iono_file, tec_files, inps.dis_file,
                   inps.geom_file) == 'run':
        calc_iono_ramp_timeseries_igs(
            tec_dir=inps.tec_dir,
            tec_sol=inps.tec_sol,
            interp_method=inps.interp_method,
            ts_file=inps.dis_file,
            geom_file=inps.geom_file,
            iono_file=inps.iono_file,
            rotate_tec_map=inps.rotate_tec_map,
            sub_tec_ratio=inps.sub_tec_ratio,
            update_mode=inps.update_mode,
        )

    ## correct
    #correct_timeseries(dis_file=inps.dis_file,
    #                   iono_file=inps.iono_file,
    #                   cor_dis_file=inps.cor_dis_file)

    m, s = divmod(time.time() - start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return
Example 19
def calculate_temporal_coherence_patch(ifgram_file,
                                       timeseries_file,
                                       box=None,
                                       ifg_num_file=None):
    atr = readfile.read_attribute(timeseries_file)
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # Read timeseries data
    ts_obj = timeseries(timeseries_file)
    ts_obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = ts_obj.read(box=box, print_msg=False).reshape(ts_obj.numDate, -1)
    ts_data = ts_data[1:, :]
    ts_data *= -4 * np.pi / float(atr['WAVELENGTH'])

    # Read ifgram data
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A = stack_obj.get_design_matrix4timeseries(
        stack_obj.get_date12_list(dropIfgram=True))[0]
    print('reading unwrapPhase data from file: {}'.format(ifgram_file))
    ifgram_data = stack_obj.read(datasetName='unwrapPhase',
                                 box=box).reshape(A.shape[0], -1)
    ref_value = stack_obj.get_reference_phase(dropIfgram=True).reshape((-1, 1))
    ifgram_data -= np.tile(ref_value, (1, ifgram_data.shape[1]))

    ifgram_diff = ifgram_data - np.dot(A, ts_data)
    del ts_data

    pixel_num = ifgram_data.shape[1]
    temp_coh = np.zeros((pixel_num), np.float32)
    # (fast) nasty solution, which uses all phase values including the invalid zero phase
    if not ifg_num_file:
        temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff),
                                 axis=0)) / ifgram_diff.shape[0]

    # (slow) same solution as ifgram_inversion.py, considering:
    #   1) invalid zero phase in ifgram
    #   2) design matrix rank deficiency.
    else:
        print(
            'considering different number of interferograms used in network inversion for each pixel'
        )
        ifg_num_map = readfile.read(ifg_num_file, box=box)[0].flatten()
        prog_bar = ptime.progressBar(maxValue=pixel_num)
        for i in range(pixel_num):
            if ifg_num_map[i] > 0:
                idx = ifgram_data[:, i] != 0.
                temp_diff = ifgram_diff[idx, i]
                temp_coh[i] = np.abs(np.sum(np.exp(1j * temp_diff),
                                            axis=0)) / temp_diff.shape[0]
            prog_bar.update(i + 1,
                            every=1000,
                            suffix='{}/{}'.format(i + 1, pixel_num))
        prog_bar.close()

    temp_coh = np.reshape(temp_coh, (box[3] - box[1], box[2] - box[0]))
    return temp_coh
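# The core statistic above on toy numbers: temporal coherence is
# |sum(exp(1j * residual_phase))| / num_ifgram per pixel, approaching 1
# for a self-consistent network.
import numpy as np

ifgram_diff = np.random.randn(30, 5).astype(np.float32) * 0.1  # (num_ifgram, num_pixel)
temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff), axis=0)) / ifgram_diff.shape[0]
print(temp_coh)  # values near 1 = small phase residuals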
Example 20
def get_residual_rms(timeseries_resid_file,
                     mask_file='maskTempCoh.h5',
                     ramp_type='quadratic'):
    """Calculate deramped Root Mean Square in space for each epoch of input timeseries file.
    Parameters: timeseries_resid_file : string, 
                    timeseries HDF5 file, e.g. timeseries_ECMWF_demErrInvResid.h5
                mask_file : string,
                    mask file, e.g. maskTempCoh.h5
                ramp_type : string, 
                    ramp type, e.g. linear, quadratic, no for do not remove ramp
    Returns:    rms_list : list of float,
                    Root Mean Square of deramped input timeseries file
                date_list : list of string in YYYYMMDD format,
                    corresponding dates
                rms_file : string, text file with rms and date info.
    Example:
        import mintpy.utils.utils as ut
        rms_list, date_list = ut.get_residual_rms('timeseriesResidual.h5', 'maskTempCoh.h5')
    """
    # Intermediate files name
    if ramp_type == 'no':
        print('No ramp removal')
        deramped_file = timeseries_resid_file
    else:
        deramped_file = '{}_ramp.h5'.format(
            os.path.splitext(timeseries_resid_file)[0])
    rms_file = os.path.join(
        os.path.dirname(os.path.abspath(deramped_file)),
        'rms_{}.txt'.format(os.path.splitext(os.path.basename(deramped_file))[0]))

    # Get residual RMS text file
    if run_or_skip(out_file=rms_file,
                   in_file=[deramped_file, mask_file],
                   check_readable=False) == 'run':
        if run_or_skip(out_file=deramped_file,
                       in_file=timeseries_resid_file) == 'run':
            if not os.path.isfile(timeseries_resid_file):
                msg = 'Can not find input timeseries residual file: ' + timeseries_resid_file
                msg += '\nRe-run dem_error.py to generate it.'
                raise Exception(msg)
            else:
                print('removing a {} ramp from file: {}'.format(
                    ramp_type, timeseries_resid_file))
                deramped_file = run_deramp(timeseries_resid_file,
                                           ramp_type=ramp_type,
                                           mask_file=mask_file,
                                           out_file=deramped_file)
        print('Calculating residual RMS for each epoch from file: ' +
              deramped_file)
        rms_file = timeseries(deramped_file).timeseries_rms(maskFile=mask_file,
                                                            outFile=rms_file)

    # Read residual RMS text file
    print('read timeseries residual RMS from file: ' + rms_file)
    fc = np.loadtxt(rms_file, dtype=bytes).astype(str)
    rms_list = fc[:, 1].astype(float).tolist()
    date_list = list(fc[:, 0])
    return rms_list, date_list, rms_file
Example 21
def run_or_skip(inps):
    print('-' * 50)
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('1) output file {} NOT found.'.format(inps.outfile))
    else:
        # check if time-series file is partly written using file size
        # since time-series file is not compressed
        with h5py.File(inps.outfile, 'r') as f:
            fsize_ref = f['timeseries'].size * 4
        fsize = os.path.getsize(inps.outfile)
        if fsize <= fsize_ref:
            flag = 'run'
            print('1) output file {} is NOT fully written.'.format(
                inps.outfile))

        else:
            print('1) output file {} already exists.'.format(inps.outfile))

            # check modification time
            infiles = [inps.timeseries_file]
            if inps.geom_file:
                infiles.append(inps.geom_file)
            ti = max(os.path.getmtime(i) for i in infiles)
            to = os.path.getmtime(inps.outfile)
            if ti > to:
                flag = 'run'
                print(
                    '2) output file is NOT newer than input file: {}.'.format(
                        infiles))
            else:
                print('2) output file is newer than input file: {}.'.format(
                    infiles))

    # check configuration
    if flag == 'skip':
        date_list_all = timeseries(inps.timeseries_file).get_date_list()
        inps.excludeDate = read_exclude_date(inps.excludeDate,
                                             date_list_all,
                                             print_msg=False)[1]
        meta = readfile.read_attribute(inps.outfile)
        if any(
                str(vars(inps)[key]) != meta.get(key_prefix + key, 'None')
                for key in configKeys):
            flag = 'run'
            print('3) NOT all key configuration parameters are the same:{}'.
                  format(configKeys))
        else:
            print('3) all key configuration parameters are the same:{}'.format(
                configKeys))

    # result
    print('run or skip: {}.'.format(flag))
    return flag
Example 22
def read_geometry(ts_file, geom_file=None, box=None):
    """Read the following geometry info in 0/2/3D
    Parameters: ts_file       - str, path of time-series file
                geom_file     - str, path of geometry file
                box           - tuple of 4 int for (x0, y0, x1, y1) of the area of interest
    Returns:    sin_inc_angle - 0/2D array, sin(inc_angle)
                range_dist    - 0/2D array, slant range distance in meter
                pbase         - 0/3D array, perp baseline in meter
    """
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)

    # 0/2/3D geometry
    if geom_file:
        geom_obj = geometry(geom_file)
        geom_obj.open()

        # 0/2D incidence angle / slant range distance
        if 'incidenceAngle' not in geom_obj.datasetNames:
            inc_angle = ut.incidence_angle(ts_obj.metadata, dimension=0)
            range_dist = ut.range_distance(ts_obj.metadata, dimension=0)
        else:
            print(
                'read 2D incidenceAngle, slantRangeDistance from {} file: {}'.
                format(geom_obj.name, os.path.basename(geom_obj.file)))
            inc_angle = geom_obj.read(datasetName='incidenceAngle',
                                      box=box,
                                      print_msg=False).flatten()
            range_dist = geom_obj.read(datasetName='slantRangeDistance',
                                       box=box,
                                       print_msg=False).flatten()

        # 0/3D perp baseline
        if 'bperp' in geom_obj.datasetNames:
            print('read 3D bperp from {} file: {} ...'.format(
                geom_obj.name, os.path.basename(geom_obj.file)))
            dset_list = ['bperp-{}'.format(d) for d in ts_obj.dateList]
            pbase = geom_obj.read(datasetName=dset_list,
                                  box=box,
                                  print_msg=False).reshape(
                                      (ts_obj.numDate, -1))
            pbase -= np.tile(pbase[ts_obj.refIndex, :].reshape(1, -1),
                             (ts_obj.numDate, 1))
        else:
            print('read mean bperp from {} file'.format(ts_obj.name))
            pbase = ts_obj.pbase.reshape((-1, 1))

    # 0D geometry
    else:
        print(
            'read mean incidenceAngle, slantRangeDistance, bperp value from {} file'
            .format(ts_obj.name))
        inc_angle = ut.incidence_angle(ts_obj.metadata, dimension=0)
        range_dist = ut.range_distance(ts_obj.metadata, dimension=0)
        pbase = ts_obj.pbase.reshape((-1, 1))

    sin_inc_angle = np.sin(inc_angle * np.pi / 180.)
    return sin_inc_angle, range_dist, pbase
Example 23
def get_datetime_list(ts_file, date_wise_acq_time=False):
    """Prepare exact datetime for each acquisition in the time-series file.

    Parameters: ts_file            - str, path of the time-series HDF5 file
                date_wise_acq_time - bool, use the exact date-wise acquisition time
    Returns:    sensingMid         - list of datetime.datetime objects
    """
    print('\nprepare datetime info for each acquisition')

    ts_file = os.path.abspath(ts_file)
    date_list = timeseries(ts_file).get_date_list()

    proj_dir = os.path.dirname(os.path.dirname(ts_file))
    xml_dirs = [os.path.join(proj_dir, i) for i in ['reference', 'secondarys']]

    # list of existing dataset names
    with h5py.File(ts_file, 'r') as f:
        ds_names = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]

    dt_name = 'sensingMid'
    if dt_name in ds_names:
        # opt 1. read sensingMid if exists
        print('read exact datetime info from /{} in file: {}'.format(
            dt_name, os.path.basename(ts_file)))
        with h5py.File(ts_file, 'r') as f:
            sensingMidStr = [i.decode('utf-8') for i in f[dt_name][:]]

        # convert string to datetime object
        date_str_format = ptime.get_date_str_format(sensingMidStr[0])
        sensingMid = [
            dt.datetime.strptime(i, date_str_format) for i in sensingMidStr
        ]

    elif date_wise_acq_time and all(os.path.isdir(i) for i in xml_dirs):
        # opt 2. read sensingMid in xml files
        print(
            'read exact datetime info in XML files from ISCE-2/topsStack results in directory:',
            proj_dir)
        from mintpy.utils import isce_utils
        sensingMid = isce_utils.get_sensing_datetime_list(
            proj_dir, date_list=date_list)[0]

        # plot
        plot_sensingMid_variation(sensingMid)

    else:
        # opt 3. use constant time of the day for all acquisitions
        msg = 'Use the same time of the day for all acquisitions from CENTER_LINE_UTC\n'
        msg += 'With <= 1 min variation for Sentinel-1A/B for example, this simplification has negligible impact on SET calculation.'
        print(msg)
        atr = readfile.read_attribute(ts_file)
        utc_sec = dt.timedelta(seconds=float(atr['CENTER_LINE_UTC']))
        sensingMid = [
            dt.datetime.strptime(i, '%Y%m%d') + utc_sec for i in date_list
        ]

    return sensingMid
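# Minimal sketch of option 3 above with a made-up CENTER_LINE_UTC value:
# one constant seconds-of-day offset added to every acquisition date.
import datetime as dt

date_list = ['20200101', '20200113']
utc_sec = dt.timedelta(seconds=41234.5)  # hypothetical CENTER_LINE_UTC
sensingMid = [dt.datetime.strptime(i, '%Y%m%d') + utc_sec for i in date_list]
print(sensingMid)  # e.g. [datetime.datetime(2020, 1, 1, 11, 27, 14, 500000), ...]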
Example 24
def read_inps2date_time(inps):
    """Read dates and time info from input arguments.
    Related options: --file OR --date-list, --hour

    Parameters: inps      - Namespace for the input arguments
    Returns:    date_list - list of str, dates in YYYYMMDD format
                hour      - str, hour in 2-digit with zero padding
    """

    # if --file is specified
    if inps.dis_file:
        # 1) ignore --date-list and --hour
        for key in ['date_list', 'hour']:
            if vars(inps)[key] is not None:
                vars(inps)[key] = None
                msg = 'input "{:<10}" is ignored'.format(key)
                msg += ', use info from file {} instead'.format(inps.dis_file)
                print(msg)

        # 2) read dates/time from time-series file
        print('read dates/time info from file: {}'.format(inps.dis_file))
        atr = readfile.read_attribute(inps.dis_file)
        if atr['FILE_TYPE'] == 'timeseries':
            ts_obj = timeseries(inps.dis_file)
            ts_obj.open(print_msg=False)
            inps.date_list = ts_obj.dateList
        else:
            inps.date_list = ptime.yyyymmdd(atr['DATE12'].split('-'))
        inps.hour = closest_weather_model_hour(atr['CENTER_LINE_UTC'],
                                               grib_source=inps.tropo_model)

    # read dates if --date-list is text file
    if len(inps.date_list) == 1 and os.path.isfile(inps.date_list[0]):
        date_file = inps.date_list[0]
        if date_file.startswith('SAFE_'):
            print(
                'read date list and hour info from Sentinel-1 SAFE filenames: {}'
                .format(date_file))
            inps.date_list, inps.hour = safe2date_time(date_file,
                                                       inps.tropo_model)
        else:
            print('read date list from text file: {}'.format(date_file))
            inps.date_list = np.loadtxt(date_file, dtype=bytes,
                                        usecols=(0, )).astype(str).tolist()
            inps.date_list = ptime.yyyymmdd(inps.date_list)

    # at least 2 dates are required (for meaningful calculation)
    if len(inps.date_list) < 2:
        raise AttributeError('input number of dates < 2!')

    # print time info
    if inps.hour is None:
        raise AttributeError('time info (--hour) not found!')
    print('time of closest available product: {}:00 UTC'.format(inps.hour))

    return inps.date_list, inps.hour
Example 25
def calc_iono_ramp_timeseries_igs(tec_dir, tec_sol, interp_method, ts_file, geom_file, iono_file,
                                  rotate_tec_map=True, sub_tec_ratio=None, update_mode=True):
    """Calculate the time-series of 2D ionospheric delay from IGS TEC data.
    Considering the variation of the incidence angle along range direction.

    Parameters: tec_dir   - str, path of the local TEC directory
                ts_file   - str, path of the time-series file
                geom_file - str, path of the geometry file including incidenceAngle data
                iono_file - str, path of output iono ramp time-series file
    Returns:    iono_file - str, path of output iono ramp time-series file
    """
    print("\n------------------------------------------------------------------------------")
    # prepare geometry
    iono_lat, iono_lon = iono.prep_geometry_iono(geom_file, print_msg=True)[1:3]

    # prepare date/time
    date_list = timeseries(ts_file).get_date_list()
    meta = readfile.read_attribute(ts_file)
    utc_sec = float(meta['CENTER_LINE_UTC'])
    h, s = divmod(utc_sec, 3600)
    m, s = divmod(s, 60)
    print('UTC time: {:02.0f}:{:02.0f}:{:02.1f}'.format(h, m, s))

    # read IGS TEC
    vtec_list = []
    print('read IGS TEC file ...')
    print('interpolation method: {}'.format(interp_method))
    prog_bar = ptime.progressBar(maxValue=len(date_list))
    for i, date_str in enumerate(date_list):
        # read zenith TEC
        tec_file = iono.get_igs_tec_filename(tec_dir, date_str, sol=tec_sol)
        vtec = iono.get_igs_tec_value(
            tec_file,
            utc_sec,
            lat=iono_lat,
            lon=iono_lon,
            interp_method=interp_method,
            rotate_tec_map=rotate_tec_map,
        )
        vtec_list.append(vtec)
        prog_bar.update(i+1, suffix=date_str)
    prog_bar.close()

    # TEC --> iono ramp
    vtec2iono_ramp_timeseries(
        date_list=date_list,
        vtec_list=vtec_list,
        geom_file=geom_file,
        iono_file=iono_file,
        sub_tec_ratio=sub_tec_ratio,
        update_mode=update_mode,
    )

    return iono_file
Example 26
def main(argv):
    try:
        timeseries_file = argv[0]
    except:
        usage()
        sys.exit(1)

    obj = timeseries(timeseries_file)
    obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = obj.read(print_msg=False)

    print('calculate the 1st derivative of timeseries data')
    ts_data_1d = np.zeros(ts_data.shape, np.float32)
    ts_data_1d[1:, :, :] = np.diff(ts_data, n=1, axis=0)

    out_file = '{}_1stDerivative.h5'.format(os.path.splitext(timeseries_file)[0])
    obj_out = timeseries(out_file)
    obj_out.write2hdf5(ts_data_1d, refFile=timeseries_file)

    return out_file
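# What the np.diff() step above produces, on a toy 1D series: the first
# epoch stays zero and each later epoch holds the change from the previous one.
import numpy as np

ts = np.array([0.0, 0.002, 0.005, 0.004], np.float32)
ts_1d = np.zeros_like(ts)
ts_1d[1:] = np.diff(ts, n=1)
print(ts_1d)  # [ 0.     0.002  0.003 -0.001]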
Example 28
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read data
    obj = timeseries(inps.file)
    obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(inps.file))
    ts_data = obj.read(print_msg=False)

    # calculation
    print('calculate the 1st derivative of timeseries data')
    ts_data_1d = np.zeros(ts_data.shape, np.float32)
    ts_data_1d[1:, :, :] = np.diff(ts_data, n=1, axis=0)

    # write to file
    if not inps.outfile:
        inps.outfile = '{}_1stDiff.h5'.format(os.path.splitext(inps.file)[0])
    obj_out = timeseries(inps.outfile)
    obj_out.write2hdf5(ts_data_1d, refFile=inps.file)

    return inps.outfile
Example 29
def get_residual_std(timeseries_resid_file,
                     mask_file='maskTempCoh.h5',
                     ramp_type='quadratic'):
    """Calculate deramped standard deviation in space for each epoch of input timeseries file.
    Parameters: timeseries_resid_file - string, timeseries HDF5 file,
                    e.g. timeseries_ERA5_demErrInvResid.h5
                mask_file - string, mask file, e.g. maskTempCoh.h5
                ramp_type - string, ramp type, e.g. linear, quadratic, no for do not remove ramp
    Returns:    std_list  - list of float, standard deviation of deramped input timeseries file
                date_list - list of string in YYYYMMDD format, corresponding dates
    Example:    import mintpy.utils.utils as ut
                std_list, date_list = ut.get_residual_std('timeseries_ERA5_demErrInvResid.h5',
                                                          'maskTempCoh.h5')
    """
    # Intermediate files name
    if ramp_type == 'no':
        print('No ramp removal')
        deramped_file = timeseries_resid_file
    else:
        deramped_file = '{}_ramp.h5'.format(
            os.path.splitext(timeseries_resid_file)[0])
    std_file = os.path.splitext(deramped_file)[0] + '_std.txt'

    # Get residual std text file
    if run_or_skip(out_file=std_file,
                   in_file=[deramped_file, mask_file],
                   check_readable=False) == 'run':
        if run_or_skip(out_file=deramped_file,
                       in_file=timeseries_resid_file) == 'run':
            if not os.path.isfile(timeseries_resid_file):
                msg = 'Can not find input timeseries residual file: ' + timeseries_resid_file
                msg += '\nRe-run dem_error.py to generate it.'
                raise Exception(msg)
            else:
                print('removing a {} ramp from file: {}'.format(
                    ramp_type, timeseries_resid_file))
                deramped_file = run_deramp(timeseries_resid_file,
                                           ramp_type=ramp_type,
                                           mask_file=mask_file,
                                           out_file=deramped_file)
        print(
            'calculating residual standard deviation for each epoch from file: '
            + deramped_file)
        std_file = timeseries(deramped_file).timeseries_std(maskFile=mask_file,
                                                            outFile=std_file)

    # Read residual std text file
    print('read timeseries residual STD from file: ' + std_file)
    fc = np.loadtxt(std_file, dtype=bytes).astype(str)
    std_list = fc[:, 1].astype(float).tolist()
    date_list = list(fc[:, 0])
    return std_list, date_list
Example 30
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries info / data
    obj = timeseries(inps.timeseries_file)
    obj.open()

    tbase = np.array(obj.yearList, np.float32).reshape(-1, 1)
    tbase -= tbase[obj.refIndex]

    ts_data = obj.read().reshape(obj.numDate, -1)

    # Smooth acquisitions / moving window in time one by one
    print('-' * 50)
    print('filtering in time Gaussian window with size of {:.1f} years'.format(
        inps.time_win))
    ts_data_filt = np.zeros(ts_data.shape, np.float32)
    prog_bar = ptime.progressBar(maxValue=obj.numDate)
    for i in range(obj.numDate):
        # Weight from Gaussian (normal) distribution in time
        tbase_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (tbase_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        # Smooth the current acquisition
        ts_data_filt[i, :] = np.sum(ts_data * weight, axis=0)
        prog_bar.update(i + 1, suffix=obj.dateList[i])
    prog_bar.close()
    del ts_data
    ts_data_filt -= ts_data_filt[obj.refIndex, :]
    ts_data_filt = np.reshape(ts_data_filt,
                              (obj.numDate, obj.length, obj.width))

    # write filtered timeseries file
    if not inps.outfile:
        inps.outfile = '{}_tempGaussian.h5'.format(
            os.path.splitext(inps.timeseries_file)[0])
    obj_out = timeseries(inps.outfile)
    obj_out.write2hdf5(ts_data_filt, refFile=inps.timeseries_file)
    return inps.outfile
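# Stand-alone version of the Gaussian weighting above, with toy sizes:
# output epoch i is the normal-kernel weighted sum over all epochs.
import numpy as np

tbase = np.array([0.0, 0.1, 0.2, 0.3], np.float32).reshape(-1, 1)  # years
ts_data = np.random.randn(4, 10).astype(np.float32)                # (num_date, num_pixel)
time_win = 0.1                                                     # kernel width in years
i = 2
weight = np.exp(-0.5 * (tbase[i] - tbase)**2 / time_win**2)
weight /= np.sum(weight)
ts_filt_i = np.sum(ts_data * weight, axis=0)  # smoothed epoch i, shape (num_pixel,)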
Example 31
def read_date_info(inps):
    """Get inps.excludeDate full list
    Inputs:
        inps          - Namespace, 
    Output:
        inps.excludeDate  - list of string for exclude date in YYYYMMDD format
    """
    if inps.key == 'timeseries':
        tsobj = timeseries(inps.timeseries_file)
    elif inps.key == 'giantTimeseries':
        tsobj = giantTimeseries(inps.timeseries_file)
    elif inps.key == 'HDFEOS':
        tsobj = HDFEOS(inps.timeseries_file)
    tsobj.open()
    inps.excludeDate = read_exclude_date(inps, tsobj.dateList)

    # Date used for estimation inps.dateList
    inps.dateList = [i for i in tsobj.dateList if i not in inps.excludeDate]

    date_list = inps.dateList
    dt_list = [dt.strptime(i, '%Y%m%d') for i in date_list]
    yr_list = [i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in dt_list]
    yr_diff = np.array(yr_list)
    yr_diff -= yr_diff[0]
    inps.yr_diff = yr_diff

    inps.numDate = len(inps.dateList)
    print('-' * 50)
    print('dates from input file: {}\n{}'.format(tsobj.numDate,
                                                 tsobj.dateList))
    print('-' * 50)
    if len(inps.dateList) == len(tsobj.dateList):
        print('using all dates to calculate the velocity')
    else:
        print('dates used to estimate the velocity: {}\n{}'.format(
            inps.numDate, inps.dateList))
    print('-' * 50)

    # flag array for ts data reading
    inps.dropDate = np.array(
        [i not in inps.excludeDate for i in tsobj.dateList], dtype=np.bool_)

    # output file name
    if not inps.outfile:
        outname = 'velocity'
        if inps.key == 'giantTimeseries':
            prefix = os.path.basename(inps.timeseries_file).split('PARAMS')[0]
            outname = prefix + outname
        outname += '.h5'
        inps.outfile = outname
    return inps
Example 32
def prep_metadata(ts_file, print_msg=True):
    """Prepare metadata for HDF-EOS5 file"""
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    unavco_meta_dict = metadata_mintpy2unavco(ts_obj.metadata, ts_obj.dateList)
    if print_msg:
        print('## UNAVCO Metadata:')
        print('-----------------------------------------')
        info.print_attributes(unavco_meta_dict)
        print('-----------------------------------------')

    meta_dict = dict(ts_obj.metadata)
    meta_dict.update(unavco_meta_dict)
    meta_dict['FILE_TYPE'] = 'HDFEOS'
    return meta_dict
Example 34
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries info / data
    obj = timeseries(inps.timeseries_file)
    obj.open()

    tbase = np.array(obj.yearList, np.float32).reshape(-1, 1)
    tbase -= tbase[obj.refIndex]

    ts_data = obj.read().reshape(obj.numDate, -1)

    # Smooth acquisitions / moving window in time one by one
    print('-'*50)
    print('filtering in time Gaussian window with size of {:.1f} years'.format(inps.time_win))
    ts_data_filt = np.zeros(ts_data.shape, np.float32)
    prog_bar = ptime.progressBar(maxValue=obj.numDate)
    for i in range(obj.numDate):
        # Weight from Gaussian (normal) distribution in time
        tbase_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (tbase_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        # Smooth the current acquisition
        ts_data_filt[i, :] = np.sum(ts_data * weight, axis=0)
        prog_bar.update(i+1, suffix=obj.dateList[i])
    prog_bar.close()
    del ts_data
    ts_data_filt -= ts_data_filt[obj.refIndex, :]
    ts_data_filt = np.reshape(ts_data_filt, (obj.numDate, obj.length, obj.width))

    # write filtered timeseries file
    if not inps.outfile:
        inps.outfile = '{}_tempGaussian.h5'.format(os.path.splitext(inps.timeseries_file)[0])
    obj_out = timeseries(inps.outfile)
    obj_out.write2hdf5(ts_data_filt, refFile=inps.timeseries_file)
    return inps.outfile
Example 35
def write2hdf5_file(ifgram_file,
                    metadata,
                    ts,
                    temp_coh,
                    num_inv_ifg=None,
                    suffix='',
                    inps=None):
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    date_list = stack_obj.get_date_list(dropIfgram=True)

    # File 1 - timeseries.h5
    ts_file = '{}{}.h5'.format(os.path.splitext(inps.outfile[0])[0], suffix)
    metadata['REF_DATE'] = date_list[0]
    metadata['FILE_TYPE'] = 'timeseries'
    metadata['UNIT'] = 'm'

    print('-' * 50)
    print('calculating perpendicular baseline timeseries')
    pbase = stack_obj.get_perp_baseline_timeseries(dropIfgram=True)

    ts_obj = timeseries(ts_file)
    ts_obj.write2hdf5(data=ts, dates=date_list, bperp=pbase, metadata=metadata)

    # File 2 - temporalCoherence.h5
    out_file = '{}{}.h5'.format(os.path.splitext(inps.outfile[1])[0], suffix)
    metadata['FILE_TYPE'] = 'temporalCoherence'
    metadata['UNIT'] = '1'
    print('-' * 50)
    writefile.write(temp_coh, out_file=out_file, metadata=metadata)

    ## File 3 - timeseriesDecorStd.h5
    #if not np.all(ts_std == 0.):
    #    out_file = 'timeseriesDecorStd{}.h5'.format(suffix)
    #    metadata['FILE_TYPE'] = 'timeseries'
    #    metadata['UNIT'] = 'm'
    #    phase2range = -1*float(stack_obj.metadata['WAVELENGTH'])/(4.*np.pi)
    #    ts_std *= abs(phase2range)
    #    print('-'*50)
    #    writefile.write(ts_std, out_file=out_file, metadata=metadata, ref_file=ts_file)

    # File 3 - numInvIfgram.h5
    out_file = 'numInvIfgram{}.h5'.format(suffix)
    metadata['FILE_TYPE'] = 'mask'
    metadata['UNIT'] = '1'
    print('-' * 50)
    writefile.write(num_inv_ifg, out_file=out_file, metadata=metadata)
    return
Example 36
def get_residual_rms(timeseries_resid_file, mask_file='maskTempCoh.h5', ramp_type='quadratic'):
    """Calculate deramped Root Mean Square in space for each epoch of input timeseries file.
    Parameters: timeseries_resid_file : string, 
                    timeseries HDF5 file, e.g. timeseries_ECMWF_demErrInvResid.h5
                mask_file : string,
                    mask file, e.g. maskTempCoh.h5
                ramp_type : string, 
                    ramp type, e.g. linear, quadratic, no for do not remove ramp
    Returns:    rms_list : list of float,
                    Root Mean Square of deramped input timeseries file
                date_list : list of string in YYYYMMDD format,
                    corresponding dates
                rms_file : string, text file with rms and date info.
    Example:
        import mintpy.utils.utils as ut
        rms_list, date_list = ut.get_residual_rms('timeseriesResidual.h5', 'maskTempCoh.h5')
    """
    # Intermediate files name
    if ramp_type == 'no':
        print('No ramp removal')
        deramped_file = timeseries_resid_file
    else:
        deramped_file = '{}_ramp.h5'.format(os.path.splitext(timeseries_resid_file)[0])
    rms_file = os.path.join(os.path.dirname(os.path.abspath(deramped_file)),
                            'rms_{}.txt'.format(os.path.splitext(os.path.basename(deramped_file))[0]))

    # Get residual RMS text file
    if run_or_skip(out_file=rms_file, in_file=[deramped_file, mask_file], check_readable=False) == 'run':
        if run_or_skip(out_file=deramped_file, in_file=timeseries_resid_file) == 'run':
            if not os.path.isfile(timeseries_resid_file):
                msg = 'Can not find input timeseries residual file: '+timeseries_resid_file
                msg += '\nRe-run dem_error.py to generate it.'
                raise Exception(msg)
            else:
                print('removing a {} ramp from file: {}'.format(ramp_type, timeseries_resid_file))
                deramped_file = run_deramp(timeseries_resid_file,
                                           ramp_type=ramp_type,
                                           mask_file=mask_file,
                                           out_file=deramped_file)
        print('Calculating residual RMS for each epoch from file: '+deramped_file)
        rms_file = timeseries(deramped_file).timeseries_rms(maskFile=mask_file, outFile=rms_file)

    # Read residual RMS text file
    print('read timeseries residual RMS from file: '+rms_file)
    rms_fileContent = np.loadtxt(rms_file, dtype=bytes).astype(str)
    rms_list = rms_fileContent[:, 1].astype(float).tolist()
    date_list = list(rms_fileContent[:, 0])
    return rms_list, date_list, rms_file
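
For intuition, a self-contained numpy sketch of the per-epoch spatial RMS that timeseries_rms computes over masked pixels (simplified; the real method also writes the text file):

import numpy as np

ts = np.random.randn(5, 100, 100).astype(np.float32)   # (num_date, length, width) residuals
mask = np.ones((100, 100), dtype=bool)                  # e.g. from maskTempCoh.h5

data = ts.reshape(ts.shape[0], -1)[:, mask.flatten()]
rms = np.sqrt(np.nanmean(data ** 2, axis=1))            # one RMS value per epoch
print(rms)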
Example 37
def calculate_temporal_coherence_patch(ifgram_file, timeseries_file, box=None, ifg_num_file=None):
    atr = readfile.read_attribute(timeseries_file)
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # Read timeseries data
    ts_obj = timeseries(timeseries_file)
    ts_obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = ts_obj.read(box=box, print_msg=False).reshape(ts_obj.numDate, -1)
    ts_data = ts_data[1:, :]
    ts_data *= -4*np.pi/float(atr['WAVELENGTH'])

    # Read ifgram data
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A = stack_obj.get_design_matrix4timeseries(stack_obj.get_date12_list(dropIfgram=True))[0]
    print('reading unwrapPhase data from file: {}'.format(ifgram_file))
    ifgram_data = stack_obj.read(datasetName='unwrapPhase', box=box).reshape(A.shape[0], -1)
    ref_value = stack_obj.get_reference_phase(dropIfgram=True).reshape((-1, 1))
    ifgram_data -= np.tile(ref_value, (1, ifgram_data.shape[1]))

    ifgram_diff = ifgram_data - np.dot(A, ts_data)
    del ts_data

    pixel_num = ifgram_data.shape[1]
    temp_coh = np.zeros((pixel_num), np.float32)
    # (fast) nasty solution, which uses all phase values, including invalid zero phases
    if not ifg_num_file:
        temp_coh = np.abs(np.sum(np.exp(1j*ifgram_diff), axis=0)) / ifgram_diff.shape[0]

    # (slow) same solution as ifgram_inversion.py, considering:
    #   1) invalid zero phase in ifgram
    #   2) design matrix rank deficiency.
    else:
        print('considering different number of interferograms used in network inversion for each pixel')
        ifg_num_map = readfile.read(ifg_num_file, box=box)[0].flatten()
        prog_bar = ptime.progressBar(maxValue=pixel_num)
        for i in range(pixel_num):
            if ifg_num_map[i] > 0:
                idx = ifgram_data[:, i] != 0.
                temp_diff = ifgram_diff[idx, i]
                temp_coh[i] = np.abs(np.sum(np.exp(1j*temp_diff), axis=0)) / temp_diff.shape[0]
            prog_bar.update(i+1, every=1000, suffix='{}/{}'.format(i+1, pixel_num))
        prog_bar.close()

    temp_coh = np.reshape(temp_coh, (box[3]-box[1], box[2]-box[0]))
    return temp_coh
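
The fast branch above is the standard temporal coherence metric: per pixel, |sum_j exp(1j * phase_misfit_j)| / num_ifgram, which approaches 1 for small residuals and 0 for random phase; a self-contained numpy illustration:

import numpy as np

num_ifgram, num_pixel = 30, 1000
ifgram_diff = 0.1 * np.random.randn(num_ifgram, num_pixel)   # phase misfit in radians

temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff), axis=0)) / num_ifgram
print(temp_coh.mean())   # close to 1 for small residuals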
Example 38
    def open(self):
        atr = readfile.read_attribute(self.insar_file)
        k = atr['FILE_TYPE']
        if k == 'timeseries':
            ts_obj = timeseries(self.insar_file)
        elif k == 'giantTimeseries':
            ts_obj = giantTimeseries(self.insar_file)
        ts_obj.open(print_msg=False)
        self.metadata = dict(ts_obj.metadata)
        self.num_date = ts_obj.numDate
        self.insar_datetime = ts_obj.times

        self.read_gps()
        self.read_insar()
        self.calculate_rmse()
        return
Example 39
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    # default dset
    if not inps.dset:
        atr = readfile.read_attribute(inps.file)
        k = atr['FILE_TYPE']
        if k in ['ifgramStack', 'HDFEOS']:
            raise Exception("NO input dataset! It's required for {} file".format(k))

        #for time-series
        if k == 'timeseries':
            inps.dset = timeseries(inps.file).get_date_list()[-1]
            print('NO date specified >>> continue with the last date: {}'.format(inps.dset))
    return inps
Example 40
def read_timeseries_info():
    global atr, k, h5, dateList, tims, date_num, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is {}: {}'.format(k, inps.timeseries_file))
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file,'r')
    if k in ['GIANT_TS']:
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d') for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    date_num = len(dateList)
    inps.dates, tims = ptime.date_list2vector(dateList)
Example 41
def print_date_list(fname, disp_ifgram='all', disp_num=False, print_msg=False):
    """Print time/date info of file"""
    k = readfile.read_attribute(fname)['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        dateList = timeseries(fname).get_date_list()

    elif k == 'HDFEOS':
        dateList = HDFEOS(fname).get_date_list()

    elif k == 'giantTimeseries':
        dateList = giantTimeseries(fname).get_date_list()

    elif k in ['giantIfgramStack']:
        dateList = giantIfgramStack(fname).get_date12_list()

    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateListAll = obj.get_date12_list(dropIfgram=False)
        dateListKept = obj.get_date12_list(dropIfgram=True)

        # show dropped ifgram or not
        if disp_ifgram == 'all':
            dateList = list(dateListAll)
        elif disp_ifgram == 'kept':
            dateList = list(dateListKept)
        else:
            dateList = sorted(list(set(dateListAll) - set(dateListKept)))

    else:
        print('--date option can not be applied to {} file, ignore it.'.format(k))

    # print list info
    if print_msg and dateList is not None:
        for d in dateList:
            if disp_num:
                if k in ['ifgramStack']:
                    num = dateListAll.index(d)
                else:
                    num = dateList.index(d)
                msg = '{}\t{}'.format(d, num)
            else:
                msg = d
            print(msg)
    return dateList
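
A hedged usage sketch, assuming a MintPy timeseries.h5 in the current directory:

date_list = print_date_list('timeseries.h5', disp_num=True, print_msg=True)
print('{} dates, from {} to {}'.format(len(date_list), date_list[0], date_list[-1]))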
Example 42
def read_timeseries_info():
    """Reads basic information about timeseries file being viewed"""
    global atr, k, h5, dateList, inps

    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is {}: {}'.format(k, inps.timeseries_file))
    if k not in ['timeseries', 'giantTimeseries']:
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file,'r')
    if k in ['giantTimeseries']:
        dateList = [dt.fromordinal(int(i)).strftime('%Y%m%d') for i in h5['dates'][:].tolist()]
    else:
        dateList = timeseries(inps.timeseries_file).get_date_list()
    inps.num_date = len(dateList)
    inps.dates, inps.tims = ptime.date_list2vector(dateList)
Example 43
def read_ref_date(inps):
    if not inps.refDate:
        print('No reference date input, skip this step.')
        return inps.timeseries_file

    elif os.path.isfile(inps.refDate):
        print('read reference date from file: ' + inps.refDate)
        inps.refDate = ptime.read_date_txt(inps.refDate)[0]
    inps.refDate = ptime.yyyymmdd(inps.refDate)
    print('input reference date: {}'.format(inps.refDate))

    # check input reference date
    date_list = timeseries(inps.timeseries_file[0]).get_date_list()
    if inps.refDate not in date_list:
        msg = 'input reference date: {} is not found.'.format(inps.refDate)
        msg += '\nAll available dates:\n{}'.format(date_list)
        raise Exception(msg)
    return inps.refDate
Example 44
def get_residual_std(timeseries_resid_file, mask_file='maskTempCoh.h5', ramp_type='quadratic'):
    """Calculate deramped standard deviation in space for each epoch of input timeseries file.
    Parameters: timeseries_resid_file - string, timeseries HDF5 file,
                    e.g. timeseries_ECMWF_demErrInvResid.h5
                mask_file - string, mask file, e.g. maskTempCoh.h5
                ramp_type - string, ramp type, e.g. linear, quadratic, no for do not remove ramp
    Returns:    std_list  - list of float, standard deviation of deramped input timeseries file
                date_list - list of string in YYYYMMDD format, corresponding dates
    Example:    import mintpy.utils.utils as ut
                std_list, date_list = ut.get_residual_std('timeseries_ECMWF_demErrInvResid.h5',
                                                          'maskTempCoh.h5')
    """
    # Intermediate files name
    if ramp_type == 'no':
        print('No ramp removal')
        deramped_file = timeseries_resid_file
    else:
        deramped_file = '{}_ramp.h5'.format(os.path.splitext(timeseries_resid_file)[0])
    std_file = os.path.splitext(deramped_file)[0]+'_std.txt'

    # Get residual std text file
    if run_or_skip(out_file=std_file, in_file=[deramped_file, mask_file], check_readable=False) == 'run':
        if run_or_skip(out_file=deramped_file, in_file=timeseries_resid_file) == 'run':
            if not os.path.isfile(timeseries_resid_file):
                msg = 'Can not find input timeseries residual file: '+timeseries_resid_file
                msg += '\nRe-run dem_error.py to generate it.'
                raise Exception(msg)
            else:
                print('removing a {} ramp from file: {}'.format(ramp_type, timeseries_resid_file))
                deramped_file = run_deramp(timeseries_resid_file,
                                           ramp_type=ramp_type,
                                           mask_file=mask_file,
                                           out_file=deramped_file)
        print('calculating residual standard deviation for each epoch from file: '+deramped_file)
        std_file = timeseries(deramped_file).timeseries_std(maskFile=mask_file, outFile=std_file)

    # Read residual std text file
    print('read timeseries residual standard deviation from file: '+std_file)
    std_fileContent = np.loadtxt(std_file, dtype=bytes).astype(str)
    std_list = std_fileContent[:, 1].astype(float).tolist()
    date_list = list(std_fileContent[:, 0])
    return std_list, date_list
Example 45
def read_file_data(epoch=None):
    global atr, attributes, ref_dates_list

    atr = readfile.read_attribute(h5_file.get())

    file_type = atr['FILE_TYPE']

    ref_dates_list = ["All"]

    h5file = h5py.File(h5_file.get(), 'r')
    if file_type in ['HDFEOS']:
        ref_dates_list += h5file.attrs['DATE_TIMESERIES'].split()
    else:
        ref_dates_list = timeseries(h5_file.get()).get_date_list()

    if epoch and epoch != "All":
        data, attributes = readfile.read(h5_file.get(), datasetName=ref_dates_list[epoch])
    else:
        data, attributes = readfile.read(h5_file.get(), datasetName=ref_dates_list[len(ref_dates_list) - 1])

    return data
Example 46
def ref_date_file(ts_file, ref_date, outfile=None):
    """Change input file reference date to a different one.
    Parameters: ts_file : str, timeseries file to be changed
                ref_date : str, date in YYYYMMDD format
                outfile  : if str, save to a different file
                           if None, modify the data value in the existing input file
    """
    print('-'*50)
    print('change reference date for file: {}'.format(ts_file))
    atr = readfile.read_attribute(ts_file)
    if ref_date == atr['REF_DATE']:
        print('same reference date chosen as existing reference date.')
        if not outfile:
            print('Nothing to be done.')
            return ts_file
        else:
            print('Copy {} to {}'.format(ts_file, outfile))
            shutil.copy2(ts_file, outfile)
            return outfile
    else:
        obj = timeseries(ts_file)
        obj.open(print_msg=False)
        ref_idx = obj.dateList.index(ref_date)
        print('reading data ...')
        ts_data = readfile.read(ts_file)[0]

        ts_data -= np.tile(ts_data[ref_idx, :, :].reshape(1, obj.length, obj.width), (obj.numDate, 1, 1))

        if not outfile:
            print('open {} with r+ mode'.format(ts_file))
            with h5py.File(ts_file, 'r+') as f:
                print("update /timeseries dataset and 'REF_DATE' attribute value")
                f['timeseries'][:] = ts_data
                f.attrs['REF_DATE'] = ref_date
            print('close {}'.format(ts_file))
        else:
            atr['REF_DATE'] = ref_date
            writefile.write(ts_data, outfile, metadata=atr, ref_file=ts_file)
    return outfile
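
The np.tile subtraction above is equivalent to plain numpy broadcasting, which avoids materializing the tiled array; a small self-contained demonstration:

import numpy as np

ts = np.random.randn(4, 3, 3).astype(np.float32)   # (num_date, length, width)
ref_idx = 1

via_tile = ts - np.tile(ts[ref_idx].reshape(1, 3, 3), (4, 1, 1))
via_broadcast = ts - ts[ref_idx]                   # the (3, 3) slice broadcasts over dates

assert np.allclose(via_tile, via_broadcast)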
Example 47
def main(argv):
    try:
        timeseries_file = argv[0]
    except:
        usage()
        sys.exit(1)

    try:
        out_file = argv[1]
    except:
        out_file = 'sum_'+timeseries_file

    # Read Timeseries
    obj = timeseries(timeseries_file)
    obj.open()
    D = obj.read().reshape(obj.numDate, -1)

    # Calculate Sum
    sumD = np.zeros(D.shape)
    for i in range(obj.numDate):
        sumD[i, :] = np.sum(np.abs(D - D[i, :]), axis=0) / obj.numDate
        sys.stdout.write('\rcalculating epochs sum {}/{} ...'.format(i+1, obj.numDate))
        sys.stdout.flush()
    print('')
    del D

    # Normalize to 0 and 1
    # with high atmosphere equal to 0 and no atmosphere equal to 1
    sumD -= np.max(sumD, 0)
    sumD *= -1
    sumD /= np.max(sumD, 0)
    sumD[np.isnan(sumD)] = 1

    # Write sum epochs file
    sumD = np.reshape(sumD, (obj.numDate, obj.length, obj.width))
    atr = dict(obj.metadata)
    atr['UNIT'] = '1'
    writefile.write(sumD, out_file=out_file, metadata=atr, ref_file=timeseries_file)
    print('Done.')
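
The three-step normalization above maps, per pixel, the epoch with the largest sum (most atmosphere) to 0 and the smallest to 1; a self-contained numpy check:

import numpy as np

sumD = np.random.rand(5, 10)          # (num_date, num_pixel)
sumD -= np.max(sumD, 0)               # <= 0, with 0 at the per-pixel maximum
sumD *= -1                            # >= 0 after the sign flip
sumD /= np.max(sumD, 0)               # scaled to [0, 1], 1 at the per-pixel minimum
sumD[np.isnan(sumD)] = 1              # constant pixels divide by zero -> set to 1

assert sumD.min() >= 0 and sumD.max() <= 1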
Example 48
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
    else:
        if any(i is None for i in [inps.geom_file, inps.ref_yx]):
            print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
            if not os.path.isfile(inps.trop_file):
                inps.trop_file = None
            return

        # calculate phase delay
        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
        num_date = len(inps.grib_file_list)
        date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
        trop_data = np.zeros((num_date, length, width), np.float32)

        print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
        print('number of grib files used: {}'.format(num_date))
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            grib_file = inps.grib_file_list[i]
            trop_data[i] = get_delay(grib_file, inps)
            prog_bar.update(i+1, suffix=os.path.basename(grib_file))
        prog_bar.close()

        # Convert relative phase delay on reference date
        try:
            inps.ref_date = atr['REF_DATE']
        except:
            inps.ref_date = date_list[0]
        print('convert to relative phase delay with reference date: '+inps.ref_date)
        inps.ref_idx = date_list.index(inps.ref_date)
        trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

        # Write tropospheric delay to HDF5
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        ts_obj = timeseries(inps.trop_file)
        ts_obj.write2hdf5(data=trop_data,
                          dates=date_list,
                          metadata=atr,
                          refFile=inps.timeseries_file)

    # Delete temporary DEM file in ROI_PAC format
    if inps.geom_file:
        temp_files = [fname for fname in [inps.dem_file,
                                          inps.inc_angle_file,
                                          inps.lat_file,
                                          inps.lon_file]
                      if (fname is not None and 'pyaps' in fname)]
        if temp_files:
            print('delete temporary geometry files')
            rmCmd = 'rm '
            for fname in temp_files:
                rmCmd += ' {f} {f}.rsc '.format(f=fname)
            print(rmCmd)
            os.system(rmCmd)
    return
Example 49
def check_inputs(inps):
    parser = create_parser()

    # output directories/files
    atr = dict()
    mintpy_dir = None
    if inps.timeseries_file:
        atr = readfile.read_attribute(inps.timeseries_file)
        mintpy_dir = os.path.dirname(inps.timeseries_file)
        if not inps.outfile:
            fbase = os.path.splitext(inps.timeseries_file)[0]
            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
    elif inps.geom_file:
        atr = readfile.read_attribute(inps.geom_file)
        mintpy_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
    else:
        mintpy_dir = os.path.abspath(os.getcwd())

    # trop_file
    inps.trop_file = os.path.join(mintpy_dir, 'inputs/{}.h5'.format(inps.trop_model))
    print('output tropospheric delay file: {}'.format(inps.trop_file))

    # hour
    if not inps.hour:
        if 'CENTER_LINE_UTC' in atr.keys():
            inps.hour = ptime.closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.trop_model)
        else:
            parser.print_usage()
            raise Exception('no input for hour')
    print('time of closest available product: {}:00 UTC'.format(inps.hour))

    # date list
    if inps.timeseries_file:
        print('read date list from timeseries file: {}'.format(inps.timeseries_file))
        ts_obj = timeseries(inps.timeseries_file)
        ts_obj.open(print_msg=False)
        inps.date_list = ts_obj.dateList
    elif len(inps.date_list) == 1:
        if os.path.isfile(inps.date_list[0]):
            print('read date list from text file: {}'.format(inps.date_list[0]))
            inps.date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list[0],
                                                       dtype=bytes,
                                                       usecols=(0,)).astype(str).tolist())
        else:
            parser.print_usage()
            raise Exception('ERROR: input date list < 2')

    # Grib data directory
    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
    if not os.path.isdir(inps.grib_dir):
        os.makedirs(inps.grib_dir)
        print('making directory: '+inps.grib_dir)

    # Date list to grib file list
    inps.grib_file_list = date_list2grib_file(inps.date_list,
                                              inps.hour,
                                              inps.trop_model,
                                              inps.grib_dir)

    if 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
        print('reference pixel: {}'.format(inps.ref_yx))

    # Coordinate system: geocoded or not
    inps.geocoded = False
    if 'Y_FIRST' in atr.keys():
        inps.geocoded = True
    print('geocoded: {}'.format(inps.geocoded))

    # Prepare DEM, inc_angle, lat/lon file for PyAPS to read
    if inps.geom_file:
        geom_atr = readfile.read_attribute(inps.geom_file)
        print('converting DEM/incAngle for PyAPS to read')
        # DEM
        data = readfile.read(inps.geom_file, datasetName='height', print_msg=False)[0]
        inps.dem_file = 'pyapsDem.hgt'
        writefile.write(data, inps.dem_file, metadata=geom_atr)

        # inc_angle
        inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle', print_msg=False)[0]
        inps.inc_angle_file = 'pyapsIncAngle.flt'
        writefile.write(inps.inc_angle, inps.inc_angle_file, metadata=geom_atr)

        # latitude
        try:
            data = readfile.read(inps.geom_file, datasetName='latitude', print_msg=False)[0]
            print('converting lat for PyAPS to read')
            inps.lat_file = 'pyapsLat.flt'
            writefile.write(data, inps.lat_file, metadata=geom_atr)
        except:
            inps.lat_file = None

        # longitude
        try:
            data = readfile.read(inps.geom_file, datasetName='longitude', print_msg=False)[0]
            print('converting lon for PyAPS to read')
            inps.lon_file = 'pyapsLon.flt'
            writefile.write(data, inps.lon_file, metadata=geom_atr)
        except:
            inps.lon_file = None
    return inps, atr
Example 50
def diff_file(file1, file2, outFile=None, force=False):
    """Subtraction/difference of two input files"""
    if not outFile:
        fbase, fext = os.path.splitext(file1)
        if len(file2) > 1:
            raise ValueError('Output file name is required when differencing more than one file.')
        outFile = '{}_diff_{}{}'.format(fbase, os.path.splitext(os.path.basename(file2[0]))[0], fext)
    print('{} - {} --> {}'.format(file1, file2, outFile))

    # Read basic info
    atr1 = readfile.read_attribute(file1)
    k1 = atr1['FILE_TYPE']
    atr2 = readfile.read_attribute(file2[0])
    k2 = atr2['FILE_TYPE']
    print('input files are: {} and {}'.format(k1, k2))

    if k1 == 'timeseries':
        if k2 not in ['timeseries', 'giantTimeseries']:
            raise Exception('Input multiple dataset files are not the same file type!')
        if len(file2) > 1:
            raise Exception(('Only the subtraction of two files is supported for time-series files,'
                             ' got {} files.'.format(len(file2)+1)))

        obj1 = timeseries(file1)
        obj1.open()
        if k2 == 'timeseries':
            obj2 = timeseries(file2[0])
            unit_fac = 1.
        elif k2 == 'giantTimeseries':
            obj2 = giantTimeseries(file2[0])
            unit_fac = 0.001
        obj2.open()
        ref_date, ref_y, ref_x = _check_reference(obj1.metadata, obj2.metadata)

        # check dates shared by two timeseries files
        dateListShared = [i for i in obj1.dateList if i in obj2.dateList]
        dateShared = np.ones((obj1.numDate), dtype=np.bool_)
        if dateListShared != obj1.dateList:
            print('WARNING: {} does not contain all dates in {}'.format(file2, file1))
            if force:
                dateExcluded = list(set(obj1.dateList) - set(dateListShared))
                print('Continue and enforce the differencing for their shared dates only.')
                print('\twith the following dates ignored in the differencing:\n{}'.format(dateExcluded))
                dateShared[np.array([obj1.dateList.index(i) for i in dateExcluded])] = 0
            else:
                raise Exception('To enforce the differencing anyway, use --force option.')

        # consider different reference_date/pixel
        data2 = readfile.read(file2[0], datasetName=dateListShared)[0] * unit_fac
        if ref_date:
            data2 -= np.tile(data2[obj2.dateList.index(ref_date), :, :],
                             (data2.shape[0], 1, 1))
        if ref_y and ref_x:
            data2 -= np.tile(data2[:, ref_y, ref_x].reshape(-1, 1, 1),
                             (1, data2.shape[1], data2.shape[2]))

        data = obj1.read()
        mask = data == 0.
        data[dateShared] -= data2
        data[mask] = 0.               # Do not change zero phase value
        del data2
        writefile.write(data, out_file=outFile, ref_file=file1)

    elif all(i == 'ifgramStack' for i in [k1, k2]):
        obj1 = ifgramStack(file1)
        obj1.open()
        obj2 = ifgramStack(file2[0])
        obj2.open()
        dsNames = list(set(obj1.datasetNames) & set(obj2.datasetNames))
        if len(dsNames) == 0:
            raise ValueError('no common dataset between two files!')
        dsName = [i for i in ifgramDatasetNames if i in dsNames][0]

        # read data
        print('reading {} from file {} ...'.format(dsName, file1))
        data1 = readfile.read(file1, datasetName=dsName)[0]
        print('reading {} from file {} ...'.format(dsName, file2[0]))
        data2 = readfile.read(file2[0], datasetName=dsName)[0]

        # consider reference pixel
        if 'unwrapphase' in dsName.lower():
            print('referencing to pixel ({},{}) ...'.format(obj1.refY, obj1.refX))
            ref1 = data1[:, obj1.refY, obj1.refX]
            ref2 = data2[:, obj2.refY, obj2.refX]
            for i in range(data1.shape[0]):
                data1[i,:][data1[i, :] != 0.] -= ref1[i]
                data2[i,:][data2[i, :] != 0.] -= ref2[i]

        # operation and ignore zero values
        data1[data1 == 0] = np.nan
        data2[data2 == 0] = np.nan
        data = data1 - data2
        del data1, data2
        data[np.isnan(data)] = 0.

        # write to file
        dsDict = {}
        dsDict[dsName] = data
        writefile.write(dsDict, out_file=outFile, ref_file=file1)

    # Single dataset file
    else:
        data1 = readfile.read(file1)[0]
        data = np.array(data1, data1.dtype)
        for fname in file2:
            data2 = readfile.read(fname)[0]
            data = np.array(data, dtype=np.float32) - np.array(data2, dtype=np.float32)
            data = np.array(data, data1.dtype)
        print('writing >>> '+outFile)
        writefile.write(data, out_file=outFile, metadata=atr1)

    return outFile
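
A hedged usage sketch, assuming a displacement time series and a tropospheric delay file produced by MintPy (file names are illustrative); note that the second argument is a list:

out_file = diff_file('timeseries.h5', ['inputs/ERA5.h5'])
print('wrote: ' + out_file)   # 'timeseries_diff_ERA5.h5' with the default naming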
Example 51
def read_network_info(inps):
    ext = os.path.splitext(inps.file)[1]

    # 1. Read dateList and pbaseList
    if ext in ['.h5', '.he5']:
        k = readfile.read_attribute(inps.file)['FILE_TYPE']
        print('reading temporal/spatial baselines from {} file: {}'.format(k, inps.file))
        if k == 'ifgramStack':
            inps.dateList = ifgramStack(inps.file).get_date_list(dropIfgram=False)
            inps.pbaseList = ifgramStack(inps.file).get_perp_baseline_timeseries(dropIfgram=False)
        elif k == 'timeseries':
            obj = timeseries(inps.file)
            obj.open(print_msg=False)
            inps.dateList = obj.dateList
            inps.pbaseList = obj.pbase
        else:
            raise ValueError('input file is not ifgramStack/timeseries, can not read temporal/spatial baseline info.')
    else:
        print('reading temporal/spatial baselines from list file: '+inps.bl_list_file)
        inps.dateList, inps.pbaseList = pnet.read_baseline_file(inps.bl_list_file)[0:2]
    print('number of acquisitions: {}'.format(len(inps.dateList)))

    # 2. Read All Date12/Ifgrams/Pairs
    inps.date12List = pnet.get_date12_list(inps.file)
    print('reading interferograms info from file: {}'.format(inps.file))
    print('number of interferograms: {}'.format(len(inps.date12List)))

    if inps.save_list:
        txtFile = os.path.splitext(os.path.basename(inps.file))[0]+'_date12List.txt'
        np.savetxt(txtFile, inps.date12List, fmt='%s')
        print('save pairs/date12 info to file: '+txtFile)

    # Optional: Read dropped date12 / date
    inps.dateList_drop = []
    inps.date12List_drop = []
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.date12List_keep = ifgramStack(inps.file).get_date12_list(dropIfgram=True)
        inps.date12List_drop = sorted(list(set(inps.date12List) - set(inps.date12List_keep)))
        print('-'*50)
        print('number of interferograms marked as drop: {}'.format(len(inps.date12List_drop)))
        print('number of interferograms marked as keep: {}'.format(len(inps.date12List_keep)))

        mDates = [i.split('_')[0] for i in inps.date12List_keep]
        sDates = [i.split('_')[1] for i in inps.date12List_keep]
        inps.dateList_keep = sorted(list(set(mDates + sDates)))
        inps.dateList_drop = sorted(list(set(inps.dateList) - set(inps.dateList_keep)))
        print('number of acquisitions marked as drop: {}'.format(len(inps.dateList_drop)))
        if len(inps.dateList_drop) > 0:
            print(inps.dateList_drop)

    # Optional: Read Coherence List
    inps.cohList = None
    if ext in ['.h5', '.he5'] and k == 'ifgramStack':
        inps.cohList, cohDate12List = ut.spatial_average(inps.file, datasetName='coherence', maskFile=inps.maskFile,
                                                         saveList=True, checkAoi=False)
        if all(np.isnan(inps.cohList)):
            inps.cohList = None
            print('WARNING: all coherence values are NaN! Skip using coherence and continue.')

        if set(cohDate12List) > set(inps.date12List):
            print('extract coherence value for all pair/date12 in input file')
            inps.cohList = [inps.cohList[cohDate12List.index(i)] for i in inps.date12List]
        elif set(cohDate12List) < set(inps.date12List):
            inps.cohList = None
            print('WARNING: not every pair/date12 from input file is in coherence file')
            print('turn off the color plotting of interferograms based on coherence')
    return inps
Example 52
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
        return

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    else:
        inps.lat, inps.lon = get_lat_lon(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i+1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # Convert relative phase delay on reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: '+inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # Write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    return
Example 53
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if succeed; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)
    if (inps.ref_y and inps.ref_x and 'REF_Y' in atr.keys()
            and inps.ref_y == int(atr['REF_Y']) and inps.ref_x == int(atr['REF_X'])
            and not inps.force):
        print('Same reference pixel is already selected/saved in file, skip updating.')
        return inps.file

    # Get stack and mask
    stack = ut.temporal_average(inps.file, datasetName='unwrapPhase', updateMode=True, outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError('no pixel found with valid phase value in all datasets.')

    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x] == 0.:
        raise ValueError('reference y/x have nan value in some dataset. Please re-select.')

    # Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(coh_file=inps.coherenceFile,
                                                             mask=mask,
                                                             min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)
    if not inps.ref_y or not inps.ref_x:
        raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: '+inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = '{}_seeded{}'.format(os.path.splitext(inps.file)[0],
                                                os.path.splitext(inps.file)[1])
        k = atr['FILE_TYPE']

        # For ifgramStack file, update data value directly, do not write to new file
        if k == 'ifgramStack':
            f = h5py.File(inps.file, 'r+')
            ds = f[k].get('unwrapPhase')
            for i in range(ds.shape[0]):
                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
            f[k].attrs.update(atrNew)
            f.close()
            inps.outfile = inps.file

        elif k == 'timeseries':
            data = timeseries(inps.file).read()
            for i in range(data.shape[0]):
                data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]
            obj = timeseries(inps.outfile)
            atr.update(atrNew)
            obj.write2hdf5(data=data, metadata=atr, refFile=inps.file)
            obj.close()
        else:
            print('writing >>> '+inps.outfile)
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)
    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
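
The per-epoch loop in the timeseries branch can also be written with numpy broadcasting; a self-contained equivalence check (illustration only):

import numpy as np

data = np.random.randn(4, 5, 6).astype(np.float32)   # (num_date, length, width)
ref_y, ref_x = 2, 3

looped = data.copy()
for i in range(looped.shape[0]):
    looped[i, :, :] -= looped[i, ref_y, ref_x]

vectorized = data - data[:, ref_y, ref_x][:, None, None]
assert np.allclose(looped, vectorized)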
Example 54
def spatial_average(File, datasetName='coherence', maskFile=None, box=None,
                    saveList=False, checkAoi=True):
    """Read/Calculate Spatial Average of input file.

    If input file is text file, read it directly;
    If input file is data matrix file:
        If corresponding text file exists with the same mask file/AOI info, read it directly;
        Otherwise, calculate it from data file.

        Only non-nan pixel is considered.
    Parameters: File     : string, path of input file
                maskFile : string, path of mask file, e.g. maskTempCoh.h5
                box      : 4-tuple defining the left, upper, right, and lower pixel coordinate
                saveList : bool, save (list of) mean value into text file
    Returns:    meanList : list for float, average value in space for each epoch of input file
                dateList : list of string for date info
                    date12_list, e.g. 101120-110220, for interferograms/coherence
                    date8_list, e.g. 20101120, for timeseries
                    file name, e.g. velocity.h5, for all the other file types
    Example:    meanList = spatial_average('inputs/ifgramStack.h5')[0]
                meanList, date12_list = spatial_average('inputs/ifgramStack.h5',
                                                        maskFile='maskTempCoh.h5',
                                                        saveList=True)
    """
    def read_text_file(fname):
        txtContent = np.loadtxt(fname, dtype=bytes).astype(str)
        meanList = [float(i) for i in txtContent[:, 1]]
        dateList = [i for i in txtContent[:, 0]]
        return meanList, dateList

    # Basic File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # If input is text file
    suffix = ''
    if k == 'ifgramStack':
        suffix += '_'+datasetName
    suffix += '_spatialAvg.txt'
    if File.endswith(suffix):
        print('Input file is spatial average txt already, read it directly')
        meanList, dateList = read_text_file(File)
        return meanList, dateList

    # Read existing txt file only if 1) data file is older AND 2) same AOI
    txtFile = os.path.splitext(os.path.basename(File))[0]+suffix
    file_line = '# Data file: {}\n'.format(os.path.basename(File))
    mask_line = '# Mask file: {}\n'.format(maskFile)
    aoi_line = '# AOI box: {}\n'.format(box)
    try:
        # Read AOI line from existing txt file
        fl = open(txtFile, 'r')
        lines = fl.readlines()
        fl.close()
        if checkAoi:
            try:
                aoi_line_orig = [i for i in lines if '# AOI box:' in i][0]
            except:
                aoi_line_orig = ''
        else:
            aoi_line_orig = aoi_line
        try:
            mask_line_orig = [i for i in lines if '# Mask file:' in i][0]
        except:
            mask_line_orig = ''
        if (aoi_line_orig == aoi_line 
                and mask_line_orig == mask_line
                and run_or_skip(out_file=txtFile,
                                in_file=[File, maskFile],
                                check_readable=False) == 'skip'):
            print(txtFile+' already exists, read it directly')
            meanList, dateList = read_text_file(txtFile)
            return meanList, dateList
    except:
        pass

    # Calculate mean coherence list
    if k == 'ifgramStack':
        obj = ifgramStack(File)
        obj.open(print_msg=False)
        meanList, dateList = obj.spatial_average(datasetName=datasetName,
                                                 maskFile=maskFile,
                                                 box=box)
        pbase = obj.pbaseIfgram
        tbase = obj.tbaseIfgram
        obj.close()
    elif k == 'timeseries':
        meanList, dateList = timeseries(File).spatial_average(maskFile=maskFile,
                                                              box=box)
    else:
        data = readfile.read(File, box=box)[0]
        if maskFile and os.path.isfile(maskFile):
            print('mask from file: '+maskFile)
            mask = readfile.read(maskFile, datasetName='mask', box=box)[0]
            data[mask == 0.] = np.nan
        meanList = np.nanmean(data)
        dateList = [os.path.basename(File)]

    # Write mean coherence list into text file
    if saveList:
        print('write average value in space into text file: '+txtFile)
        fl = open(txtFile, 'w')
        # Write comments
        fl.write(file_line+mask_line+aoi_line)
        # Write data list
        numLine = len(dateList)
        if k == 'ifgramStack':
            fl.write('#\tDATE12\t\tMean\tBtemp/days\tBperp/m\t\tNum\n')
            for i in range(numLine):
                fl.write('%s\t%.4f\t%8.0f\t%8.1f\t%d\n' %
                         (dateList[i], meanList[i], tbase[i], pbase[i], i))
        else:
            fl.write('#\tDATE12\t\tMean\n')
            for i in range(numLine):
                fl.write('%s\t%.4f\n' % (dateList[i], meanList[i]))
        fl.close()

    if len(meanList) == 1:
        meanList = meanList[0]
        dateList = dateList[0]
    return meanList, dateList
Example 55
def temporal_average(File, datasetName='coherence', updateMode=False, outFile=None):
    """Calculate temporal average of multi-temporal dataset, equivalent to stacking
    For ifgramStack/unwrapPhase, return the average phase velocity

    Parameters: File : string, file to be averaged in time
                datasetName : string, dataset to be read from input file, for multiple
                    datasets file - ifgramStack - only
                    e.g.: coherence, unwrapPhase
                updateMode : bool
                outFile : string, output filename
                    None for auto output filename
                    False for do not save as output file
    Returns:    dataMean : 2D array
                outFile : string, output file name
    Examples:   avgPhaseVel = ut.temporal_average('ifgramStack.h5', datasetName='unwrapPhase')[0]
                ut.temporal_average('ifgramStack.h5', datasetName='coherence',
                                    outFile='avgSpatialCoh.h5', updateMode=True)
    """
    atr = readfile.read_attribute(File, datasetName=datasetName)
    k = atr['FILE_TYPE']
    if k not in ['ifgramStack', 'timeseries']:
        print('WARNING: input file is not multi-temporal file: {}, return itself.'.format(File))
        data = readfile.read(File)[0]
        return data, File

    # Default output filename
    if outFile is None:
        ext = os.path.splitext(File)[1]
        if k == 'ifgramStack':
            if datasetName == 'coherence':
                outFile = 'avgSpatialCoh.h5'
            elif 'unwrapPhase' in datasetName:
                outFile = 'avgPhaseVelocity.h5'
            else:
                outFile = 'avg{}.h5'.format(datasetName)
        elif k == 'timeseries':
            if k in File:
                processMark = os.path.basename(File).split('timeseries')[1].split(ext)[0]
                outFile = 'avgDisplacement{}.h5'.format(processMark)
        else:
            outFile = 'avg{}.h5'.format(File)

    if updateMode and os.path.isfile(outFile):
        dataMean = readfile.read(outFile)[0]
        return dataMean, outFile

    # Calculate temporal average
    if k == 'ifgramStack':
        dataMean = ifgramStack(File).temporal_average(datasetName=datasetName)
        if 'unwrapPhase' in datasetName:
            atr['FILE_TYPE'] = 'velocity'
            atr['UNIT'] = 'm/year'
        else:
            atr['FILE_TYPE'] = datasetName
    elif k == 'timeseries':
        dataMean = timeseries(File).temporal_average()
        atr['FILE_TYPE'] = 'displacement'

    if outFile:
        writefile.write(dataMean, out_file=outFile, metadata=atr)
    return dataMean, outFile
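
For a time-series file, the branch above reduces to averaging along the date axis; a self-contained numpy sketch (not necessarily the exact MintPy implementation):

import numpy as np

ts = np.random.randn(8, 50, 50).astype(np.float32)   # (num_date, length, width)
data_mean = np.nanmean(ts, axis=0)                   # 2D temporal average, i.e. 'stacking'
print(data_mean.shape)                               # (50, 50)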
Example 56
def get_slice_list(fname):
    """Get list of 2D slice existed in file (for display)"""
    fbase, fext = os.path.splitext(os.path.basename(fname))
    fext = fext.lower()
    atr = read_attribute(fname)
    k = atr['FILE_TYPE']

    global slice_list
    # HDF5 Files
    if fext in ['.h5', '.he5']:
        with h5py.File(fname, 'r') as f:
            d1_list = [i for i in f.keys() if isinstance(f[i], h5py.Dataset)]
        if k == 'timeseries' and k in d1_list:
            obj = timeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['geometry'] and k not in d1_list:
            obj = geometry(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['ifgramStack']:
            obj = ifgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['HDFEOS']:
            obj = HDFEOS(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantTimeseries']:
            obj = giantTimeseries(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        elif k in ['giantIfgramStack']:
            obj = giantIfgramStack(fname)
            obj.open(print_msg=False)
            slice_list = obj.sliceList

        else:
            ## Find slice by walking through the file structure
            length, width = int(atr['LENGTH']), int(atr['WIDTH'])
            def get_hdf5_2d_dataset(name, obj):
                global slice_list
                if isinstance(obj, h5py.Dataset) and obj.shape[-2:] == (length, width):
                    if obj.ndim == 2:
                        slice_list.append(name)
                    else:
                        warnings.warn('file has un-defined {}D dataset: {}'.format(obj.ndim, name))
            slice_list = []
            with h5py.File(fname, 'r') as f:
                f.visititems(get_hdf5_2d_dataset)

    # Binary Files
    else:
        if fext.lower() in ['.trans', '.utm_to_rdc']:
            slice_list = ['rangeCoord', 'azimuthCoord']
        elif fbase.startswith('los'):
            slice_list = ['incidenceAngle', 'azimuthAngle']
        elif atr.get('number_bands', '1') == '2' and 'unw' not in k:
            slice_list = ['band1', 'band2']
        else:
            slice_list = ['']
    return slice_list
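
The fallback visititems walk also works standalone; a self-contained demo that collects all 2D datasets of a given spatial shape from an HDF5 file:

import h5py
import numpy as np

# build a small demo file with one 2D and one 3D dataset
with h5py.File('demo.h5', 'w') as f:
    f['velocity'] = np.zeros((10, 20), np.float32)
    f.create_group('grp')['cube'] = np.zeros((3, 10, 20), np.float32)

slice_list = []
def get_hdf5_2d_dataset(name, obj):
    if isinstance(obj, h5py.Dataset) and obj.ndim == 2 and obj.shape == (10, 20):
        slice_list.append(name)

with h5py.File('demo.h5', 'r') as f:
    f.visititems(get_hdf5_2d_dataset)
print(slice_list)   # ['velocity']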
Example 57
def check_inputs(inps):
    parser = create_parser()

    # output directories/files
    atr = dict()
    mintpy_dir = None
    if inps.timeseries_file:
        atr = readfile.read_attribute(inps.timeseries_file)
        mintpy_dir = os.path.dirname(inps.timeseries_file)
        if not inps.outfile:
            fbase = os.path.splitext(inps.timeseries_file)[0]
            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
    elif inps.geom_file:
        atr = readfile.read_attribute(inps.geom_file)
        mintpy_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
    else:
        mintpy_dir = os.path.abspath(os.getcwd())

    # trop_file
    inps.trop_file = os.path.join(mintpy_dir, 'inputs/{}.h5'.format(inps.trop_model))
    print('output tropospheric delay file: {}'.format(inps.trop_file))

    # hour
    if not inps.hour:
        if 'CENTER_LINE_UTC' in atr.keys():
            inps.hour = closest_weather_model_hour(atr['CENTER_LINE_UTC'], inps.trop_model)
        else:
            parser.print_usage()
            raise Exception('no input for hour')
    print('time of closest available product: {}:00 UTC'.format(inps.hour))

    # date list
    if inps.timeseries_file:
        print('read date list from timeseries file: {}'.format(inps.timeseries_file))
        ts_obj = timeseries(inps.timeseries_file)
        ts_obj.open(print_msg=False)
        inps.date_list = ts_obj.dateList
    elif len(inps.date_list) == 1:
        if os.path.isfile(inps.date_list[0]):
            print('read date list from text file: {}'.format(inps.date_list[0]))
            inps.date_list = ptime.yyyymmdd(np.loadtxt(inps.date_list[0],
                                                       dtype=bytes,
                                                       usecols=(0,)).astype(str).tolist())
        else:
            parser.print_usage()
            raise Exception('ERROR: input date list < 2')

    # Grib data directory
    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
    if not os.path.isdir(inps.grib_dir):
        os.makedirs(inps.grib_dir)
        print('making directory: '+inps.grib_dir)

    # area extent for ERA5 grib data download
    inps.snwe = get_snwe(atr)

    # Date list to grib file list
    inps.grib_file_list = date_list2grib_file(date_list=inps.date_list,
                                              hour=inps.hour,
                                              model=inps.trop_model,
                                              grib_dir=inps.grib_dir,
                                              snwe=inps.snwe)

    if 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
        print('reference pixel: {}'.format(inps.ref_yx))
    return inps, atr
Example 58
def write(datasetDict, out_file, metadata=None, ref_file=None, compression=None):
    """ Write one file.
    Parameters: datasetDict : dict of dataset, with key = datasetName and value = 2D/3D array, e.g.:
                    {'height'        : np.ones((   200,300), dtype=np.int16),
                     'incidenceAngle': np.ones((   200,300), dtype=np.float32),
                     'bperp'         : np.ones((80,200,300), dtype=np.float32),
                     ...}
                out_file : str, output file name
                metadata : dict of attributes
                ref_file : str, reference file to get auxiliary info
                compression : str, compression while writing to HDF5 file, None, "lzf", "gzip"
    Returns:    out_file : str
    Examples:   dsDict = dict()
                dsDict['velocity'] = np.ones((200,300), dtype=np.float32)
                write(datasetDict=dsDict, out_file='velocity.h5', metadata=atr)
    """
    # copy metadata to meta
    if metadata:
        meta = {key: value for key, value in metadata.items()}
    elif ref_file:
        meta = readfile.read_attribute(ref_file)
    else:
        raise ValueError('No metadata or reference file input.')

    # convert ndarray input into dict type
    if isinstance(datasetDict, np.ndarray):
        data = np.array(datasetDict, datasetDict.dtype)
        datasetDict = dict()
        datasetDict[meta['FILE_TYPE']] = data

    ext = os.path.splitext(out_file)[1].lower()
    # HDF5 File
    if ext in ['.h5', '.he5']:
        if compression is None and ref_file:
            compression = readfile.get_hdf5_compression(ref_file)

        k = meta['FILE_TYPE']
        if k == 'timeseries':
            if ref_file is None:
                raise Exception('Can not write {} file without reference file!'.format(k))
            obj = timeseries(out_file)
            obj.write2hdf5(datasetDict[k],
                           metadata=meta,
                           refFile=ref_file,
                           compression=compression)

        else:
            if os.path.isfile(out_file):
                print('delete existing file: {}'.format(out_file))
                os.remove(out_file)

            print('create HDF5 file: {} with w mode'.format(out_file))
            with h5py.File(out_file, 'w') as f:
                # 1. Write input datasets
                maxDigit = max([len(i) for i in list(datasetDict.keys())])
                for dsName in datasetDict.keys():
                    data = datasetDict[dsName]
                    print(('create dataset /{d:<{w}} of {t:<10} in size of {s:<20} '
                           'with compression={c}').format(d=dsName,
                                                          w=maxDigit,
                                                          t=str(data.dtype),
                                                          s=str(data.shape),
                                                          c=compression))
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          chunks=True,
                                          compression=compression)

                # 2. Write extra/auxiliary datasets from ref_file
                if ref_file and os.path.splitext(ref_file)[1] in ['.h5', '.he5']:
                    atr_ref = readfile.read_attribute(ref_file)
                    shape_ref = (int(atr_ref['LENGTH']), int(atr_ref['WIDTH']))
                    with h5py.File(ref_file, 'r') as fr:
                        dsNames = [i for i in fr.keys()
                                   if (i not in list(datasetDict.keys())
                                       and isinstance(fr[i], h5py.Dataset) 
                                       and fr[i].shape[-2:] != shape_ref)]
                        maxDigit = max([len(i) for i in dsNames]+[maxDigit])
                        for dsName in dsNames:
                            ds = fr[dsName]
                            print(('create dataset /{d:<{w}} of {t:<10} in size of {s:<10} '
                                   'with compression={c}').format(d=dsName,
                                                                  w=maxDigit,
                                                                  t=str(ds.dtype),
                                                                  s=str(ds.shape),
                                                                  c=compression))
                            f.create_dataset(dsName,
                                             data=ds[:],
                                             chunks=True,
                                             compression=compression)

                # 3. metadata
                for key, value in meta.items():
                    f.attrs[key] = str(value)
                print('finished writing to {}'.format(out_file))

    # ISCE / ROI_PAC GAMMA / Image product
    else:
        key_list = list(datasetDict.keys())
        data_list = []
        for key in key_list:
            data_list.append(datasetDict[key])

        # Write Data File
        print('write {}'.format(out_file))
        if ext in ['.unw', '.cor', '.hgt']:
            write_float32(data_list[0], out_file)
            meta['DATA_TYPE'] = 'float32'
        elif ext == '.dem':
            write_real_int16(data_list[0], out_file)
            meta['DATA_TYPE'] = 'int16'
        elif ext in ['.trans']:
            write_float32(data_list[0], data_list[1], out_file)
        elif ext in ['.utm_to_rdc', '.UTM_TO_RDC']:
            data = np.zeros(datasetDict['rangeCoord'].shape, dtype=np.complex64)
            data.real = datasetDict['rangeCoord']
            data.imag = datasetDict['azimuthCoord']
            data.astype('>c8').tofile(out_file)
        elif ext in ['.mli', '.flt']:
            write_real_float32(data_list[0], out_file)
        elif ext == '.slc':
            write_complex_int16(data_list[0], out_file)
        elif ext == '.int':
            write_complex64(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['float32', 'float']:
            write_real_float32(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['int16', 'short']:
            write_real_int16(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['byte','bool']:
            write_byte(data_list[0], out_file)
        else:
            print('Un-supported file type: '+ext)
            return 0

        # write metadata file
        write_roipac_rsc(meta, out_file+'.rsc', print_msg=True)
    return out_file
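
The .utm_to_rdc branch packs the two coordinate bands into the real/imaginary parts of a big-endian complex64 raster; a self-contained round-trip sketch:

import numpy as np

rg = np.arange(6, dtype=np.float32).reshape(2, 3)    # rangeCoord band
az = rg + 100                                        # azimuthCoord band

data = np.zeros(rg.shape, dtype=np.complex64)
data.real = rg
data.imag = az
data.astype('>c8').tofile('demo.utm_to_rdc')

back = np.fromfile('demo.utm_to_rdc', dtype='>c8').reshape(rg.shape)
assert np.allclose(back.real, rg) and np.allclose(back.imag, az)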
Example 59
def write2hdf5(out_file, ts_file, coh_file, mask_file, geom_file, metadata):
    """Write HDF5 file in HDF-EOS5 format"""
    ts_obj = timeseries(ts_file)
    ts_obj.open(print_msg=False)
    dateList = ts_obj.dateList

    # Open HDF5 File
    f = h5py.File(out_file, 'w')
    print('create HDF5 file: {} with w mode'.format(out_file))
    maxDigit = 20

    # Write Observation - Displacement
    gName = 'HDFEOS/GRIDS/timeseries/observation'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    dsName = 'displacement'
    data = ts_obj.read(print_msg=False)
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                dtype=np.float32,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['_FillValue'] = FLOAT_ZERO
    dset.attrs['Units'] = 'meters'

    dsName = 'date'
    # dates as fixed-length byte strings (np.string_ is removed in NumPy 2)
    data = np.array(dateList, dtype=np.bytes_)
    group.create_dataset(dsName, data=data)
    print('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'.format(g=gName,
                                                                          d=dsName,
                                                                          w=maxDigit,
                                                                          t=str(data.dtype),
                                                                          s=data.shape))

    dsName = 'bperp'
    data = np.array(ts_obj.pbase, dtype=np.float32)
    group.create_dataset(dsName, data=data)
    print('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'.format(g=gName,
                                                                          d=dsName,
                                                                          w=maxDigit,
                                                                          t=str(data.dtype),
                                                                          s=data.shape))

    # Write Quality
    gName = 'HDFEOS/GRIDS/timeseries/quality'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    ## 1 - temporalCoherence
    dsName = 'temporalCoherence'
    data = readfile.read(coh_file)[0]
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = FLOAT_ZERO
    dset.attrs['_FillValue'] = FLOAT_ZERO
    dset.attrs['Units'] = '1'

    ## 2 - mask
    dsName = 'mask'
    data = readfile.read(mask_file, datasetName='mask')[0]
    print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
           ' with compression={c}').format(g=gName,
                                           d=dsName,
                                           w=maxDigit,
                                           t=str(data.dtype),
                                           s=data.shape,
                                           c=compression))
    dset = group.create_dataset(dsName,
                                data=data,
                                chunks=True,
                                compression=compression)
    dset.attrs['Title'] = dsName
    dset.attrs['MissingValue'] = BOOL_ZERO
    dset.attrs['_FillValue'] = BOOL_ZERO
    dset.attrs['Units'] = '1'

    # Write Geometry
    # Required: height, incidenceAngle
    # Optional: rangeCoord, azimuthCoord, azimuthAngle, slantRangeDistance, waterMask, shadowMask
    gName = 'HDFEOS/GRIDS/timeseries/geometry'
    print('create group   /{}'.format(gName))
    group = f.create_group(gName)

    geom_obj = geometry(geom_file)
    geom_obj.open(print_msg=False)
    for dsName in geom_obj.datasetNames:
        data = geom_obj.read(datasetName=dsName, print_msg=False)
        print(('create dataset /{g}/{d:<{w}} of {t:<10} in size of {s}'
               ' with compression={c}').format(g=gName,
                                               d=dsName,
                                               w=maxDigit,
                                               t=str(data.dtype),
                                               s=data.shape,
                                               c=compression))
        dset = group.create_dataset(dsName,
                                    data=data,
                                    chunks=True,
                                    compression=compression)

        dset.attrs['Title'] = dsName
        if dsName in ['height',
                      'slantRangeDistance',
                      'bperp']:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = 'meters'

        elif dsName in ['incidenceAngle',
                        'azimuthAngle',
                        'latitude',
                        'longitude']:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = 'degrees'

        elif dsName in ['rangeCoord', 'azimuthCoord']:
            dset.attrs['MissingValue'] = FLOAT_ZERO
            dset.attrs['_FillValue'] = FLOAT_ZERO
            dset.attrs['Units'] = '1'

        elif dsName in ['waterMask', 'shadowMask']:
            dset.attrs['MissingValue'] = BOOL_ZERO
            dset.attrs['_FillValue'] = BOOL_ZERO
            dset.attrs['Units'] = '1'

    # Write Attributes to the HDF File
    print('write metadata to root level')
    for key, value in metadata.items():
        f.attrs[key] = value
    f.close()
    print('finished writing to {}'.format(out_file))
    return out_file
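# --- Usage sketch (added, not from the source): assembling a MintPy time
# --- series plus its quality and geometry layers into one HDF-EOS5 file.
# --- All file names below are illustrative assumptions; readfile.read_attribute
# --- is used here on the assumption that it returns the file's metadata dict.
metadata = readfile.read_attribute('timeseries_ERA5_demErr.h5')
write2hdf5(out_file='S1_IW12_0593_20141213_20180619.he5',   # hypothetical name
           ts_file='timeseries_ERA5_demErr.h5',
           coh_file='temporalCoherence.h5',
           mask_file='maskTempCoh.h5',
           geom_file='inputs/geometryRadar.h5',
           metadata=metadata)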