Example #1
def prepare_stack(inputDir, filePattern, metadata=dict(), baseline_dict=dict(), update_mode=True):
    print('prepare .rsc file for ', filePattern)
    isce_files = sorted(glob.glob(os.path.join(os.path.abspath(inputDir), '*', filePattern)))
    if len(isce_files) == 0:
        raise FileNotFoundError('no file found in pattern: {}'.format(filePattern))

    # write .rsc file for each interferogram file
    num_file = len(isce_files)
    prog_bar = ptime.progressBar(maxValue=num_file)
    for i in range(num_file):
        # prepare metadata for current file
        isce_file = isce_files[i]
        dates = os.path.basename(os.path.dirname(isce_file)).split('_')  # TODO: modify to YYYYMMDDTHHMMSS
        ifg_metadata = readfile.read_attribute(isce_file, metafile_ext='.xml')
        ifg_metadata.update(metadata)
        ifg_metadata = add_ifgram_metadata(ifg_metadata, dates, baseline_dict)

        # write .rsc file
        rsc_file = isce_file+'.rsc'
        writefile.write_roipac_rsc(ifg_metadata, rsc_file,
                                   update_mode=update_mode,
                                   print_msg=False)
        prog_bar.update(i+1, suffix='{}_{}'.format(dates[0], dates[1]))
    prog_bar.close()
    return
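A hypothetical invocation for reference (the directory layout, file pattern, and metadata values below are illustrative assumptions, not from the original source):

# illustrative only: assumes an ISCE-style layout with one date1_date2 folder per interferogram
prepare_stack(inputDir='./merged/interferograms',
              filePattern='filt_fine.unw',
              metadata={'PROCESSOR': 'isce'},
              baseline_dict={},
              update_mode=True)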
Example #2
def main(argv):

    try:
        file = argv[0]
    except IndexError:
        usage()
        sys.exit(1)

    g_name = 'unwrapPhase'
    g_name_out = 'unwrapPhase_laplace'

    print('calculate Laplace filter of {} based on approximate second derivatives.'.format(g_name))

    f = h5py.File(file, 'a')
    ds = f[g_name]
    if g_name_out in f.keys():
        ds_out = f[g_name_out]
    else:
        ds_out = f.create_dataset(g_name_out, shape=ds.shape, dtype=np.float32, chunks=True, compression=None)
    print('write to dataset /{}'.format(g_name_out))

    num_ifgram = ds.shape[0]
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        unw = ds[i, :, :]
        ds_out[i, :, :] = laplace(unw)
        prog_bar.update(i+1, suffix='{}/{}'.format(i+1, num_ifgram))
    prog_bar.close()
    f.close()
    print('finished writing to {}'.format(file))
    return
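For context, scipy.ndimage.laplace approximates the sum of second derivatives with an N-D Laplace filter; a minimal standalone sketch of what each loop iteration above computes:

import numpy as np
from scipy.ndimage import laplace

# toy 2D phase array: the filter responds to local curvature, e.g. the sharp
# jumps left behind by unwrapping errors
unw = np.array([[0., 0., 0.],
                [0., 1., 0.],
                [0., 0., 0.]], dtype=np.float32)
print(laplace(unw))   # strongest (negative) response at the central spike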
Example #3
def run_2to3_timeseries(py2_file, py3_file):
    """Convert timeseries file from py2-MintPy format to py3-MintPy format"""
    # read data from py2_file
    atr = readfile.read_attribute(py2_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    with h5py.File(py2_file, 'r') as f:
        date_list = list(f['timeseries'].keys())
        num_date = len(date_list)
        ts_data = np.zeros((num_date, length, width), np.float32)
        print('reading time-series ...')
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            ts_data[i, :, :] = f['timeseries/{}'.format(date_list[i])][:]
            prog_bar.update(i + 1, suffix=date_list[i])
        prog_bar.close()

    # prepare metadata
    bperp = np.array([float(i) for i in atr['P_BASELINE_TIMESERIES'].split()],
                     dtype=np.float32)
    dates = np.array(date_list, np.bytes_)  # np.string_ is a deprecated alias removed in NumPy 2.0
    atr['REF_DATE'] = date_list[0]
    for key in ['P_BASELINE_TIMESERIES',
                'P_BASELINE_TOP_TIMESERIES',
                'P_BASELINE_BOTTOM_TIMESERIES']:
        atr.pop(key, None)

    # write to py3_file
    ts_obj = timeseries(py3_file)
    ts_obj.write2hdf5(data=ts_data, dates=dates, bperp=bperp, metadata=atr)
    return py3_file
Example #4
def bin_variance(distance,
                 variance,
                 step=5e3,
                 min_pair_num=100e3,
                 print_msg=True):
    x_steps = np.arange(0, np.max(distance), step)
    num_step = len(x_steps)
    std = np.zeros(x_steps.shape)
    stdStd = np.zeros(std.shape)
    p_num = np.zeros(x_steps.shape)

    if print_msg:
        prog_bar = ptime.progressBar(maxValue=num_step)
    for i in range(num_step):
        x = x_steps[i]
        idx = (distance > max(0, x - step / 2.)) * (distance < x + step / 2.)
        p_num[i] = np.sum(idx)
        std[i] = np.mean(np.sqrt(variance[idx]))
        stdStd[i] = np.std(np.sqrt(variance[idx]))
        if print_msg:
            prog_bar.update(i + 1, every=10)
    if print_msg:
        prog_bar.close()

    max_step_idx = int(max(np.argwhere(p_num > min_pair_num)))
    return x_steps[0:max_step_idx], std[0:max_step_idx], stdStd[0:max_step_idx]
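A usage sketch with synthetic inputs; step and min_pair_num are scaled down for the toy data, and print_msg=False skips the progress bar:

import numpy as np

rng = np.random.default_rng(0)
distance = rng.uniform(0, 50e3, 200_000)                      # pair distances in meters
variance = 1e-3 * distance * rng.uniform(0.5, 1.5, distance.size)
dist, std, std_std = bin_variance(distance, variance,
                                  step=5e3, min_pair_num=1e3,
                                  print_msg=False)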
Example #5
def structure_function(data,
                       lat,
                       lon,
                       step=5e3,
                       min_pair_num=100e3,
                       print_msg=True):
    num_sample = len(data)
    distance = np.zeros((num_sample**2))
    variance = np.zeros((num_sample**2))
    if print_msg:
        prog_bar = ptime.progressBar(maxValue=num_sample)
    for i in range(num_sample):
        distance[i * num_sample:(i + 1) * num_sample] = get_distance(
            lat, lon, i)
        variance[i * num_sample:(i + 1) * num_sample] = np.square(data -
                                                                  data[i])
        if print_msg:
            prog_bar.update(i + 1, every=10)
    if print_msg:
        prog_bar.close()

    dist, std, stdStd = bin_variance(distance,
                                     variance,
                                     step=step,
                                     min_pair_num=min_pair_num,
                                     print_msg=print_msg)
    return dist, std, stdStd
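get_distance() is not shown above; a plausible stand-in (an assumption, not the original helper) computing haversine distances in meters from point i to all points:

import numpy as np

EARTH_RADIUS = 6371e3   # mean Earth radius in meters

def get_distance(lat, lon, i):
    """Great-circle distance in meters from point i to all (lat, lon) points,
    a hypothetical sketch of the helper used in structure_function()."""
    lat0, lon0 = np.deg2rad(lat[i]), np.deg2rad(lon[i])
    lat1, lon1 = np.deg2rad(lat), np.deg2rad(lon)
    a = (np.sin((lat1 - lat0) / 2)**2
         + np.cos(lat0) * np.cos(lat1) * np.sin((lon1 - lon0) / 2)**2)
    return 2 * EARTH_RADIUS * np.arcsin(np.sqrt(a))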
Example #6
def prepare_stack(inputDir, filePattern, metadata=dict(), baseline_dict=dict(), update_mode=True):
    print('prepare .rsc file for ', filePattern)
    isce_files = sorted(glob.glob(os.path.join(os.path.abspath(inputDir), '*', filePattern)))
    if len(isce_files) == 0:
        raise FileNotFoundError('no file found in pattern: {}'.format(filePattern))

    # write .rsc file for each interferogram file
    num_file = len(isce_files)
    prog_bar = ptime.progressBar(maxValue=num_file)
    for i in range(num_file):
        isce_file = isce_files[i]
        # prepare metadata for current file
        ifg_metadata = readfile.read_attribute(isce_file, metafile_ext='.xml')
        ifg_metadata.update(metadata)
        dates = os.path.basename(os.path.dirname(isce_file)).split('_')
        ifg_metadata = add_ifgram_metadata(ifg_metadata, dates, baseline_dict)

        # write .rsc file
        rsc_file = isce_file+'.rsc'
        writefile.write_roipac_rsc(ifg_metadata, rsc_file,
                                   update_mode=update_mode,
                                   print_msg=False)
        prog_bar.update(i+1, suffix='{}_{}'.format(dates[0], dates[1]))
    prog_bar.close()
    return
Example #7
    def run_resample(self,
                     src_data,
                     interp_method='nearest',
                     fill_value=np.nan,
                     nprocs=1,
                     print_msg=True):
        """Run interpolation operation for input 2D/3D data
        Parameters: src_data      : 2D/3D np.array, source data to be geocoded
                    interp_method : string, nearest | linear
                    fill_value    : NaN or number
                    nprocs        : int, number of processes to be used
                    print_msg     : bool
        Returns:    geo_data      : 2D/3D np.array
        """
        # use pyresample
        if self.processor == 'pyresample':
            if len(src_data.shape) == 3:
                src_data = np.moveaxis(src_data, 0, -1)

            if src_data.dtype == np.bool_:
                fill_value = False
                print('restrict fill value to False for bool type source data')

            # resample source data into target data
            geo_data = self.run_pyresample(src_data=src_data,
                                           interp_method=interp_method,
                                           fill_value=fill_value,
                                           nprocs=nprocs,
                                           radius=None,
                                           print_msg=True)

            if len(geo_data.shape) == 3:
                geo_data = np.moveaxis(geo_data, -1, 0)

        # use scipy.interpolate.RegularGridInterpolator
        else:
            if print_msg:
                print(
                    'resampling using scipy.interpolate.RegularGridInterpolator ...'
                )
            if len(src_data.shape) == 3:
                geo_data = np.empty(
                    (src_data.shape[0], self.length, self.width),
                    src_data.dtype)
                prog_bar = ptime.progressBar(maxValue=src_data.shape[0])
                for i in range(src_data.shape[0]):
                    geo_data[i, :, :] = self.run_regular_grid_interpolator(
                        src_data=src_data[i, :, :],
                        interp_method=interp_method,
                        fill_value=fill_value,
                        print_msg=True)
                    prog_bar.update(i + 1)
                prog_bar.close()
            else:
                geo_data = self.run_regular_grid_interpolator(
                    src_data=src_data,
                    interp_method=interp_method,
                    fill_value=fill_value,
                    print_msg=True)
        return geo_data
Example #8
def get_number_of_nonzero_closure_phase(ifgram_file,
                                        dsName='unwrapPhase',
                                        step=100):
    # read ifgramStack file
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = stack_obj.get_design_matrix4triplet(date12_list)
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsName,
                                              dropIfgram=True).reshape(
                                                  num_ifgram, -1)

    # calculate number of nonzero closure phase
    closure_int = np.zeros((length, width), np.int16)
    num_loop = int(np.ceil(length / step))
    prog_bar = ptime.progressBar(maxValue=num_loop)
    for i in range(num_loop):
        r0 = i * step
        r1 = min((r0 + step), stack_obj.length)
        box = (0, r0, stack_obj.width, r1)
        unw = ifginv.read_unwrap_phase(stack_obj,
                                       box=box,
                                       ref_phase=ref_phase,
                                       unwDatasetName=dsName,
                                       dropIfgram=True,
                                       print_msg=False).reshape(
                                           num_ifgram, -1)
        closure_pha = np.dot(C, unw)
        cint = np.round((closure_pha - ut.wrap(closure_pha)) / (2. * np.pi))
        closure_int[r0:r1, :] = np.sum(cint != 0, axis=0).reshape(-1, width)
        prog_bar.update(i + 1, every=1)
    prog_bar.close()
    return closure_int
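The key step above converts each triplet closure phase into an integer number of 2*pi cycles. A self-contained sketch of that arithmetic, assuming ut.wrap() wraps values into [-pi, pi):

import numpy as np

def wrap(x):
    """Wrap phase into [-pi, pi); a sketch of the ut.wrap() assumed above."""
    return (x + np.pi) % (2 * np.pi) - np.pi

# closure of unwrapped phases p12 + p23 - p13: any deviation from its wrapped
# value is a whole number of cycles, i.e. a likely unwrapping error
closure_pha = 2 * np.pi + 0.01          # one spurious cycle plus a little noise
n_cycle = np.round((closure_pha - wrap(closure_pha)) / (2 * np.pi))
print(int(n_cycle))                     # -> 1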
Example #9
def get_nonzero_phase_closure(ifgram_file,
                              out_file=None,
                              thres=0.1,
                              unwDatasetName='unwrapPhase'):
    """Calculate/Read number of non-zero phase closure
    Parameters: ifgram_file : string, path of ifgram stack file
                out_file    : string, path of num non-zero phase closure file
    Returns:    num_nonzero_closure : 2D np.array in size of (length, width)
    """
    if not out_file:
        out_file = 'numNonzeroPhaseClosure_{}.h5'.format(unwDatasetName)
    if os.path.isfile(out_file) and readfile.read_attribute(out_file):
        print('1. read number of nonzero phase closure from file: {}'.format(
            out_file))
        num_nonzero_closure = readfile.read(out_file)[0]
    else:
        obj = ifgramStack(ifgram_file)
        obj.open(print_msg=False)
        length, width = obj.length, obj.width

        ref_phase = obj.get_reference_phase(unwDatasetName=unwDatasetName,
                                            dropIfgram=False)
        C = obj.get_design_matrix4triplet(
            obj.get_date12_list(dropIfgram=False))

        # calculate phase closure line by line to save memory usage
        num_nonzero_closure = np.zeros((length, width), np.float32)
        print(
            '1. calculating phase closure of all pixels from dataset - {} ...'.
            format(unwDatasetName))
        line_step = 10
        num_loop = int(np.ceil(length / line_step))
        prog_bar = ptime.progressBar(maxValue=num_loop)
        for i in range(num_loop):
            # read phase
            i0, i1 = i * line_step, min(length, (i + 1) * line_step)
            box = (0, i0, width, i1)
            pha_data = ifginv.read_unwrap_phase(obj,
                                                box,
                                                ref_phase,
                                                unwDatasetName=unwDatasetName,
                                                dropIfgram=False,
                                                print_msg=False)
            # calculate phase closure
            pha_closure = np.dot(C, pha_data)
            pha_closure = np.abs(pha_closure - ut.wrap(pha_closure))
            # get number of non-zero phase closure
            num_nonzero = np.sum(pha_closure >= thres, axis=0)
            num_nonzero_closure[i0:i1, :] = num_nonzero.reshape(i1 - i0, width)
            prog_bar.update(i + 1,
                            every=1,
                            suffix='{}/{} lines'.format((i + 1) * line_step,
                                                        length))
        prog_bar.close()

        atr = dict(obj.metadata)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = '1'
        writefile.write(num_nonzero_closure, out_file=out_file, metadata=atr)
    return num_nonzero_closure
Example #10
def get_number_of_nonzero_closure_phase(ifgram_file, dsName='unwrapPhase', step=100):
    # read ifgramStack file
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = stack_obj.get_design_matrix4triplet(date12_list)
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsName, dropIfgram=True).reshape(num_ifgram, -1)

    # calculate number of nonzero closure phase
    closure_int = np.zeros((length, width), np.int16)
    num_loop = int(np.ceil(length / step))
    prog_bar = ptime.progressBar(maxValue=num_loop)
    for i in range(num_loop):
        r0 = i * step
        r1 = min((r0+step), stack_obj.length)
        box = (0, r0, stack_obj.width, r1)
        unw = ifginv.read_unwrap_phase(stack_obj, box=box,
                                       ref_phase=ref_phase,
                                       unwDatasetName=dsName,
                                       dropIfgram=True,
                                       print_msg=False).reshape(num_ifgram, -1)
        closure_pha = np.dot(C, unw)
        cint = np.round((closure_pha - ut.wrap(closure_pha)) / (2.*np.pi))
        closure_int[r0:r1, :] = np.sum(cint != 0, axis=0).reshape(-1, width)
        prog_bar.update(i+1, every=1)
    prog_bar.close()
    return closure_int
Example #11
def calculate_temporal_coherence_patch(ifgram_file,
                                       timeseries_file,
                                       box=None,
                                       ifg_num_file=None):
    atr = readfile.read_attribute(timeseries_file)
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # Read timeseries data
    ts_obj = timeseries(timeseries_file)
    ts_obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = ts_obj.read(box=box, print_msg=False).reshape(ts_obj.numDate, -1)
    ts_data = ts_data[1:, :]
    ts_data *= -4 * np.pi / float(atr['WAVELENGTH'])

    # Read ifgram data
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A = stack_obj.get_design_matrix4timeseries(
        stack_obj.get_date12_list(dropIfgram=True))[0]
    print('reading unwrapPhase data from file: {}'.format(ifgram_file))
    ifgram_data = stack_obj.read(datasetName='unwrapPhase',
                                 box=box).reshape(A.shape[0], -1)
    ref_value = stack_obj.get_reference_phase(dropIfgram=True).reshape((-1, 1))
    ifgram_data -= np.tile(ref_value, (1, ifgram_data.shape[1]))

    ifgram_diff = ifgram_data - np.dot(A, ts_data)
    del ts_data

    pixel_num = ifgram_data.shape[1]
    temp_coh = np.zeros((pixel_num), np.float32)
    # (fast) nasty solution, which uses all phase values, including invalid zero phases
    if not ifg_num_file:
        temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff),
                                 axis=0)) / ifgram_diff.shape[0]

    # (slow) same solution as ifgram_inversion.py, considering:
    #   1) invalid zero phase in ifgram
    #   2) design matrix rank deficiency.
    else:
        print(
            'considering different number of interferograms used in network inversion for each pixel'
        )
        ifg_num_map = readfile.read(ifg_num_file, box=box)[0].flatten()
        prog_bar = ptime.progressBar(maxValue=pixel_num)
        for i in range(pixel_num):
            if ifg_num_map[i] > 0:
                idx = ifgram_data[:, i] != 0.
                temp_diff = ifgram_diff[idx, i]
                temp_coh[i] = np.abs(np.sum(np.exp(1j * temp_diff),
                                            axis=0)) / temp_diff.shape[0]
            prog_bar.update(i + 1,
                            every=1000,
                            suffix='{}/{}'.format(i + 1, pixel_num))
        prog_bar.close()

    temp_coh = np.reshape(temp_coh, (box[3] - box[1], box[2] - box[0]))
    return temp_coh
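The fast branch above is the standard temporal coherence definition: the magnitude of the mean unit phasor of the phase residuals. A toy illustration for a single pixel:

import numpy as np

ifgram_diff = np.array([0.05, -0.10, 0.02, 0.08])   # residual phases in radians
temp_coh = np.abs(np.sum(np.exp(1j * ifgram_diff))) / ifgram_diff.size
print('{:.3f}'.format(temp_coh))                    # near 1.0 -> reliable pixel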
Example #12
def main(argv):
    try:
        file = argv[0]
    except IndexError:
        usage()
        sys.exit(1)

    outfile = os.path.splitext(file)[0]+'_wrap'+os.path.splitext(file)[1]
    one_cycle = 0.05    # re-wrapping cycle in meters for time-series; phase data below uses rewrap()'s default of 2*pi

    atr = readfile.read_attribute(file)
    k = atr['FILE_TYPE']

    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(file, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progressBar(maxValue=epoch_num)

        print('writing >>> '+outfile)
        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: '+str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]

                data_wrap = rewrap(data)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data_wrap)
                for key, value in h5[k][epoch].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: '+str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_wrap = rewrap(data, one_cycle)

                dset = group.create_dataset(epoch, data=data_wrap)
                prog_bar.update(i+1, suffix=epoch)
            for key, value in h5[k].attrs.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    print('Done.')
    return outfile
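rewrap() is referenced but not defined in this snippet; a minimal sketch under the assumption that it wraps values into [-cycle/2, cycle/2]:

import numpy as np

def rewrap(data, cycle=2*np.pi):
    """Wrap data into [-cycle/2, cycle/2]; an assumed implementation of the
    rewrap() used above (default 2*pi for phase, 0.05 m for time-series)."""
    return data - np.round(data / cycle) * cycle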
Example #13
def calc_iono_ramp_timeseries_igs(tec_dir, tec_sol, interp_method, ts_file, geom_file, iono_file,
                                  rotate_tec_map=True, sub_tec_ratio=None, update_mode=True):
    """Calculate the time-series of 2D ionospheric delay from IGS TEC data.
    Considering the variation of the incidence angle along range direction.

    Parameters: tec_dir   - str, path of the local TEC directory
                ts_file   - str, path of the time-series file
                geom_file - str, path of the geometry file including incidenceAngle data
                iono_file - str, path of output iono ramp time-series file
    Returns:    iono_file - str, path of output iono ramp time-series file
    """
    print("\n------------------------------------------------------------------------------")
    # prepare geometry
    iono_lat, iono_lon = iono.prep_geometry_iono(geom_file, print_msg=True)[1:3]

    # prepare date/time
    date_list = timeseries(ts_file).get_date_list()
    meta = readfile.read_attribute(ts_file)
    utc_sec = float(meta['CENTER_LINE_UTC'])
    h, s = divmod(utc_sec, 3600)
    m, s = divmod(s, 60)
    print('UTC time: {:02.0f}:{:02.0f}:{:02.1f}'.format(h, m, s))

    # read IGS TEC
    vtec_list = []
    print('read IGS TEC file ...')
    print('interpolation method: {}'.format(interp_method))
    prog_bar = ptime.progressBar(maxValue=len(date_list))
    for i, date_str in enumerate(date_list):
        # read zenith TEC
        tec_file = iono.get_igs_tec_filename(tec_dir, date_str, sol=tec_sol)
        vtec = iono.get_igs_tec_value(
            tec_file,
            utc_sec,
            lat=iono_lat,
            lon=iono_lon,
            interp_method=interp_method,
            rotate_tec_map=rotate_tec_map,
        )
        vtec_list.append(vtec)
        prog_bar.update(i+1, suffix=date_str)
    prog_bar.close()

    # TEC --> iono ramp
    vtec2iono_ramp_timeseries(
        date_list=date_list,
        vtec_list=vtec_list,
        geom_file=geom_file,
        iono_file=iono_file,
        sub_tec_ratio=sub_tec_ratio,
        update_mode=update_mode,
    )

    return iono_file
Example #14
def get_nonzero_phase_closure(ifgram_file, out_file=None, thres=0.1, unwDatasetName='unwrapPhase'):
    """Calculate/Read number of non-zero phase closure
    Parameters: ifgram_file : string, path of ifgram stack file
                out_file    : string, path of num non-zero phase closure file
    Returns:    num_nonzero_closure : 2D np.array in size of (length, width)
    """
    if not out_file:
        out_file = 'numNonzeroPhaseClosure_{}.h5'.format(unwDatasetName)
    if os.path.isfile(out_file) and readfile.read_attribute(out_file):
        print('1. read number of nonzero phase closure from file: {}'.format(out_file))
        num_nonzero_closure = readfile.read(out_file)[0]
    else:
        obj = ifgramStack(ifgram_file)
        obj.open(print_msg=False)
        length, width = obj.length, obj.width

        ref_phase = obj.get_reference_phase(unwDatasetName=unwDatasetName, dropIfgram=False)
        C = obj.get_design_matrix4triplet(obj.get_date12_list(dropIfgram=False))

        # calculate phase closure line by line to save memory usage
        num_nonzero_closure = np.zeros((length, width), np.float32)
        print('1. calculating phase closure of all pixels from dataset - {} ...'.format(unwDatasetName))
        line_step = 10
        num_loop = int(np.ceil(length / line_step))
        prog_bar = ptime.progressBar(maxValue=num_loop)
        for i in range(num_loop):
            # read phase
            i0, i1 = i*line_step, min(length, (i+1)*line_step)
            box = (0, i0, width, i1)
            pha_data = ifginv.read_unwrap_phase(obj,
                                                box,
                                                ref_phase,
                                                unwDatasetName=unwDatasetName,
                                                dropIfgram=False,
                                                print_msg=False)
            # calculate phase closure
            pha_closure = np.dot(C, pha_data)
            pha_closure = np.abs(pha_closure - ut.wrap(pha_closure))
            # get number of non-zero phase closure
            num_nonzero = np.sum(pha_closure >= thres, axis=0)
            num_nonzero_closure[i0:i1, :] = num_nonzero.reshape(i1-i0, width)
            prog_bar.update(i+1, every=1, suffix='{}/{} lines'.format((i+1)*line_step, length))
        prog_bar.close()

        atr = dict(obj.metadata)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = '1'
        writefile.write(num_nonzero_closure, out_file=out_file, metadata=atr)
    return num_nonzero_closure
Example #15
    def run_resample(self, src_data, interp_method='nearest', fill_value=np.nan, nprocs=1, print_msg=True):
        """Run interpolation operation for input 2D/3D data
        Parameters: src_data      : 2D/3D np.array, source data to be geocoded
                    interp_method : string, nearest | linear
                    fill_value    : NaN or number
                    nprocs        : int, number of processes to be used
                    print_msg     : bool
        Returns:    geo_data      : 2D/3D np.array
        """
        # use pyresample
        if self.processor == 'pyresample':
            if len(src_data.shape) == 3:
                src_data = np.moveaxis(src_data, 0, -1)

            if src_data.dtype == np.bool_:
                fill_value = False
                print('restrict fill value to False for bool type source data')

            # resample source data into target data
            geo_data = self.run_pyresample(src_data=src_data,
                                           interp_method=interp_method,
                                           fill_value=fill_value,
                                           nprocs=nprocs,
                                           radius=None,
                                           print_msg=True)

            if len(geo_data.shape) == 3:
                geo_data = np.moveaxis(geo_data, -1, 0)

        # use scipy.interpolate.RegularGridInterpolator
        else:
            if print_msg:
                print('resampling using scipy.interpolate.RegularGridInterpolator ...')
            if len(src_data.shape) == 3:
                geo_data = np.empty((src_data.shape[0], self.length, self.width), src_data.dtype)
                prog_bar = ptime.progressBar(maxValue=src_data.shape[0])
                for i in range(src_data.shape[0]):
                    geo_data[i, :, :] = self.run_regular_grid_interpolator(src_data=src_data[i, :, :],
                                                                           interp_method=interp_method,
                                                                           fill_value=fill_value,
                                                                           print_msg=True)
                    prog_bar.update(i+1)
                prog_bar.close()
            else:
                geo_data = self.run_regular_grid_interpolator(src_data=src_data,
                                                              interp_method=interp_method,
                                                              fill_value=fill_value,
                                                              print_msg=True)
        return geo_data
Example #16
def calculate_temporal_coherence_patch(ifgram_file, timeseries_file, box=None, ifg_num_file=None):
    atr = readfile.read_attribute(timeseries_file)
    if not box:
        box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))

    # Read timeseries data
    ts_obj = timeseries(timeseries_file)
    ts_obj.open(print_msg=False)
    print('reading timeseries data from file: {}'.format(timeseries_file))
    ts_data = ts_obj.read(box=box, print_msg=False).reshape(ts_obj.numDate, -1)
    ts_data = ts_data[1:, :]
    ts_data *= -4*np.pi/float(atr['WAVELENGTH'])

    # Read ifgram data
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A = stack_obj.get_design_matrix4timeseries(stack_obj.get_date12_list(dropIfgram=True))[0]
    print('reading unwrapPhase data from file: {}'.format(ifgram_file))
    ifgram_data = stack_obj.read(datasetName='unwrapPhase', box=box).reshape(A.shape[0], -1)
    ref_value = stack_obj.get_reference_phase(dropIfgram=True).reshape((-1, 1))
    ifgram_data -= np.tile(ref_value, (1, ifgram_data.shape[1]))

    ifgram_diff = ifgram_data - np.dot(A, ts_data)
    del ts_data

    pixel_num = ifgram_data.shape[1]
    temp_coh = np.zeros((pixel_num), np.float32)
    # (fast) nasty solution, which uses all phase values, including invalid zero phases
    if not ifg_num_file:
        temp_coh = np.abs(np.sum(np.exp(1j*ifgram_diff), axis=0)) / ifgram_diff.shape[0]

    # (slow) same solution as ifgram_inversion.py, considering:
    #   1) invalid zero phase in ifgram
    #   2) design matrix rank deficiency.
    else:
        print('considering different number of interferograms used in network inversion for each pixel')
        ifg_num_map = readfile.read(ifg_num_file, box=box)[0].flatten()
        prog_bar = ptime.progressBar(maxValue=pixel_num)
        for i in range(pixel_num):
            if ifg_num_map[i] > 0:
                idx = ifgram_data[:, i] != 0.
                temp_diff = ifgram_diff[idx, i]
                temp_coh[i] = np.abs(np.sum(np.exp(1j*temp_diff), axis=0)) / temp_diff.shape[0]
            prog_bar.update(i+1, every=1000, suffix='{}/{}'.format(i+1, pixel_num))
        prog_bar.close()

    temp_coh = np.reshape(temp_coh, (box[3]-box[1], box[2]-box[0]))
    return temp_coh
Example #17
def prepare_stack(unw_files, meta, update_mode=True):
    """Prepare .rsc file for all unwrapped interferogram files."""
    num_file = len(unw_files)
    if num_file == 0:
        raise FileNotFoundError('NO unwrapped interferogram file found!')

    # write .rsc file for each interferogram file
    prog_bar = ptime.progressBar(maxValue=num_file)
    for i, unw_file in enumerate(unw_files):
        ifg_dir = os.path.dirname(unw_file)
        ifg_meta = {}

        # copy over the common metadata
        for key, value in meta.items():
            ifg_meta[key] = value

        # update from .grd file
        ifg_meta.update(readfile.read_gdal_vrt(unw_file))

        # add DATE12
        prm_files = get_prm_files(ifg_dir)
        date1, date2 = [os.path.splitext(os.path.basename(i))[0] for i in prm_files]
        ifg_meta['DATE12'] = '{}-{}'.format(ptime.yymmdd(date1), ptime.yymmdd(date2))

        # and P_BASELINE_TOP/BOTTOM_HDR
        baseline_file = os.path.join(ifg_dir, 'baseline.txt')
        if os.path.isfile(baseline_file):
            bDict = readfile.read_template(baseline_file)
            ifg_meta['P_BASELINE_TOP_HDR'] = bDict['B_perpendicular']
            ifg_meta['P_BASELINE_BOTTOM_HDR'] = bDict['B_perpendicular']
        else:
            ifg_meta['P_BASELINE_TOP_HDR'] = '0'
            ifg_meta['P_BASELINE_BOTTOM_HDR'] = '0'
            msg = 'WARNING: No baseline file found in: {}. '.format(baseline_file)
            msg += 'Set P_BASELINE* to 0 and continue.'
            print(msg)

        # write .rsc file
        rsc_file = unw_file+'.rsc'
        writefile.write_roipac_rsc(ifg_meta, rsc_file,
                                   update_mode=update_mode,
                                   print_msg=False)

        prog_bar.update(i+1, suffix='{}_{}'.format(date1, date2))
    prog_bar.close()
    return
Example #18
def structure_function(data, lat, lon, step=5e3, min_pair_num=100e3, print_msg=True):
    num_sample = len(data)
    distance = np.zeros((num_sample**2))
    variance = np.zeros((num_sample**2))
    if print_msg:
        prog_bar = ptime.progressBar(maxValue=num_sample)
    for i in range(num_sample):
        distance[i*num_sample:(i+1)*num_sample] = get_distance(lat, lon, i)
        variance[i*num_sample:(i+1)*num_sample] = np.square(data - data[i])
        if print_msg:
            prog_bar.update(i+1, every=10)
    if print_msg:
        prog_bar.close()

    dist, std, stdStd = bin_variance(
        distance, variance, step=step, min_pair_num=min_pair_num, print_msg=print_msg)
    return dist, std, stdStd
Example #19
def asc_desc2horz_vert(dlos, los_inc_angle, los_az_angle, horz_az_angle=-90):
    """Decompose asc / desc LOS data into horz / vert data.
    Parameters: dlos          - 2D np.ndarray in size of (num_file, num_pixel), LOS displacement in meters.
                los_inc_angle - 1/2D np.ndarray in size of (num_file), num_pixel), LOS incidence angle in radians.
                los_az_angle  - 1/2D np.ndarray in size of (num_file), num_pixel), LOS azimuth   angle in radians.
                horz_az_angle - float, horizontal azimuth angle of interest in radians.
    Returns:    dhorz         - 1D np.ndarray in size of (num_pixel), horizontal displacement in meters.
                dvert         - 1D np.ndarray in size of (num_pixel), vertical   displacement in meters.
    """
    # initiate output
    num_pixel = dlos.shape[1]
    dhorz = np.zeros(num_pixel, dtype=np.float32) * np.nan
    dvert = np.zeros(num_pixel, dtype=np.float32) * np.nan

    # 1D (constant) incidence / azimuth angle --> invert once for all pixels
    if los_inc_angle.ndim == 1:
        A = get_design_matrix(los_inc_angle, los_az_angle, horz_az_angle)
        print('decomposing asc/desc into horz/vert direction ...')
        dhv = np.dot(np.linalg.pinv(A), dlos).astype(np.float32)
        dhorz = dhv[0, :]
        dvert = dhv[1, :]

    # 2D incidence / azimuth angle --> invert pixel-by-pixel
    elif los_inc_angle.ndim == 2:
        mask = np.sum(np.isnan(dlos), axis=0) == 0   # invert only pixels with valid (non-NaN) data in all files
        num_pixel2inv = int(np.sum(mask))
        idx_pixel2inv = np.where(mask)[0]
        print('number of valid pixels to decompose: {} out of {} ({:.1f}%)'.
              format(num_pixel2inv, num_pixel,
                     num_pixel2inv / num_pixel * 100))

        print(
            'decomposing asc/desc into horz/vert direction pixel-by-pixel ...')
        prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
        for i, idx in enumerate(idx_pixel2inv):
            A = get_design_matrix(los_inc_angle[:, idx], los_az_angle[:, idx],
                                  horz_az_angle)
            dhv = np.dot(np.linalg.pinv(A), dlos[:, idx]).astype(np.float32)
            dhorz[idx] = dhv[0]
            dvert[idx] = dhv[1]
            prog_bar.update(i + 1,
                            every=1000,
                            suffix=f'{i+1}/{num_pixel2inv} pixels')
        prog_bar.close()

    return dhorz, dvert
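get_design_matrix() is not included here; a plausible sketch of the two-component LOS decomposition it presumably builds (the sign convention is an assumption), with all angles in degrees:

import numpy as np

def get_design_matrix(los_inc_angle, los_az_angle, horz_az_angle):
    """Design matrix G such that dlos = G.dot([dhorz, dvert]).
    A hypothetical stand-in for the helper used above; angles in degrees."""
    inc = np.deg2rad(np.atleast_1d(los_inc_angle))
    az = np.deg2rad(np.atleast_1d(los_az_angle))
    G = np.zeros((inc.size, 2), dtype=np.float32)
    G[:, 0] = np.sin(inc) * np.cos(az - np.deg2rad(horz_az_angle))  # horizontal
    G[:, 1] = np.cos(inc)                                           # vertical
    return G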
Example #20
def get_IPF(proj_dir, ts_file):
    """Grab the IPF version number of each sub-swatch for Sentinel-1 time-series

    Parameters: proj_dir    - str, path of the project directory
                              E.g.: ~/data/AtacamaSenDT149
                ts_file     - str, path of HDF5 file for time-series
    Returns:    date_list   - list of str, dates in YYYYMMDD format
                IPF_IW1/2/3 - list of str, IPF version number
    """
    from mintpy.objects import timeseries

    s_dir = os.path.join(proj_dir, 'secondarys')
    m_dir = os.path.join(proj_dir, 'reference')

    # date list
    date_list = timeseries(ts_file).get_date_list()
    num_date = len(date_list)
    # reference date
    m_date = [
        i for i in date_list if not os.path.isdir(os.path.join(s_dir, i))
    ][0]

    # grab IPF number
    IPF_IW1, IPF_IW2, IPF_IW3 = [], [], []
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        date_str = date_list[i]

        # get xml_dir
        if date_str == m_date:
            xml_dir = m_dir
        else:
            xml_dir = os.path.join(s_dir, date_str)

        # grab IPF version number
        for j, IPF_IW in enumerate([IPF_IW1, IPF_IW2, IPF_IW3]):
            xml_file = os.path.join(xml_dir, 'IW{}.xml'.format(j + 1))
            IPFv = load_product(xml_file).processingSoftwareVersion
            IPF_IW.append('{:.02f}'.format(float(IPFv)))

        prog_bar.update(i + 1, suffix='{} IW1/2/3'.format(date_str))
    prog_bar.close()
    return date_list, IPF_IW1, IPF_IW2, IPF_IW3
Example #21
def add_unwrapped_phase(h5File, unwStack, cohStack, connCompStack):

    dsUnw = gdal.Open(unwStack, gdal.GA_ReadOnly)
    dsCoh = gdal.Open(cohStack, gdal.GA_ReadOnly)
    dsComp = gdal.Open(connCompStack, gdal.GA_ReadOnly)

    nPairs = dsUnw.RasterCount

    h5 = h5py.File(h5File, "a")

    prog_bar = ptime.progressBar(maxValue=nPairs)
    for ii in range(nPairs):
        bnd = dsUnw.GetRasterBand(ii + 1)
        h5["unwrapPhase"][ii, :, :] = -1.0 * bnd.ReadAsArray()

        d12 = bnd.GetMetadata("unwrappedPhase")["Dates"]
        h5["date"][ii, 0] = d12.split("_")[1].encode("utf-8")
        h5["date"][ii, 1] = d12.split("_")[0].encode("utf-8")

        bnd = dsCoh.GetRasterBand(ii + 1)
        h5["coherence"][ii, :, :] = bnd.ReadAsArray()

        bnd = dsComp.GetRasterBand(ii + 1)
        h5["connectComponent"][ii, :, :] = bnd.ReadAsArray()

        bperp = float(
            dsUnw.GetRasterBand(ii + 1).GetMetadata("unwrappedPhase")
            ["perpendicularBaseline"])
        h5["bperp"][ii] = -1.0 * bperp

        h5["dropIfgram"][ii] = True
        prog_bar.update(ii + 1,
                        suffix='{}_{}'.format(
                            d12.split("_")[1],
                            d12.split("_")[0]))

    prog_bar.close()
    h5.close()
    dsUnw = None
    dsCoh = None
    dsComp = None

    return
Example #22
def bootstrap(timeseriesFile, bootCount):
    ts_data, atr = readfile.read(timeseriesFile)
    tsData = readfile.timeseries(timeseriesFile)
    if atr['UNIT'] == 'mm':
        ts_data *= 1. / 1000.

    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    dateList = tsData.get_date_list()
    sampleNo = len(dateList)
    vel = np.zeros((bootCount, (length * width)))
    prog_bar = ptime.progressBar(maxValue=bootCount, prefix='Calculating ')
    for i in range(bootCount):
        bootSamples = list(
            np.sort(resample(dateList, replace=True, n_samples=sampleNo)))
        # dropList = [x for x in dateList if x not in bootSamples]

        prog_bar.update(i + 1, suffix='Running boot number: ' + str(i + 1))
        bootList = []
        for k in bootSamples:
            bootList.append(dateList.index(k))
        numDate = len(bootList)
        ts_data_sub = ts_data[bootList, :, :].reshape(numDate, -1)

        A = tsData.get_design_matrix4average_velocity(bootSamples)
        X = np.dot(np.linalg.pinv(A), ts_data_sub)
        vel[i] = np.array(X[0, :], dtype='float32')

    prog_bar.close()
    print('Finished resampling and velocity calculation')
    velMean = vel.mean(axis=0).reshape(length, width)
    velStd = vel.std(axis=0).reshape(length, width)
    print('Calculated mean and standard deviation of bootstrap estimations')

    atr['FILE_TYPE'] = 'velocity'
    atr['UNIT'] = 'm/year'
    atr['START_DATE'] = bootSamples[0]
    atr['END_DATE'] = bootSamples[-1]
    atr['DATE12'] = '{}_{}'.format(bootSamples[0], bootSamples[-1])

    return velMean, velStd, atr
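resample() above is presumably sklearn.utils.resample; a minimal illustration of drawing one bootstrap sample of acquisition dates with replacement:

from sklearn.utils import resample

date_list = ['20190101', '20190113', '20190125', '20190206']
boot_dates = sorted(resample(date_list, replace=True, n_samples=len(date_list)))
print(boot_dates)   # e.g. ['20190101', '20190113', '20190113', '20190206']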
Example #23
def bin_variance(distance, variance, step=5e3, min_pair_num=100e3, print_msg=True):
    x_steps = np.arange(0, np.max(distance), step)
    num_step = len(x_steps)
    std = np.zeros(x_steps.shape)
    stdStd = np.zeros(std.shape)
    p_num = np.zeros(x_steps.shape)

    if print_msg:
        prog_bar = ptime.progressBar(maxValue=num_step)
    for i in range(num_step):
        x = x_steps[i]
        idx = (distance > max(0, x-step/2.)) * (distance < x+step/2.)
        p_num[i] = np.sum(idx)
        std[i] = np.mean(np.sqrt(variance[idx]))
        stdStd[i] = np.std(np.sqrt(variance[idx]))
        if print_msg:
            prog_bar.update(i+1, every=10)
    if print_msg:
        prog_bar.close()

    max_step_idx = int(max(np.argwhere(p_num > min_pair_num)))
    return x_steps[0:max_step_idx], std[0:max_step_idx], stdStd[0:max_step_idx]
Example #24
def prepare_stack(inputDir,
                  filePattern,
                  metadata=dict(),
                  baseline_dict=dict(),
                  update_mode=True):
    print('prepare .rsc file for ', filePattern)
    # fall back to the non-full-resolution pattern if no matching *.xml file is found
    if not glob.glob(os.path.join(os.path.abspath(inputDir), '*', filePattern + '.xml')):
        filePattern = filePattern.split('.full')[0]
    isce_files = sorted(
        glob.glob(
            os.path.join(os.path.abspath(inputDir), '*',
                         filePattern + '.xml')))
    if len(isce_files) == 0:
        raise FileNotFoundError(
            'no file found in pattern: {}'.format(filePattern))
    slc_dates = np.sort(os.listdir(inputDir))
    # write .rsc file for each interferogram file
    num_file = len(isce_files)
    prog_bar = ptime.progressBar(maxValue=num_file)
    for i in range(num_file):
        isce_file = isce_files[i].split('.xml')[0]
        # prepare metadata for current file
        slc_metadata = read_attribute(isce_file, metafile_ext='.xml')
        slc_metadata.update(metadata)
        dates = [slc_dates[0], os.path.basename(os.path.dirname(isce_file))]
        slc_metadata = add_slc_metadata(slc_metadata, dates, baseline_dict)

        # write .rsc file
        rsc_file = isce_file + '.rsc'
        writefile.write_roipac_rsc(slc_metadata,
                                   rsc_file,
                                   update_mode=update_mode,
                                   print_msg=False)
        prog_bar.update(i + 1, suffix='{}_{}'.format(dates[0], dates[1]))
    prog_bar.close()
    return
Example #25
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries info / data
    obj = timeseries(inps.timeseries_file)
    obj.open()

    tbase = np.array(obj.yearList, np.float32).reshape(-1, 1)
    tbase -= tbase[obj.refIndex]

    ts_data = obj.read().reshape(obj.numDate, -1)

    # Smooth acquisitions / moving window in time one by one
    print('-' * 50)
    print('filtering in time Gaussian window with size of {:.1f} years'.format(
        inps.time_win))
    ts_data_filt = np.zeros(ts_data.shape, np.float32)
    prog_bar = ptime.progressBar(maxValue=obj.numDate)
    for i in range(obj.numDate):
        # Weight from Gaussian (normal) distribution in time
        tbase_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (tbase_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        # Smooth the current acquisition
        ts_data_filt[i, :] = np.sum(ts_data * weight, axis=0)
        prog_bar.update(i + 1, suffix=obj.dateList[i])
    prog_bar.close()
    del ts_data
    ts_data_filt -= ts_data_filt[obj.refIndex, :]
    ts_data_filt = np.reshape(ts_data_filt,
                              (obj.numDate, obj.length, obj.width))

    # write filtered timeseries file
    if not inps.outfile:
        inps.outfile = '{}_tempGaussian.h5'.format(
            os.path.splitext(inps.timeseries_file)[0])
    obj_out = timeseries(inps.outfile)
    obj_out.write2hdf5(ts_data_filt, refFile=inps.timeseries_file)
    return inps.outfile
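The Gaussian weighting above, isolated on a toy set of acquisition times; time_win acts as the standard deviation of the window, in years:

import numpy as np

tbase = np.array([0.0, 0.1, 0.2, 0.5, 1.0]).reshape(-1, 1)   # toy times in years
time_win = 0.3
weight = np.exp(-0.5 * (tbase[0] - tbase)**2 / time_win**2)
weight /= np.sum(weight)
print(weight.flatten())   # peaks at the current date, decays with time lag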
Example #26
def get_idx(long_array, short_array, n=2):
    sort_idx = np.argsort(long_array)
    long_array_sort = long_array[sort_idx]
    #print(long_array_sort[0:100])

    Ns = len(short_array)
    k = round(Ns / n) + 1
    short_idx = np.zeros((Ns, ), dtype=int)

    prog_bar = ptime.progressBar(maxValue=n)
    for i in range(n):
        i = i + 1
        a0 = k * (i - 1)
        if k * (i + 1) < Ns:
            b0 = k * (i + 1)
        else:
            b0 = Ns

        if not a0 > b0:
            idx0 = np.arange(a0, b0)
            #print(idx0)
            short_array0 = short_array[idx0]
            Ns0 = len(short_array0)

            idx_value = find_nearest(long_array_sort, short_array0[0])
            long_array0 = long_array_sort[idx_value:(idx_value + 2 * k)]
            short_idx0 = np.zeros((Ns0, ))
            sort_idx0 = sort_idx[idx_value:(idx_value + 2 * k)]

            for j in range(Ns0):
                id0 = find_nearest(long_array0, short_array0[j])
                short_idx0[j] = sort_idx0[id0]

            short_idx[idx0] = short_idx0
        prog_bar.update(i,
                        every=max(1, round(n / 100)),
                        suffix='{}/{} chunks'.format(i, n))
    prog_bar.close()
    return short_idx
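find_nearest() is not defined in this snippet; a minimal stand-in (an assumption) returning the index of the element closest to a given value:

import numpy as np

def find_nearest(array, value):
    """Index of the element of array closest to value; a hypothetical sketch
    of the find_nearest() used above."""
    return int(np.argmin(np.abs(np.asarray(array) - value)))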
Example #27
def main(argv):

    try:
        file = argv[0]
    except IndexError:
        usage()
        sys.exit(1)

    g_name = 'unwrapPhase'
    g_name_out = 'unwrapPhase_laplace'

    print(
        'calculate Laplace filter of {} based on approximate second derivatives.'
        .format(g_name))

    f = h5py.File(file, 'a')
    ds = f[g_name]
    if g_name_out in f.keys():
        ds_out = f[g_name_out]
    else:
        ds_out = f.create_dataset(g_name_out,
                                  shape=ds.shape,
                                  dtype=np.float32,
                                  chunks=True,
                                  compression=None)
    print('write to dataset /{}'.format(g_name_out))

    num_ifgram = ds.shape[0]
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        unw = ds[i, :, :]
        ds_out[i, :, :] = laplace(unw)
        prog_bar.update(i + 1, suffix='{}/{}'.format(i + 1, num_ifgram))
    prog_bar.close()
    f.close()
    print('finished writing to {}'.format(file))
    return
Example #28
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # read timeseries info / data
    obj = timeseries(inps.timeseries_file)
    obj.open()

    tbase = np.array(obj.yearList, np.float32).reshape(-1, 1)
    tbase -= tbase[obj.refIndex]

    ts_data = obj.read().reshape(obj.numDate, -1)

    # Smooth acquisitions / moving window in time one by one
    print('-'*50)
    print('filtering in time Gaussian window with size of {:.1f} years'.format(inps.time_win))
    ts_data_filt = np.zeros(ts_data.shape, np.float32)
    prog_bar = ptime.progressBar(maxValue=obj.numDate)
    for i in range(obj.numDate):
        # Weight from Gaussian (normal) distribution in time
        tbase_diff = tbase[i] - tbase
        weight = np.exp(-0.5 * (tbase_diff**2) / (inps.time_win**2))
        weight /= np.sum(weight)
        # Smooth the current acquisition
        ts_data_filt[i, :] = np.sum(ts_data * weight, axis=0)
        prog_bar.update(i+1, suffix=obj.dateList[i])
    prog_bar.close()
    del ts_data
    ts_data_filt -= ts_data_filt[obj.refIndex, :]
    ts_data_filt = np.reshape(ts_data_filt, (obj.numDate, obj.length, obj.width))

    # write filtered timeseries file
    if not inps.outfile:
        inps.outfile = '{}_tempGaussian.h5'.format(os.path.splitext(inps.timeseries_file)[0])
    obj_out = timeseries(inps.outfile)
    obj_out.write2hdf5(ts_data_filt, refFile=inps.timeseries_file)
    return inps.outfile
Example #29
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    # check 1 - existing tropo delay file
    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
        return

    # check 2 - geometry file
    if any(i is None for i in [inps.geom_file, inps.ref_yx]):
        print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
        if not os.path.isfile(inps.trop_file):
            inps.trop_file = None
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')
    if 'latitude' in geom_obj.datasetNames:
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    else:
        inps.lat, inps.lon = get_lat_lon(geom_obj.metadata)

    # calculate phase delay
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = len(inps.grib_file_list)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
    trop_data = np.zeros((num_date, length, width), np.float32)

    print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_file_list[i]
        trop_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i+1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # Convert relative phase delay on reference date
    inps.ref_date = atr.get('REF_DATE', date_list[0])
    print('convert to relative phase delay with reference date: '+inps.ref_date)
    inps.ref_idx = date_list.index(inps.ref_date)
    trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

    # Write tropospheric delay to HDF5
    atr['REF_Y'] = inps.ref_yx[0]
    atr['REF_X'] = inps.ref_yx[1]
    ts_obj = timeseries(inps.trop_file)
    ts_obj.write2hdf5(data=trop_data,
                      dates=date_list,
                      metadata=atr,
                      refFile=inps.timeseries_file)
    return
Example #30
def plot_figure(j, inps, metadata):
    """Plot one figure with multiple subplots
    1) create figure
    2) read all data into 3D array
    3) loop to plot each subplot using plot_subplot4figure()
    4) common colorbar and save
    """
    fig_title = 'Figure {} - {}'.format(str(j), inps.outfile[j - 1])
    vprint('----------------------------------------')
    vprint(fig_title)

    # Open a new figure object
    fig = plt.figure(j, figsize=inps.fig_size)
    fig.canvas.manager.set_window_title(fig_title)  # canvas.set_window_title was removed in Matplotlib 3.6

    # Read all data for the current figure into 3D np.array
    i_start = (j - 1) * inps.fig_row_num * inps.fig_col_num
    i_end = min([inps.dsetNum, i_start + inps.fig_row_num * inps.fig_col_num])
    data = np.abs(read_data4figure(i_start, i_end, inps, metadata))

    # Loop - Subplots
    vprint('plotting ...')
    prog_bar = ptime.progressBar(maxValue=i_end - i_start,
                                 print_msg=inps.print_msg)
    for i in range(i_start, i_end):
        idx = i - i_start
        ax = fig.add_subplot(inps.fig_row_num, inps.fig_col_num, idx + 1)
        im = plot_subplot4figure(i,
                                 inps,
                                 ax=ax,
                                 data=data[idx, :, :],
                                 metadata=metadata)
        prog_bar.update(idx + 1, suffix=inps.dset[i].split('/')[-1])
    prog_bar.close()
    del data

    # Tune the subplot layout
    fig.subplots_adjust(left=0.02,
                        right=0.98,
                        bottom=0.02,
                        top=0.98,
                        wspace=0.05,
                        hspace=0.05)
    if inps.fig_wid_space or inps.fig_hei_space:
        fig.subplots_adjust(hspace=inps.fig_hei_space,
                            wspace=inps.fig_wid_space)
    elif inps.fig_tight_layout:
        fig.tight_layout()

    # Min and Max for this figure
    inps.dlim_all = [
        np.nanmin([inps.dlim_all[0], inps.dlim[0]]),
        np.nanmax([inps.dlim_all[1], inps.dlim[1]])
    ]
    vprint('data    range: {} {}'.format(inps.dlim, inps.disp_unit))
    if inps.vlim:
        vprint('display range: {} {}'.format(inps.vlim, inps.disp_unit))

    # Colorbar
    if not inps.vlim:
        vprint('Note: different color scale for EACH subplot!')
    else:
        if inps.disp_cbar:
            cbar_length = 0.4
            if inps.fig_size[1] > 8.0:
                cbar_length /= 2
            vprint('show colorbar')
            fig.subplots_adjust(right=0.93)
            cax = fig.add_axes(
                [0.94, (1.0 - cbar_length) / 2, 0.005, cbar_length])
            inps, cbar = pp.plot_colorbar(inps, im, cax)

    # Save Figure
    if inps.save_fig:
        vprint('save figure to {} with dpi={}'.format(
            os.path.abspath(inps.outfile[j - 1]), inps.fig_dpi))
        fig.savefig(inps.outfile[j - 1],
                    bbox_inches='tight',
                    transparent=True,
                    dpi=inps.fig_dpi)
        if not inps.disp_fig:
            fig.clf()
    return
Example #31
def read_data4figure(i_start, i_end, inps, metadata):
    """Read multiple datasets for one figure into 3D matrix based on i_start/end"""
    data = np.zeros((i_end - i_start, inps.pix_box[3] - inps.pix_box[1],
                     inps.pix_box[2] - inps.pix_box[0]))

    # fast reading for single dataset type
    if (len(inps.dsetFamilyList) == 1 and inps.key in [
            'timeseries', 'giantTimeseries', 'ifgramStack', 'HDFEOS',
            'geometry', 'slc'
    ]):

        dset_list = [inps.dset[i] for i in range(i_start, i_end)]
        data = read(inps.file, datasetName=dset_list, box=inps.pix_box)[0]

        if inps.key == 'slc':
            data = np.abs(data)

        if inps.key == 'ifgramStack':
            # reference pixel info in unwrapPhase
            if inps.dsetFamilyList[0] == 'unwrapPhase' and inps.file_ref_yx:
                ref_y, ref_x = inps.file_ref_yx
                ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
                ref_data = read(inps.file,
                                datasetName=dset_list,
                                box=ref_box,
                                print_msg=False)[0]
                for i in range(data.shape[0]):
                    mask = data[i, :, :] != 0.
                    data[i, mask] -= ref_data[i]

    # slow reading with one 2D matrix at a time
    else:
        vprint('reading data ...')
        prog_bar = ptime.progressBar(maxValue=i_end - i_start,
                                     print_msg=inps.print_msg)
        for i in range(i_start, i_end):
            d = read(inps.file,
                     datasetName=inps.dset[i],
                     box=inps.pix_box,
                     print_msg=False)[0]
            data[i - i_start, :, :] = d
            prog_bar.update(i - i_start + 1,
                            suffix=inps.dset[i].split('/')[-1])
        prog_bar.close()

    # ref_date for timeseries
    if inps.ref_date:
        vprint('consider input reference date: ' + inps.ref_date)
        ref_data = read(inps.file,
                        datasetName=inps.ref_date,
                        box=inps.pix_box,
                        print_msg=False)[0]
        data -= ref_data

    # v/dlim, adjust data if all subplots share the same unit
    # This could be:
    # 1) the same type OR
    # 2) velocity or timeseries OR
    # 3) data/model output from load_gbis.py OR
    # 4) horizontal/vertical output from asc_desc2horz_vert.py
    if (len(inps.dsetFamilyList) == 1
            or all(d in inps.dsetFamilyList
                   for d in ['horizontal', 'vertical'])
            or inps.dsetFamilyList == ['data', 'model', 'residual']
            or inps.key in ['velocity', 'timeseries', 'inversion']):
        data, inps = update_data_with_plot_inps(data, metadata, inps)
        if (not inps.vlim and not (inps.dsetFamilyList[0].startswith('unwrap')
                                   and not inps.file_ref_yx)
                and inps.dsetFamilyList[0] not in ['bperp']):
            data_mli = multilook_data(data, 10, 10)
            inps.vlim = [np.nanmin(data_mli), np.nanmax(data_mli)]
            del data_mli
    inps.dlim = [np.nanmin(data), np.nanmax(data)]

    # multilook
    if inps.multilook:
        data = multilook_data(data, inps.multilook_num, inps.multilook_num)

    # mask
    if inps.msk is not None:
        vprint('masking data')
        msk = np.tile(inps.msk, (data.shape[0], 1, 1))
        data = np.ma.masked_where(msk == 0., data)
    if inps.zero_mask:
        vprint('masking pixels with zero value')
        data = np.ma.masked_where(data == 0., data)
    return data
Example #32
def run_unwrap_error_bridge(ifgram_file, water_mask_file, ramp_type=None, radius=50, 
                            ccName='connectComponent', dsNameIn='unwrapPhase',
                            dsNameOut='unwrapPhase_bridging'):
    """Run unwrapping error correction with bridging
    Parameters: ifgram_file     : str, path of ifgram stack file
                water_mask_file : str, path of water mask file
                ramp_type       : str, name of phase ramp to be removed during the phase jump estimation
                ccName          : str, dataset name of connected components
                dsNameIn        : str, dataset name of unwrap phase to be corrected
                dsNameOut       : str, dataset name of unwrap phase to be saved after correction
    Returns:    ifgram_file     : str, path of ifgram stack file
    """
    print('-'*50)
    print('correct unwrapping error in {} with bridging ...'.format(ifgram_file))
    if ramp_type is not None:
        print('estimate and remove a {} ramp while calculating phase offset'.format(ramp_type))

    # read water mask
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # file info
    atr = readfile.read_attribute(ifgram_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    k = atr['FILE_TYPE']

    # correct unwrap error ifgram by ifgram
    if k == 'ifgramStack':
        date12_list = ifgramStack(ifgram_file).get_date12_list(dropIfgram=False)
        num_ifgram = len(date12_list)
        shape_out = (num_ifgram, length, width)

        # prepare output data writing
        print('open {} with r+ mode'.format(ifgram_file))
        f = h5py.File(ifgram_file, 'r+')
        print('input  dataset:', dsNameIn)
        print('output dataset:', dsNameOut)
        if dsNameOut in f.keys():
            ds = f[dsNameOut]
            print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))
        else:
            ds = f.create_dataset(dsNameOut,
                                  shape_out,
                                  maxshape=(None, None, None),
                                  chunks=True,
                                  compression=None)
            print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))

        # correct unwrap error ifgram by ifgram
        prog_bar = ptime.progressBar(maxValue=num_ifgram)
        for i in range(num_ifgram):
            # read unwrapPhase and connectComponent
            date12 = date12_list[i]
            unw = np.squeeze(f[dsNameIn][i, :, :])
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0

            # bridging
            cc_obj = connectComponent(conncomp=cc, metadata=atr)
            cc_obj.label()
            cc_obj.find_mst_bridge()
            unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

            # write to hdf5 file
            ds[i, :, :] = unw_cor
            prog_bar.update(i+1, suffix=date12)
        prog_bar.close()
        ds.attrs['MODIFICATION_TIME'] = str(time.time())
        f.close()
        print('close {} file.'.format(ifgram_file))

    if k == '.unw':
        # read unwrap phase
        unw = readfile.read(ifgram_file)[0]

        # read connected components
        cc_files0 = [ifgram_file+'.conncomp', os.path.splitext(ifgram_file)[0]+'_snap_connect.byt']
        cc_files = [i for i in cc_files0 if os.path.isfile(i)]
        if len(cc_files) == 0:
            raise FileNotFoundError(cc_files0)
        cc = readfile.read(cc_files[0])[0]
        if water_mask is not None:
            cc[water_mask == 0] = 0

        # bridging
        cc_obj = connectComponent(conncomp=cc, metadata=atr)
        cc_obj.label()
        cc_obj.find_mst_bridge()
        unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

        # write corrected data to binary file
        out_file = '{}_unwCor{}'.format(os.path.splitext(ifgram_file)[0],
                                        os.path.splitext(ifgram_file)[1])
        print('writing >>> {}'.format(out_file))
        writefile.write(unw_cor, out_file=out_file, ref_file=ifgram_file)

    return ifgram_file
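
A hedged usage sketch (the file names below are hypothetical; the function writes the corrected dataset back into the stack file and returns its path):

run_unwrap_error_bridge('inputs/ifgramStack.h5',
                        water_mask_file='waterMask.h5',
                        ramp_type='linear',
                        dsNameOut='unwrapPhase_bridging')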
Example #33
    def run_resample(self, src_data, box_ind=0, print_msg=True):
        """Run interpolation operation for input 2D/3D data
        Parameters: src_data   - 2D/3D np.array, source data to be resampled
                    box_ind    - int, index of the current box of interest
                                 for multiple boxes with pyresample only
                    print_msg  - bool
        Returns:    dest_data  - 2D/3D np.array, resampled data
        """
        # adjust fill_value for each source data / block
        fill_value = self.fill_value
        float_types = [
            np.single, np.double, np.longdouble, np.csingle, np.cdouble,
            np.clongdouble
        ]
        if src_data.dtype == np.bool_:
            fill_value = False
            if print_msg:
                print(
                    'input source data is bool type, restrict fill_value to False.'
                )

        elif src_data.dtype not in float_types and np.isnan(fill_value):
            fill_value = 0
            if print_msg:
                print(
                    'input source data is NOT float, change fill_value from NaN to 0.'
                )

        ## pyresample
        if self.software == 'pyresample':
            # move the 1st/time dimension to the last,
            # so that the rows/cols axes come first, as required by pyresample
            if len(src_data.shape) == 3:
                src_data = np.moveaxis(src_data, 0, -1)

            # resample source data into target data
            dest_data = self.run_pyresample(
                src_data=src_data,
                src_def=self.src_def_list[box_ind],
                dest_def=self.dest_def_list[box_ind],
                radius=self.radius,
                interp_method=self.interp_method,
                fill_value=fill_value,
                nprocs=self.nprocs,
                print_msg=self.print_msg)

            # move 1st/time dimension back
            if len(dest_data.shape) == 3:
                dest_data = np.moveaxis(dest_data, -1, 0)

        ## scipy
        else:
            if print_msg:
                print(
                    '{} resampling using scipy.interpolate.RegularGridInterpolator ...'
                    .format(self.interp_method))
            if len(src_data.shape) == 3:
                dest_data = np.empty(
                    (src_data.shape[0], self.length, self.width),
                    src_data.dtype)
                prog_bar = ptime.progressBar(maxValue=src_data.shape[0],
                                             print_msg=print_msg)
                for i in range(src_data.shape[0]):
                    dest_data[i, :, :] = self.run_regular_grid_interpolator(
                        src_data=src_data[i, :, :],
                        interp_method=self.interp_method,
                        fill_value=fill_value,
                        print_msg=False)  # avoid repeated prints inside the progress-bar loop
                    prog_bar.update(i + 1)
                prog_bar.close()
            else:
                dest_data = self.run_regular_grid_interpolator(
                    src_data=src_data,
                    interp_method=self.interp_method,
                    fill_value=fill_value,
                    print_msg=True)
        return dest_data
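
The moveaxis round-trip used in the pyresample branch can be checked in isolation; a minimal sketch with made-up shapes:

import numpy as np

src = np.zeros((10, 100, 200), np.float32)   # (time, rows, cols)
tmp = np.moveaxis(src, 0, -1)                # (rows, cols, time) for pyresample
assert tmp.shape == (100, 200, 10)
out = np.moveaxis(tmp, -1, 0)                # back to (time, rows, cols)
assert out.shape == src.shape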
Example #34
def get_delay_timeseries(inps, atr):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
                atr  : dict, metadata to be saved in trop_file
    Returns:    trop_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        return (atr['LENGTH'], atr['WIDTH'])

    if (ut.run_or_skip(out_file=inps.trop_file, in_file=inps.grib_file_list, print_msg=False) == 'skip' 
            and get_dataset_size(inps.trop_file) == get_dataset_size(inps.geom_file)):
        print('{} file exists and is newer than all GRIB files, skip updating.'.format(inps.trop_file))
    else:
        if any(i is None for i in [inps.geom_file, inps.ref_yx]):
            print('No DEM / incidenceAngle / ref_yx found, skip calculating tropospheric delays.')
            if not os.path.isfile(inps.trop_file):
                inps.trop_file = None
            return

        # calculate phase delay
        length, width = int(atr['LENGTH']), int(atr['WIDTH'])
        num_date = len(inps.grib_file_list)
        date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_file_list]
        trop_data = np.zeros((num_date, length, width), np.float32)

        print('calculating delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...')
        print('number of grib files used: {}'.format(num_date))
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            grib_file = inps.grib_file_list[i]
            trop_data[i] = get_delay(grib_file, inps)
            prog_bar.update(i+1, suffix=os.path.basename(grib_file))
        prog_bar.close()

        # Convert relative phase delay on reference date
        try:
            inps.ref_date = atr['REF_DATE']
        except:
            inps.ref_date = date_list[0]
        print('convert to relative phase delay with reference date: '+inps.ref_date)
        inps.ref_idx = date_list.index(inps.ref_date)
        trop_data -= np.tile(trop_data[inps.ref_idx, :, :], (num_date, 1, 1))

        # Write tropospheric delay to HDF5
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        ts_obj = timeseries(inps.trop_file)
        ts_obj.write2hdf5(data=trop_data,
                          dates=date_list,
                          metadata=atr,
                          refFile=inps.timeseries_file)

    # Delete temporary DEM file in ROI_PAC format
    if inps.geom_file:
        temp_files =[fname for fname in [inps.dem_file,
                                         inps.inc_angle_file,
                                         inps.lat_file,
                                         inps.lon_file] 
                     if (fname is not None and 'pyaps' in fname)]
        if temp_files:
            print('delete temporary geometry files')
            rmCmd = 'rm '
            for fname in temp_files:
                rmCmd += ' {f} {f}.rsc '.format(f=fname)
            print(rmCmd)
            os.system(rmCmd)
    return
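
The reference-date conversion above is plain array broadcasting; a minimal sketch with toy numbers:

import numpy as np

trop = np.arange(12, dtype=np.float32).reshape(3, 2, 2)  # 3 dates of 2x2 pixels
ref_idx = 0
trop -= np.tile(trop[ref_idx, :, :], (3, 1, 1))          # delay relative to date 0
assert np.all(trop[ref_idx] == 0)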
Example #35
def get_igs_tec_value(tec_file,
                      utc_sec,
                      lat,
                      lon,
                      interp_method='linear3d',
                      rotate_tec_map=False,
                      print_msg=True):
    """Get the TEC value based on input lat/lon/datetime
    Parameters: tec_file - str, path of local TEC file
                utc_sec  - float or 1D np.ndarray, UTC time of the day in seconds
                lat/lon  - float or 1D np.ndarray, latitude / longitude in degrees
                interp_method  - str, interpolation method
                rotate_tec_map - bool, rotate the TEC map along the SUN direction, for linear3d only.
                print_msg      - bool, print out progress bar or not.
    Returns:    tec_val  - float or 1D np.ndarray, TEC value in TECU
    """
    def interp_3d_rotate(interpfs, lons, lats, mins, lon, lat, utc_min):
        ind0 = np.where((mins - utc_min) <= 0)[0][-1]
        ind1 = ind0 + 1
        lon0 = lon + (utc_min - mins[ind0]) * 360. / (24. * 60.)
        lon1 = lon + (utc_min - mins[ind1]) * 360. / (24. * 60.)
        tec_val0 = interpfs[ind0](lon0, lat)
        tec_val1 = interpfs[ind1](lon1, lat)
        tec_val = ((mins[ind1] - utc_min) /
                   (mins[ind1] - mins[ind0]) * tec_val0 +
                   (utc_min - mins[ind0]) /
                   (mins[ind1] - mins[ind0]) * tec_val1)
        return tec_val

    # read TEC file
    lons, lats, mins, tecs = read_ionex_tec(tec_file)[:4]
    tec_maps = tecs[0]

    # time info
    utc_min = utc_sec / 60.

    # resample
    if interp_method == 'nearest':
        lon_ind = np.abs(lons - lon).argmin()
        lat_ind = np.abs(lats - lat).argmin()
        time_ind = np.abs(mins - utc_min).argmin()
        tec_val = tec_maps[lon_ind, lat_ind, time_ind]

    elif interp_method in ['linear', 'linear2d', 'bilinear']:
        time_ind = np.abs(mins.reshape(-1, 1) - utc_min).argmin(axis=0)
        if isinstance(time_ind, np.ndarray):
            num = len(time_ind)
            tec_val = np.zeros(num, dtype=np.float32)
            prog_bar = ptime.progressBar(maxValue=num, print_msg=print_msg)
            for i in range(num):
                tec_val[i] = interpolate.interp2d(lons,
                                                  lats,
                                                  tec_maps[:, :,
                                                           time_ind[i]].T,
                                                  kind='linear')(lon[i],
                                                                 lat[i])
                prog_bar.update(i + 1, every=200)
            prog_bar.close()
        else:
            tec_val = interpolate.interp2d(lons,
                                           lats,
                                           tec_maps[:, :, time_ind].T,
                                           kind='linear')(lon, lat)

    elif interp_method in ['linear3d', 'trilinear']:
        if not rotate_tec_map:
            # option 1: interpolate between consecutive TEC maps
            # testing shows better agreement with SAR obs than option 2.
            tec_val = interpolate.interpn((lons, np.flip(lats), mins),
                                          np.flip(tec_maps, axis=1),
                                          (lon, lat, utc_min),
                                          method='linear')

        else:
            # option 2: interpolate between consecutive rotated TEC maps
            # reference: equation (3) in Schaer and Gurtner (1998)

            # prepare interpolation functions in advance to speed up
            interpfs = []
            for i in range(len(mins)):
                interpfs.append(
                    interpolate.interp2d(lons,
                                         lats,
                                         tec_maps[:, :, i].T,
                                         kind='linear'))

            if isinstance(utc_min, np.ndarray):
                num = len(utc_min)
                tec_val = np.zeros(num, dtype=np.float32)
                prog_bar = ptime.progressBar(maxValue=num, print_msg=print_msg)
                for i in range(num):
                    tec_val[i] = interp_3d_rotate(interpfs, lons, lats, mins,
                                                  lon[i], lat[i], utc_min[i])
                    prog_bar.update(i + 1, every=200)
                prog_bar.close()
            else:
                tec_val = interp_3d_rotate(interpfs, lons, lats, mins, lon,
                                           lat, utc_min)

    return tec_val
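
The temporal weighting inside interp_3d_rotate is a standard linear interpolation between the two bracketing epochs; a sanity check with toy values (not real TEC):

mins = [0., 120.]        # two TEC epochs, in minutes of the day
vals = [10., 14.]        # TEC values at those epochs (toy numbers)
utc_min = 30.
w1 = (utc_min - mins[0]) / (mins[1] - mins[0])   # weight of the later epoch: 0.25
tec = (1 - w1) * vals[0] + w1 * vals[1]
print(tec)               # 11.0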
Example #36
def calculate_delay_timeseries(inps):
    """Calculate delay time-series and write it to HDF5 file.
    Parameters: inps : namespace, all input parameters
    Returns:    tropo_file : str, file name of ECMWF.h5
    """
    def get_dataset_size(fname):
        atr = readfile.read_attribute(fname)
        shape = (int(atr['LENGTH']), int(atr['WIDTH']))
        return shape

    # check existing tropo delay file
    if (ut.run_or_skip(out_file=inps.tropo_file,
                       in_file=inps.grib_files,
                       print_msg=False) == 'skip'
            and get_dataset_size(inps.tropo_file) == get_dataset_size(
                inps.geom_file)):
        print(
            '{} file exists and is newer than all GRIB files, skip updating.'.
            format(inps.tropo_file))
        return

    # prepare geometry data
    geom_obj = geometry(inps.geom_file)
    geom_obj.open()
    inps.dem = geom_obj.read(datasetName='height')
    inps.inc = geom_obj.read(datasetName='incidenceAngle')

    if 'latitude' in geom_obj.datasetNames:
        # for dataset in geo OR radar coord with lookup table in radar-coord (isce, doris)
        inps.lat = geom_obj.read(datasetName='latitude')
        inps.lon = geom_obj.read(datasetName='longitude')
    elif 'Y_FIRST' in geom_obj.metadata:
        # for geo-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon(geom_obj.metadata)
    else:
        # for radar-coded dataset (gamma, roipac)
        inps.lat, inps.lon = ut.get_lat_lon_rdc(geom_obj.metadata)

    # calculate phase delay
    length, width = int(inps.atr['LENGTH']), int(inps.atr['WIDTH'])
    num_date = len(inps.grib_files)
    date_list = [str(re.findall(r'\d{8}', i)[0]) for i in inps.grib_files]
    tropo_data = np.zeros((num_date, length, width), np.float32)
    print(
        '\n------------------------------------------------------------------------------'
    )
    print(
        'calculating absolute delay for each date using PyAPS (Jolivet et al., 2011; 2014) ...'
    )
    print('number of grib files used: {}'.format(num_date))
    prog_bar = ptime.progressBar(maxValue=num_date)
    for i in range(num_date):
        grib_file = inps.grib_files[i]
        tropo_data[i] = get_delay(grib_file, inps)
        prog_bar.update(i + 1, suffix=os.path.basename(grib_file))
    prog_bar.close()

    # remove metadata related with double reference
    # because absolute delay is calculated and saved
    for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
        if key in inps.atr.keys():
            inps.atr.pop(key)

    # Write tropospheric delay to HDF5
    ts_obj = timeseries(inps.tropo_file)
    ts_obj.write2hdf5(data=tropo_data,
                      dates=date_list,
                      metadata=inps.atr,
                      refFile=inps.timeseries_file)
    return inps.tropo_file
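
Dropping the double-reference metadata keys, as done above, can also be written with dict.pop and a default, which is safe when a key is absent; a minimal sketch with a toy dict:

atr = {'REF_DATE': '20141213', 'REF_X': '100', 'WIDTH': '300'}
for key in ['REF_DATE', 'REF_X', 'REF_Y', 'REF_LAT', 'REF_LON']:
    atr.pop(key, None)   # no KeyError if the key is missing
print(atr)               # {'WIDTH': '300'}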
Example #37
def write_ifgram_stack(outfile,
                       unwStack,
                       cohStack,
                       connCompStack,
                       ampStack=None,
                       box=None,
                       xstep=1,
                       ystep=1):
    """Write ifgramStack HDF5 file from stack VRT files
    """

    print('-' * 50)
    stackFiles = [unwStack, cohStack, connCompStack, ampStack]
    max_digit = max([len(os.path.basename(str(i))) for i in stackFiles])
    for stackFile in stackFiles:
        if stackFile is not None:
            print('open {f:<{w}} with gdal ...'.format(
                f=os.path.basename(stackFile), w=max_digit))

    dsUnw = gdal.Open(unwStack, gdal.GA_ReadOnly)
    dsCoh = gdal.Open(cohStack, gdal.GA_ReadOnly)
    dsComp = gdal.Open(connCompStack, gdal.GA_ReadOnly)
    if ampStack is not None:
        dsAmp = gdal.Open(ampStack, gdal.GA_ReadOnly)
    else:
        dsAmp = None

    # extract NoDataValue (from the last */date2_date1.vrt file for example)
    ds = gdal.Open(dsUnw.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueUnw = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for unwrapPhase     : {:<5} and convert to 0.'.
          format(noDataValueUnw))

    ds = gdal.Open(dsCoh.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueCoh = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for coherence       : {:<5} and convert to 0.'.
          format(noDataValueCoh))

    ds = gdal.Open(dsComp.GetFileList()[-1], gdal.GA_ReadOnly)
    noDataValueComp = ds.GetRasterBand(1).GetNoDataValue()
    print('grab NoDataValue for connectComponent: {:<5} and convert to 0.'.
          format(noDataValueComp))
    ds = None

    if dsAmp is not None:
        ds = gdal.Open(dsAmp.GetFileList()[-1], gdal.GA_ReadOnly)
        noDataValueAmp = ds.GetRasterBand(1).GetNoDataValue()
        print('grab NoDataValue for magnitude       : {:<5} and convert to 0.'.
              format(noDataValueAmp))
        ds = None

    # sort the order of interferograms based on date1_date2 with date1 < date2
    nPairs = dsUnw.RasterCount
    d12BandDict = {}
    for ii in range(nPairs):
        bnd = dsUnw.GetRasterBand(ii + 1)
        d12 = bnd.GetMetadata("unwrappedPhase")["Dates"]
        d12 = sorted(d12.split("_"))
        d12 = '{}_{}'.format(d12[0], d12[1])
        d12BandDict[d12] = ii + 1
    d12List = sorted(d12BandDict.keys())
    print('number of interferograms: {}'.format(len(d12List)))

    # box to gdal arguments
    # link: https://gdal.org/python/osgeo.gdal.Band-class.html#ReadAsArray
    if box is not None:
        kwargs = dict(xoff=box[0],
                      yoff=box[1],
                      win_xsize=box[2] - box[0],
                      win_ysize=box[3] - box[1])
    else:
        kwargs = dict()

    print("writing data to HDF5 file {} with 'a' (append) mode ...".format(outfile))
    h5 = h5py.File(outfile, "a")

    prog_bar = ptime.progressBar(maxValue=nPairs)
    for ii in range(nPairs):
        d12 = d12List[ii]
        bndIdx = d12BandDict[d12]
        prog_bar.update(ii + 1, suffix='{}'.format(d12))

        h5["date"][ii, 0] = d12.split("_")[0].encode("utf-8")
        h5["date"][ii, 1] = d12.split("_")[1].encode("utf-8")
        h5["dropIfgram"][ii] = True

        bnd = dsUnw.GetRasterBand(bndIdx)
        data = bnd.ReadAsArray(**kwargs)
        data = multilook_data(data, ystep, xstep, method='nearest')
        data[data == noDataValueUnw] = 0  #assign pixel with no-data to 0
        h5["unwrapPhase"][ii, :, :] = -1.0 * data  #date2_date1 -> date1_date2

        bperp = float(
            bnd.GetMetadata("unwrappedPhase")["perpendicularBaseline"])
        h5["bperp"][ii] = -1.0 * bperp  #date2_date1 -> date1_date2

        bnd = dsCoh.GetRasterBand(bndIdx)
        data = bnd.ReadAsArray(**kwargs)
        data = multilook_data(data, ystep, xstep, method='nearest')
        data[data == noDataValueCoh] = 0  #assign pixel with no-data to 0
        h5["coherence"][ii, :, :] = data

        bnd = dsComp.GetRasterBand(bndIdx)
        data = bnd.ReadAsArray(**kwargs)
        data = multilook_data(data, ystep, xstep, method='nearest')
        data[data == noDataValueComp] = 0  #assign pixel with no-data to 0
        h5["connectComponent"][ii, :, :] = data

        if dsAmp is not None:
            bnd = dsAmp.GetRasterBand(bndIdx)
            data = bnd.ReadAsArray(**kwargs)
            data = multilook_data(data, ystep, xstep, method='nearest')
            data[data == noDataValueAmp] = 0  #assign pixel with no-data to 0
            h5["magnitude"][ii, :, :] = data

    prog_bar.close()

    # add MODIFICATION_TIME metadata to each 3D dataset
    for dsName in ['unwrapPhase', 'coherence', 'connectComponent']:
        h5[dsName].attrs['MODIFICATION_TIME'] = str(time.time())

    h5.close()
    print('finished writing to HDF5 file: {}'.format(outfile))
    dsUnw = None
    dsCoh = None
    dsComp = None
    dsAmp = None
    return outfile
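
A hedged usage sketch (the VRT paths are hypothetical, and the output HDF5 file must already contain the date/bperp/dropIfgram and 3D datasets, since the function opens it in append mode and writes into them):

write_ifgram_stack('inputs/ifgramStack.h5',
                   unwStack='stack/unwrapStack.vrt',
                   cohStack='stack/cohStack.vrt',
                   connCompStack='stack/connCompStack.vrt',
                   box=(0, 0, 300, 200))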
Example #38
def get_common_region_int_ambiguity(ifgram_file,
                                    cc_mask_file,
                                    water_mask_file=None,
                                    num_sample=100,
                                    dsNameIn='unwrapPhase'):
    """Solve the phase unwrapping integer ambiguity for the common regions among all interferograms
    Parameters: ifgram_file     : str, path of interferogram stack file
                cc_mask_file    : str, path of common connected components file
                water_mask_file : str, path of water mask file
                num_sample      : int, number of pixel sampled for each region
                dsNameIn        : str, dataset name of the unwrap phase to be corrected
    Returns:    common_regions  : list of skimage.measure._regionprops._RegionProperties object
                    modified by adding two more variables:
                    sample_coords : 2D np.ndarray in size of (num_sample, 2) in int64 format
                    int_ambiguity : 1D np.ndarray in size of (num_ifgram,) in int format
    """
    print('-' * 50)
    print(
        'calculating the integer ambiguity for the common regions defined in',
        cc_mask_file)
    # stack info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = matrix(
        ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsNameIn,
                                              dropIfgram=True).reshape(
                                                  num_ifgram, -1)

    # prepare common label
    print('read common mask from', cc_mask_file)
    cc_mask = readfile.read(cc_mask_file)[0]
    if water_mask_file is not None and os.path.isfile(water_mask_file):
        water_mask = readfile.read(water_mask_file)[0]
        print('refine common mask based on water mask file', water_mask_file)
        cc_mask[water_mask == 0] = 0

    label_img, num_label = connectComponent.get_large_label(cc_mask,
                                                            min_area=2.5e3,
                                                            print_msg=True)
    common_regions = measure.regionprops(label_img)
    print('number of common regions:', num_label)

    # add sample_coords / int_ambiguity
    print('number of samples per region:', num_sample)
    print('solving the phase-unwrapping integer ambiguity for {}'.format(
        dsNameIn))
    print(
        '\tbased on the closure phase of interferogram triplets (Yunjun et al., 2019)'
    )
    print(
        '\tusing the L1-norm regularized least squares approximation (LASSO) ...'
    )
    for i in range(num_label):
        common_reg = common_regions[i]
        # sample_coords
        idx = sorted(
            np.random.choice(common_reg.area, num_sample, replace=False))
        common_reg.sample_coords = common_reg.coords[idx, :].astype(int)

        # solve for int_ambiguity
        U = np.zeros((num_ifgram, num_sample))
        if common_reg.label == label_img[stack_obj.refY, stack_obj.refX]:
            print('{}/{} skip calculation for the reference region'.format(
                i + 1, num_label))
        else:
            prog_bar = ptime.progressBar(maxValue=num_sample,
                                         prefix='{}/{}'.format(
                                             i + 1, num_label))
            for j in range(num_sample):
                # read unwrap phase
                y, x = common_reg.sample_coords[j, :]
                unw = ifginv.read_unwrap_phase(stack_obj,
                                               box=(x, y, x + 1, y + 1),
                                               ref_phase=ref_phase,
                                               unwDatasetName=dsNameIn,
                                               dropIfgram=True,
                                               print_msg=False).reshape(
                                                   num_ifgram, -1)

                # calculate closure_int
                closure_pha = np.dot(C, unw)
                closure_int = matrix(
                    np.round(
                        (closure_pha - ut.wrap(closure_pha)) / (2. * np.pi)))

                # solve for U
                U[:, j] = np.round(
                    l1regls(-C, closure_int, alpha=1e-2,
                            show_progress=0)).flatten()
                prog_bar.update(j + 1, every=5)
            prog_bar.close()
        # add int_ambiguity
        common_reg.int_ambiguity = np.median(U, axis=1)
        common_reg.date12_list = date12_list

    #sort regions by size to facilitate the region matching later
    common_regions.sort(key=lambda x: x.area, reverse=True)

    # plot sample result
    fig_size = pp.auto_figure_size(label_img.shape, disp_cbar=False)
    fig, ax = plt.subplots(figsize=fig_size)
    ax.imshow(label_img, cmap='jet')
    for common_reg in common_regions:
        ax.plot(common_reg.sample_coords[:, 1],
                common_reg.sample_coords[:, 0],
                'k.',
                ms=2)
    pp.auto_flip_direction(stack_obj.metadata, ax, print_msg=False)
    out_img = 'common_region_sample.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('saved common regions and sample pixels to file', out_img)

    return common_regions
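
The closure integer computed above is the number of 2π cycles separating the unwrapped closure phase from its wrapped value; a sanity check with a toy value, using a wrap() to [-π, π) as a stand-in for ut.wrap:

import numpy as np

def wrap(pha):                       # toy stand-in for ut.wrap
    return (pha + np.pi) % (2 * np.pi) - np.pi

closure_pha = 4 * np.pi + 0.3        # two full cycles plus a small residual
closure_int = np.round((closure_pha - wrap(closure_pha)) / (2 * np.pi))
print(closure_int)                   # 2.0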
Example #39
File: utils1.py Project: hfattahi/PySAR
def run_deramp(fname, ramp_type, mask_file=None, out_file=None, datasetName=None):
    """ Remove ramp from each 2D matrix of input file
    Parameters: fname     : str, data file to be deramped
                ramp_type : str, name of ramp to be estimated.
                mask_file : str, file of mask of pixels used for ramp estimation
                out_file  : str, output file name
                datasetName : str, output dataset name, for ifgramStack file type only
    Returns:    out_file  : str, output file name
    """
    print('remove {} ramp from file: {}'.format(ramp_type, fname))
    if not out_file:
        fbase, fext = os.path.splitext(fname)
        out_file = '{}_ramp{}'.format(fbase, fext)

    start_time = time.time()
    atr = readfile.read_attribute(fname)

    # mask
    if mask_file and os.path.isfile(mask_file):
        mask = readfile.read(mask_file, datasetName='mask')[0]
        print('read mask file: '+mask_file)
    else:
        mask = np.ones((int(atr['LENGTH']), int(atr['WIDTH'])))
        print('use mask of the whole area')

    # deramping
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        print('reading data ...')
        data = readfile.read(fname)[0]
        print('estimating phase ramp ...')
        data = deramp(data, mask, ramp_type=ramp_type, metadata=atr)[0]
        writefile.write(data, out_file, ref_file=fname)

    elif k == 'ifgramStack':
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        if not datasetName:
            datasetName = 'unwrapPhase'
        with h5py.File(fname, 'a') as f:
            ds = f[datasetName]
            dsNameOut = '{}_ramp'.format(datasetName)
            if dsNameOut in f.keys():
                dsOut = f[dsNameOut]
                print('access HDF5 dataset /{}'.format(dsNameOut))
            else:
                dsOut = f.create_dataset(dsNameOut, shape=(obj.numIfgram, obj.length, obj.width),
                                         dtype=np.float32, chunks=True, compression=None)
                print('create HDF5 dataset /{}'.format(dsNameOut))

            prog_bar = ptime.progressBar(maxValue=obj.numIfgram)
            for i in range(obj.numIfgram):
                data = ds[i, :, :]
                data = deramp(data, mask, ramp_type=ramp_type, metadata=atr)[0]
                dsOut[i, :, :] = data
                prog_bar.update(i+1, suffix='{}/{}'.format(i+1, obj.numIfgram))
            prog_bar.close()
            print('finished writing to file: {}'.format(fname))

    # Single Dataset File
    else:
        data = readfile.read(fname)[0]
        data = deramp(data, mask, ramp_type, metadata=atr)[0]
        print('writing >>> {}'.format(out_file))
        writefile.write(data, out_file=out_file, ref_file=fname)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
    return out_file
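
A hedged usage sketch (file names are hypothetical; the mask_file guard above makes the default mask_file=None safe):

run_deramp('timeseries.h5', ramp_type='linear',
           mask_file='maskTempCoh.h5')
run_deramp('inputs/ifgramStack.h5', ramp_type='quadratic',
           datasetName='unwrapPhase_bridging')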
Example #40
    def write2hdf5(self, outputFile='geometryRadar.h5', access_mode='w', box=None, compression='gzip', extra_metadata=None):
        '''
        /                        Root level
        Attributes               Dictionary for metadata. 'X/Y_FIRST/STEP' attribute for geocoded.
        /height                  2D array of float32 in size of (l, w   ) in meter.
        /latitude (azimuthCoord) 2D array of float32 in size of (l, w   ) in degree.
        /longitude (rangeCoord)  2D array of float32 in size of (l, w   ) in degree.
        /incidenceAngle          2D array of float32 in size of (l, w   ) in degree.
        /slantRangeDistance      2D array of float32 in size of (l, w   ) in meter.
        /azimuthAngle            2D array of float32 in size of (l, w   ) in degree. (optional)
        /shadowMask              2D array of bool    in size of (l, w   ).           (optional)
        /waterMask               2D array of bool    in size of (l, w   ).           (optional)
        /bperp                   3D array of float32 in size of (n, l, w) in meter   (optional)
        /date                    1D array of string  in size of (n,     ) in YYYYMMDD(optional)
        ...
        '''
        if len(self.datasetDict) == 0:
            print('No dataset file path in the object, skip HDF5 file writing.')
            return None

        self.outputFile = outputFile
        f = h5py.File(self.outputFile, access_mode)
        print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))

        #groupName = self.name
        #group = f.create_group(groupName)
        #print('create group   /{}'.format(groupName))

        maxDigit = max([len(i) for i in geometryDatasetNames])
        length, width = self.get_size(box=box)
        self.length, self.width = self.get_size()

        ###############################
        for dsName in self.dsNames:
            # 3D datasets containing bperp
            if dsName == 'bperp':
                self.dateList = list(self.datasetDict[dsName].keys())
                dsDataType = dataType
                self.numDate = len(self.dateList)
                dsShape = (self.numDate, length, width)
                ds = f.create_dataset(dsName,
                                      shape=dsShape,
                                      maxshape=(None, dsShape[1], dsShape[2]),
                                      dtype=dsDataType,
                                      chunks=True,
                                      compression=compression)
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))

                print('read coarse grid baseline files and linear interpolate into full resolution ...')
                prog_bar = ptime.progressBar(maxValue=self.numDate)
                for i in range(self.numDate):
                    fname = self.datasetDict[dsName][self.dateList[i]]
                    data = read_isce_bperp_file(fname=fname,
                                                out_shape=(self.length, self.width),
                                                box=box)
                    ds[i, :, :] = data
                    prog_bar.update(i+1, suffix=self.dateList[i])
                prog_bar.close()

                # Write 1D dataset date
                dsName = 'date'
                dsShape = (self.numDate,)
                dsDataType = np.string_
                print(('create dataset /{d:<{w}} of {t:<25}'
                       ' in size of {s}').format(d=dsName,
                                                 w=maxDigit,
                                                 t=str(dsDataType),
                                                 s=dsShape))
                data = np.array(self.dateList, dtype=dsDataType)
                ds = f.create_dataset(dsName, data=data)

            # 2D datasets containing height, latitude, incidenceAngle, shadowMask, etc.
            else:
                dsDataType = dataType
                if dsName.lower().endswith('mask'):
                    dsDataType = np.bool_
                dsShape = (length, width)
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))
                data = np.array(self.read(family=dsName, box=box)[0], dtype=dsDataType)
                ds = f.create_dataset(dsName,
                                      data=data,
                                      chunks=True,
                                      compression=compression)

        ###############################
        # Generate Dataset if not existed in binary file: incidenceAngle, slantRangeDistance
        for dsName in [i for i in ['incidenceAngle', 'slantRangeDistance']
                       if i not in self.dsNames]:
            # Calculate data
            data = None
            if dsName == 'incidenceAngle':
                data = self.get_incidence_angle(box=box)
            elif dsName == 'slantRangeDistance':
                data = self.get_slant_range_distance(box=box)

            # Write dataset
            if data is not None:
                dsShape = data.shape
                dsDataType = dataType
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))
                ds = f.create_dataset(dsName,
                                      data=data,
                                      dtype=dataType,
                                      chunks=True,
                                      compression=compression)

        ###############################
        # Attributes
        self.get_metadata()
        if extra_metadata:
            self.metadata.update(extra_metadata)
            print('add extra metadata: {}'.format(extra_metadata))
        self.metadata = ut.subset_attribute(self.metadata, box)
        self.metadata['FILE_TYPE'] = self.name
        for key, value in self.metadata.items():
            f.attrs[key] = value

        f.close()
        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
Example #41
def run_unwrap_error_phase_closure(ifgram_file,
                                   common_regions,
                                   water_mask_file=None,
                                   ccName='connectComponent',
                                   dsNameIn='unwrapPhase',
                                   dsNameOut='unwrapPhase_phaseClosure'):
    print('-' * 50)
    print('correct unwrapping error in {} with phase closure ...'.format(
        ifgram_file))
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    ref_y, ref_x = stack_obj.refY, stack_obj.refX
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    num_ifgram = len(date12_list)
    shape_out = (num_ifgram, length, width)

    # read water mask
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # prepare output data writing
    print('open {} with r+ mode'.format(ifgram_file))
    f = h5py.File(ifgram_file, 'r+')
    print('input  dataset:', dsNameIn)
    print('output dataset:', dsNameOut)
    if dsNameOut in f.keys():
        ds = f[dsNameOut]
        print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut,
                                                                s=shape_out))
    else:
        ds = f.create_dataset(dsNameOut,
                              shape_out,
                              maxshape=(None, None, None),
                              chunks=True,
                              compression=None)
        print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut,
                                                                s=shape_out))

    # correct unwrap error ifgram by ifgram
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        date12 = date12_list[i]

        # read unwrap phase to be updated
        unw_cor = np.squeeze(f[dsNameIn][i, :, :]).astype(np.float32)
        unw_cor -= unw_cor[ref_y, ref_x]

        # update kept interferograms only
        if stack_obj.dropIfgram[i]:
            # get local region info from connectComponent
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0
            cc_obj = connectComponent(conncomp=cc, metadata=stack_obj.metadata)
            cc_obj.label()
            local_regions = measure.regionprops(cc_obj.labelImg)

            # matching regions and correct unwrap error
            idx_common = common_regions[0].date12_list.index(date12)
            for local_reg in local_regions:
                local_mask = cc_obj.labelImg == local_reg.label
                U = 0
                for common_reg in common_regions:
                    y = common_reg.sample_coords[:, 0]
                    x = common_reg.sample_coords[:, 1]
                    if all(local_mask[y, x]):
                        U = common_reg.int_ambiguity[idx_common]
                        break
                unw_cor[local_mask] += 2. * np.pi * U

        # write to hdf5 file
        ds[i, :, :] = unw_cor
        prog_bar.update(i + 1, suffix=date12)
    prog_bar.close()
    ds.attrs['MODIFICATION_TIME'] = str(time.time())
    f.close()
    print('close {} file.'.format(ifgram_file))
    return ifgram_file
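
A hedged usage sketch chaining the two steps (file names are hypothetical): estimate the common-region integer ambiguities first, then apply them to the stack:

common_regions = get_common_region_int_ambiguity('inputs/ifgramStack.h5',
                                                 'maskConnComp.h5',
                                                 water_mask_file='waterMask.h5')
run_unwrap_error_phase_closure('inputs/ifgramStack.h5', common_regions,
                               water_mask_file='waterMask.h5')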
Example #42
    def write2hdf5(self, outputFile='ifgramStack.h5', access_mode='w', box=None, compression=None, extra_metadata=None):
        '''Save/write an ifgramStackDict object into an HDF5 file with the structure below:

        /                  Root level
        Attributes         Dictionary for metadata
        /date              2D array of string  in size of (m, 2   ) in YYYYMMDD format for master and slave date
        /bperp             1D array of float32 in size of (m,     ) in meter.
        /dropIfgram        1D array of bool    in size of (m,     ).
        /unwrapPhase       3D array of float32 in size of (m, l, w) in radian.
        /coherence         3D array of float32 in size of (m, l, w).
        /connectComponent  3D array of int16   in size of (m, l, w).           (optional)
        /wrapPhase         3D array of float32 in size of (m, l, w) in radian. (optional)
        /iono              3D array of float32 in size of (m, l, w) in radian. (optional)
        /rangeOffset       3D array of float32 in size of (m, l, w).           (optional)
        /azimuthOffset     3D array of float32 in size of (m, l, w).           (optional)

        Parameters: outputFile : str, Name of the HDF5 file for the InSAR stack
                    access_mode : str, access mode of output File, e.g. w, r+
                    box : tuple, subset range in (x0, y0, x1, y1)
                    extra_metadata : dict, extra metadata to be added into output file
        Returns:    outputFile
        '''

        self.outputFile = outputFile
        f = h5py.File(self.outputFile, access_mode)
        print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))

        self.pairs = sorted([pair for pair in self.pairsDict.keys()])
        self.dsNames = list(self.pairsDict[self.pairs[0]].datasetDict.keys())
        self.dsNames = [i for i in ifgramDatasetNames if i in self.dsNames]
        maxDigit = max([len(i) for i in self.dsNames])
        self.get_size(box)

        self.bperp = np.zeros(self.numIfgram)
        ###############################
        # 3D datasets containing unwrapPhase, coherence, connectComponent, wrapPhase, etc.
        for dsName in self.dsNames:
            dsShape = (self.numIfgram, self.length, self.width)
            dsDataType = dataType
            if dsName in ['connectComponent']:
                dsDataType = np.bool_
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))
            ds = f.create_dataset(dsName,
                                  shape=dsShape,
                                  maxshape=(None, dsShape[1], dsShape[2]),
                                  dtype=dsDataType,
                                  chunks=True,
                                  compression=compression)

            prog_bar = ptime.progressBar(maxValue=self.numIfgram)
            for i in range(self.numIfgram):
                ifgramObj = self.pairsDict[self.pairs[i]]
                data = ifgramObj.read(dsName, box=box)[0]
                ds[i, :, :] = data
                self.bperp[i] = ifgramObj.get_perp_baseline()
                prog_bar.update(i+1, suffix='{}_{}'.format(self.pairs[i][0],
                                                           self.pairs[i][1]))
            prog_bar.close()
            ds.attrs['MODIFICATION_TIME'] = str(time.time())

        ###############################
        # 2D dataset containing master and slave dates of all pairs
        dsName = 'date'
        dsDataType = np.string_
        dsShape = (self.numIfgram, 2)
        print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                          w=maxDigit,
                                                                          t=str(dsDataType),
                                                                          s=dsShape))
        data = np.array(self.pairs, dtype=dsDataType)
        f.create_dataset(dsName, data=data)

        ###############################
        # 1D dataset containing perpendicular baseline of all pairs
        dsName = 'bperp'
        dsDataType = dataType
        dsShape = (self.numIfgram,)
        print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                          w=maxDigit,
                                                                          t=str(dsDataType),
                                                                          s=dsShape))
        data = np.array(self.bperp, dtype=dsDataType)
        f.create_dataset(dsName, data=data)

        ###############################
        # 1D dataset containing bool value of dropping the interferograms or not
        dsName = 'dropIfgram'
        dsDataType = np.bool_
        dsShape = (self.numIfgram,)
        print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(d=dsName,
                                                                          w=maxDigit,
                                                                          t=str(dsDataType),
                                                                          s=dsShape))
        data = np.ones(dsShape, dtype=dsDataType)
        f.create_dataset(dsName, data=data)

        ###############################
        # Attributes
        self.get_metadata()
        if extra_metadata:
            self.metadata.update(extra_metadata)
            print('add extra metadata: {}'.format(extra_metadata))
        self.metadata = ut.subset_attribute(self.metadata, box)
        self.metadata['FILE_TYPE'] = self.name
        for key, value in self.metadata.items():
            f.attrs[key] = value

        f.close()
        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
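
The extendable-dataset pattern used above (fixed rows/cols, unlimited first axis) can be reproduced in isolation; a minimal h5py sketch with made-up sizes:

import h5py
import numpy as np

with h5py.File('toy.h5', 'w') as f:
    ds = f.create_dataset('unwrapPhase',
                          shape=(5, 100, 200),
                          maxshape=(None, 100, 200),   # growable along the pair axis
                          dtype=np.float32,
                          chunks=True)
    ds[0, :, :] = np.zeros((100, 200), np.float32)     # write one pair
    ds.resize(6, axis=0)                               # make room for one more pair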
Example #43
def subset_file(fname, subset_dict_input, out_file=None):
    """Subset file with
    Inputs:
        fname        : str, path/name of file
        out_file     : str, path/name of output file
        subset_dict_input : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. filled value for area outside of data coverage. default=None
                                   None/not-existed to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        out_file : str, path/name of output file;
                   out_file = 'sub_'+basename(fname), if fname is in the current directory;
                   out_file = basename(fname) in the current directory, otherwise.
    """

    # Input File Info
    atr = readfile.read_attribute(fname)
    width = int(atr['WIDTH'])
    length = int(atr['LENGTH'])
    k = atr['FILE_TYPE']
    print('subset ' + k + ' file: ' + fname + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr)

    coord = ut.coordinate(atr)
    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = bool(subset_dict.get('fill_value'))
    if not outfill:
        pix_box = coord.check_box_within_data_coverage(pix_box)
        subset_dict['fill_value'] = np.nan

    geo_box = coord.box_pixel2geo(pix_box)
    data_box = (0, 0, width, length)
    print('data   range in (x0,y0,x1,y1): {}'.format(data_box))
    print('subset range in (x0,y0,x1,y1): {}'.format(pix_box))
    print('data   range in (W, N, E, S): {}'.format(
        coord.box_pixel2geo(data_box)))
    print('subset range in (W, N, E, S): {}'.format(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return fname

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not out_file:
        if os.getcwd() == os.path.dirname(os.path.abspath(fname)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                out_file = '{}_tight{}'.format(
                    os.path.splitext(fname)[0],
                    os.path.splitext(fname)[1])
            else:
                out_file = 'sub_' + os.path.basename(fname)
        else:
            out_file = os.path.basename(fname)
    print('writing >>> ' + out_file)

    # update metadata
    atr = attr.update_attribute4subset(atr, pix_box)

    # subset datasets one by one
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])

    ext = os.path.splitext(out_file)[1]
    if ext in ['.h5', '.he5']:
        # initiate the output file
        writefile.layout_hdf5(out_file, metadata=atr, ref_file=fname)

        # subset dataset one-by-one
        for dsName in dsNames:
            with h5py.File(fname, 'r') as fi:
                ds = fi[dsName]
                ds_shape = ds.shape
                ds_ndim = ds.ndim
                print('cropping {d} in {b} from {f} ...'.format(
                    d=dsName, b=pix_box4data, f=os.path.basename(fname)))

                if ds_ndim == 2:
                    # read
                    data = ds[pix_box4data[1]:pix_box4data[3],
                              pix_box4data[0]:pix_box4data[2]]

                    # crop
                    data_out = np.ones(
                        (pix_box[3] - pix_box[1], pix_box[2] - pix_box[0]),
                        data.dtype) * subset_dict['fill_value']
                    data_out[pix_box4subset[1]:pix_box4subset[3],
                             pix_box4subset[0]:pix_box4subset[2]] = data
                    data_out = np.array(data_out, dtype=data.dtype)

                    # write
                    block = [0, int(atr['LENGTH']), 0, int(atr['WIDTH'])]
                    writefile.write_hdf5_block(out_file,
                                               data=data_out,
                                               datasetName=dsName,
                                               block=block,
                                               print_msg=True)

                if ds_ndim == 3:
                    prog_bar = ptime.progressBar(maxValue=ds_shape[0])
                    for i in range(ds_shape[0]):
                        # read
                        data = ds[i, pix_box4data[1]:pix_box4data[3],
                                  pix_box4data[0]:pix_box4data[2]]

                        # crop
                        data_out = np.ones(
                            (1, pix_box[3] - pix_box[1],
                             pix_box[2] - pix_box[0]),
                            data.dtype) * subset_dict['fill_value']
                        data_out[:, pix_box4subset[1]:pix_box4subset[3],
                                 pix_box4subset[0]:pix_box4subset[2]] = data

                        # write
                        block = [
                            i, i + 1, 0,
                            int(atr['LENGTH']), 0,
                            int(atr['WIDTH'])
                        ]
                        writefile.write_hdf5_block(out_file,
                                                   data=data_out,
                                                   datasetName=dsName,
                                                   block=block,
                                                   print_msg=False)

                        prog_bar.update(i + 1,
                                        suffix='{}/{}'.format(
                                            i + 1, ds_shape[0]))
                    prog_bar.close()
                    print('finished writing to file: {}'.format(out_file))

    else:
        # IO for binary files
        dsDict = dict()
        for dsName in dsNames:
            dsDict[dsName] = subset_dataset(
                fname,
                dsName,
                pix_box,
                pix_box4data,
                pix_box4subset,
                fill_value=subset_dict['fill_value'])
        writefile.write(dsDict,
                        out_file=out_file,
                        metadata=atr,
                        ref_file=fname)

        # write extra metadata files for ISCE data files
        if os.path.isfile(fname + '.xml') or os.path.isfile(fname +
                                                            '.aux.xml'):
            # write ISCE XML file
            dtype_gdal = readfile.NUMPY2GDAL_DATATYPE[atr['DATA_TYPE']]
            dtype_isce = readfile.GDAL2ISCE_DATATYPE[dtype_gdal]
            writefile.write_isce_xml(out_file,
                                     width=int(atr['WIDTH']),
                                     length=int(atr['LENGTH']),
                                     bands=len(dsDict.keys()),
                                     data_type=dtype_isce,
                                     scheme=atr['scheme'],
                                     image_type=atr['FILE_TYPE'])
            print(f'write file: {out_file}.xml')

            # write GDAL VRT file
            if os.path.isfile(fname + '.vrt'):
                from isceobj.Util.ImageUtil import ImageLib as IML
                img = IML.loadImage(out_file)[0]
                img.renderVRT()
                print(f'write file: {out_file}.vrt')

    return out_file
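
A hedged usage sketch (file name and pixel ranges are hypothetical):

subset_file('velocity.h5', {'subset_x': [100, 500],
                            'subset_y': [200, 600]})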
def get_common_region_int_ambiguity(ifgram_file, cc_mask_file, water_mask_file=None, num_sample=100,
                                    dsNameIn='unwrapPhase'):
    """Solve the phase unwrapping integer ambiguity for the common regions among all interferograms
    Parameters: ifgram_file     : str, path of interferogram stack file
                cc_mask_file    : str, path of common connected components file
                water_mask_file : str, path of water mask file
                num_sample      : int, number of pixel sampled for each region
                dsNameIn        : str, dataset name of the unwrap phase to be corrected
    Returns:    common_regions  : list of skimage.measure._regionprops._RegionProperties object
                    modified by adding two more variables:
                    sample_coords : 2D np.ndarray in size of (num_sample, 2) in int64 format
                    int_ambiguity : 1D np.ndarray in size of (num_ifgram,) in int format
    """
    print('-'*50)
    print('calculating the integer ambiguity for the common regions defined in', cc_mask_file)
    # stack info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C = matrix(ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ref_phase = stack_obj.get_reference_phase(unwDatasetName=dsNameIn, dropIfgram=True).reshape(num_ifgram, -1)

    # prepare common label
    print('read common mask from', cc_mask_file)
    cc_mask = readfile.read(cc_mask_file)[0]
    if water_mask_file is not None and os.path.isfile(water_mask_file):
        water_mask = readfile.read(water_mask_file)[0]
        print('refine common mask based on water mask file', water_mask_file)
        cc_mask[water_mask == 0] = 0

    label_img, num_label = connectComponent.get_large_label(cc_mask, min_area=2.5e3, print_msg=True)
    common_regions = measure.regionprops(label_img)
    print('number of common regions:', num_label)

    # add sample_coords / int_ambiguity
    print('number of samples per region:', num_sample)
    print('solving the phase-unwrapping integer ambiguity for {}'.format(dsNameIn))
    print('\tbased on the closure phase of interferogram triplets (Yunjun et al., 2019)')
    print('\tusing the L1-norm regularized least squares approximation (LASSO) ...')
    for i in range(num_label):
        common_reg = common_regions[i]
        # sample_coords
        idx = sorted(np.random.choice(common_reg.area, num_sample, replace=False))
        common_reg.sample_coords = common_reg.coords[idx, :].astype(int)

        # solve for int_ambiguity
        U = np.zeros((num_ifgram, num_sample))
        if common_reg.label == label_img[stack_obj.refY, stack_obj.refX]:
            print('{}/{} skip calculation for the reference region'.format(i+1, num_label))
        else:
            prog_bar = ptime.progressBar(maxValue=num_sample, prefix='{}/{}'.format(i+1, num_label))
            for j in range(num_sample):
                # read unwrap phase
                y, x = common_reg.sample_coords[j, :]
                unw = ifginv.read_unwrap_phase(stack_obj,
                                               box=(x, y, x+1, y+1),
                                               ref_phase=ref_phase,
                                               unwDatasetName=dsNameIn,
                                               dropIfgram=True,
                                               print_msg=False).reshape(num_ifgram, -1)

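                # The closure phase of a consistent triplet of unwrapped
                # interferograms is zero (up to noise); its deviation from the
                # wrapped value, divided by 2*pi, gives the integer number of
                # missing phase cycles per triplet.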
                # calculate closure_int
                closure_pha = np.dot(C, unw)
                closure_int = matrix(np.round((closure_pha - ut.wrap(closure_pha)) / (2.*np.pi)))

                # solve for U
                U[:,j] = np.round(l1regls(-C, closure_int, alpha=1e-2, show_progress=0)).flatten()
                prog_bar.update(j+1, every=5)
            prog_bar.close()
        # add int_ambiguity
        common_reg.int_ambiguity = np.median(U, axis=1)
        common_reg.date12_list = date12_list

    # sort regions by size to facilitate the region matching later
    common_regions.sort(key=lambda x: x.area, reverse=True)

    # plot sample result
    fig_size = pp.auto_figure_size(label_img.shape, disp_cbar=False)
    fig, ax = plt.subplots(figsize=fig_size)
    ax.imshow(label_img, cmap='jet')
    for common_reg in common_regions:
        ax.plot(common_reg.sample_coords[:,1],
                common_reg.sample_coords[:,0], 'k.', ms=2)
    pp.auto_flip_direction(stack_obj.metadata, ax, print_msg=False)
    out_img = 'common_region_sample.png'
    fig.savefig(out_img, bbox_inches='tight', transparent=True, dpi=300)
    print('saved common regions and sample pixels to file', out_img)

    return common_regions
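
# A minimal usage sketch (hypothetical file names; assumes a MintPy-style
# working directory with an interferogram stack and a common connected
# components mask):
#
#     common_regions = get_common_region_int_ambiguity(
#         'inputs/ifgramStack.h5', 'maskConnComp.h5',
#         water_mask_file='waterMask.h5', num_sample=100)
#     # each region carries .sample_coords in (num_sample, 2) and
#     # .int_ambiguity in (num_ifgram,), used by the correction step below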
Example #45
def coherence2decorrelation_phase(coh, L, coh_step=0.01, num_repeat=1, scale=1.0, display=False, print_msg=True):
    """Simulate decorrelation phase based on coherence array/matrix
    based on the phase PDF of DS from Tough et al. (1995).

    Parameters: coh        - 1/2/3D np.ndarray of float32 for spatial coherence
                L          - int, number of independent looks
                coh_step   - float, step of coherence to generate lookup table
                num_repeat - int, number of repetitions
                scale      - float, scale factor for the simulated phase, passed to sample_decorrelation_phase
                display    - bool, plot the input coherence and simulated phase (2D input only)
                print_msg  - bool, print out the progress message
    Returns:    pha        - np.ndarray of float32 for decorrelation phase in radians
                                for num_repeat == 1, pha.shape = coh.shape
                                for num_repeat >  1, pha.shape = (coh.size, num_repeat)
    """
    shape_orig = coh.shape
    coh = coh.reshape(-1,1)
    num_coh = coh.size

    # check number of looks
    L = int(L)
    msg = 'number of independent looks L={}'.format(L)
    if L > 80:
        L = 80
        msg += ', use L=80 to avoid division by zero in the calculation (negligible effect)'
    if print_msg:
        print(msg)

    # code for debug
    debug_mode = False
    if debug_mode:
        decor = sample_decorrelation_phase(0.4, L, size=int(1e4), scale=scale)
        return decor

    # initiate output matrix
    pha = np.zeros((num_coh, num_repeat), dtype=np.float32)

    # sampling strategy
    num_step = int(1 / coh_step)
    if num_coh <= num_step * 2:
        # for small size --> loop through each input coherence
        for i in range(num_coh):
            pha[i,:] = sample_decorrelation_phase(coh[i], L, size=num_repeat, scale=scale)

    else:
        # for large size --> loop through coherence lookup table and map into input coherence matrix
        prog_bar = ptime.progressBar(maxValue=num_step, print_msg=print_msg)
        for i in range(num_step):
            # find index within the coherence intervals
            coh_i = i * coh_step
            flag = np.multiply(coh >= coh_i, coh < coh_i + coh_step).flatten()
            num_coh_i = np.sum(flag)
            if num_coh_i > 0:
                pha_i = sample_decorrelation_phase(coh_i, L, size=num_coh_i*num_repeat, scale=scale)
                pha[flag,:] = pha_i.reshape(-1, num_repeat)
            prog_bar.update(i+1, suffix='{:.3f}'.format(coh_i))
        prog_bar.close()

    if num_repeat == 1:
        pha = pha.reshape(shape_orig)

    # plot
    if display and len(shape_orig) == 2:
        fig, axs = plt.subplots(nrows=1, ncols=2, figsize=[6,3])
        axs[0].imshow(coh.reshape(shape_orig), vmin=0, vmax=1, cmap='gray')
        axs[1].imshow(pha[:,0].reshape(shape_orig), vmin=-np.pi, vmax=np.pi, cmap='jet')
        plt.show()

    return pha
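
# A minimal usage sketch (assumed random coherence input, values in [0, 1)):
#
#     coh = np.random.rand(200, 300).astype(np.float32)
#     pha = coherence2decorrelation_phase(coh, L=20)
#     # pha has the same shape as coh, with values in radians
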
def run_unwrap_error_phase_closure(ifgram_file, common_regions, water_mask_file=None, ccName='connectComponent',
                                   dsNameIn='unwrapPhase', dsNameOut='unwrapPhase_phaseClosure'):
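    """Correct the unwrapping errors in ifgram_file in place, using the integer
    ambiguities solved for the common regions by get_common_region_int_ambiguity."""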
    print('-'*50)
    print('correct unwrapping error in {} with phase closure ...'.format(ifgram_file))
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open()
    length, width = stack_obj.length, stack_obj.width
    ref_y, ref_x = stack_obj.refY, stack_obj.refX
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    num_ifgram = len(date12_list)
    shape_out = (num_ifgram, length, width)

    # read water mask
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # prepare output data writing
    print('open {} with r+ mode'.format(ifgram_file))
    f = h5py.File(ifgram_file, 'r+')
    print('input  dataset:', dsNameIn)
    print('output dataset:', dsNameOut)
    if dsNameOut in f.keys():
        ds = f[dsNameOut]
        print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))
    else:
        ds = f.create_dataset(dsNameOut,
                              shape_out,
                              maxshape=(None, None, None),
                              chunks=True,
                              compression=None)
        print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))

    # correct unwrap error ifgram by ifgram
    prog_bar = ptime.progressBar(maxValue=num_ifgram)
    for i in range(num_ifgram):
        date12 = date12_list[i]

        # read unwrap phase to be updated
        unw_cor = np.squeeze(f[dsNameIn][i, :, :]).astype(np.float32)
        unw_cor -= unw_cor[ref_y, ref_x]

        # update kept interferograms only
        if stack_obj.dropIfgram[i]:
            # get local region info from connectComponent
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0
            cc_obj = connectComponent(conncomp=cc, metadata=stack_obj.metadata)
            cc_obj.label()
            local_regions = measure.regionprops(cc_obj.labelImg)

            # matching regions and correct unwrap error
            idx_common = common_regions[0].date12_list.index(date12)
            for local_reg in local_regions:
                local_mask = cc_obj.labelImg == local_reg.label
                U = 0
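                # a local region inherits the integer ambiguity of the first
                # (largest, since common_regions is sorted by area) common
                # region whose sample pixels all fall inside it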
                for common_reg in common_regions:
                    y = common_reg.sample_coords[:,0]
                    x = common_reg.sample_coords[:,1]
                    if all(local_mask[y, x]):
                        U = common_reg.int_ambiguity[idx_common]
                        break
                unw_cor[local_mask] += 2. * np.pi * U

        # write to hdf5 file
        ds[i, :, :] = unw_cor
        prog_bar.update(i+1, suffix=date12)
    prog_bar.close()
    ds.attrs['MODIFICATION_TIME'] = str(time.time())
    f.close()
    print('close {} file.'.format(ifgram_file))
    return ifgram_file
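
# A minimal usage sketch chaining the two steps above (hypothetical file names):
#
#     regions = get_common_region_int_ambiguity('inputs/ifgramStack.h5',
#                                               'maskConnComp.h5')
#     run_unwrap_error_phase_closure('inputs/ifgramStack.h5', regions,
#                                    dsNameOut='unwrapPhase_phaseClosure')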