Example #1
def add_file(fnames, out_file=None):
    """Generate sum of all input files
    Parameters: fnames : list of str, path/name of input files to be added
                out_file : str, optional, path/name of output file
    Returns:    out_file : str, path/name of output file
    Example:    out_file = add_file(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    """
    # Default output file name
    ext = os.path.splitext(fnames[0])[1]
    if not out_file:
        out_file = os.path.splitext(fnames[0])[0]
        for i in range(1, len(fnames)):
            out_file += '_plus_' + os.path.splitext(os.path.basename(
                fnames[i]))[0]
        out_file += ext

    atr = readfile.read_attribute(fnames[0])
    dsNames = readfile.get_dataset_list(fnames[0])
    dsDict = {}
    for dsName in dsNames:
        print('adding {} ...'.format(dsName))
        data = readfile.read(fnames[0], datasetName=dsName)[0]
        for i in range(1, len(fnames)):
            d = readfile.read(fnames[i], datasetName=dsName)[0]
            data = add_matrix(data, d)
        dsDict[dsName] = data
    writefile.write(dsDict,
                    out_file=out_file,
                    metadata=atr,
                    ref_file=fnames[0])
    return out_file
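
A minimal usage sketch (file names below are placeholders; add_file and its readfile/writefile helpers are assumed importable from the surrounding PySAR module):

out = add_file(['mask_1.h5', 'mask_2.h5', 'mask_3.h5'], out_file='mask_all.h5')
# With out_file=None the name is auto-derived, e.g. 'mask_1_plus_mask_2_plus_mask_3.h5'.
print('written:', out)
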
Example #2
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.file)[0]+'.h5'

    if inps.data_type:
        if inps.data_type in ['float', 'float32', 'np.float32']:
            inps.data_type = np.float32
        elif inps.data_type in ['float64', 'np.float64']:
            inps.data_type = np.float64
        elif inps.data_type in ['int', 'int16', 'np.int16']:
            inps.data_type = np.int16
        elif inps.data_type in ['bool', 'np.bool_']:
            inps.data_type = np.bool_
        elif inps.data_type in ['complex', 'np.complex64']:
            inps.data_type = np.complex64
        elif inps.data_type in ['complex128', 'np.complex128']:
            inps.data_type = np.complex128
        else:
            raise ValueError('unrecognized input data type: {}'.format(inps.data_type))

    atr = readfile.read_attribute(inps.file)
    dsNames = readfile.get_dataset_list(inps.file)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(inps.file, datasetName=dsName)[0]
        if inps.data_type:
            data = np.array(data, inps.data_type)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=inps.outfile, metadata=atr)
    return inps.outfile
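
The if/elif ladder above can be collapsed into a lookup table; a sketch with the same alias-to-dtype mapping (not the project's code):

import numpy as np

# Alias -> numpy dtype, mirroring the chain in main() above (sketch only).
DTYPE_MAP = {
    'float': np.float32, 'float32': np.float32, 'np.float32': np.float32,
    'float64': np.float64, 'np.float64': np.float64,
    'int': np.int16, 'int16': np.int16, 'np.int16': np.int16,
    'bool': np.bool_, 'np.bool_': np.bool_,
    'complex': np.complex64, 'np.complex64': np.complex64,
    'complex128': np.complex128, 'np.complex128': np.complex128,
}

def parse_data_type(name):
    """Translate a dtype alias string into a numpy dtype."""
    try:
        return DTYPE_MAP[name]
    except KeyError:
        raise ValueError('unrecognized input data type: {}'.format(name))
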
Example #3
File: multilook.py Project: whigg/PySAR
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    # input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking {} {} file: {}'.format(atr['PROCESSOR'], k, infile))
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    # output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    #print('writing >>> '+outfile)

    # read source data and multilooking
    dsNames = readfile.get_dataset_list(infile)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = dict()
    for dsName in dsNames:
        print('multilooking {d:<{w}} from {f} ...'.format(
            d=dsName, w=maxDigit, f=os.path.basename(infile)))
        data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
        data = multilook_data(data, lks_y, lks_x)
        dsDict[dsName] = data
    atr = multilook_attribute(atr, lks_y, lks_x)
    writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=infile)
    return outfile
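
A minimal usage sketch (the file name is a placeholder):

out = multilook_file('velocity.h5', lks_y=10, lks_x=10)
# Run from the file's own directory, this writes 'velocity_10alks_10rlks.h5'.
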
Example #4
def file_operation(fname, operator, operand, out_file=None):
    """Mathmathic operation of file"""

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is ' + k + ' file: ' + fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not out_file:
        if operator in ['+', 'plus', 'add', 'addition']:
            suffix = 'plus'
        elif operator in ['-', 'minus', 'subtract', 'subtraction']:
            suffix = 'minus'
        elif operator in ['*', 'times', 'multiply', 'multiplication']:
            suffix = 'multiply'
        elif operator in ['/', 'obelus', 'divide', 'division']:
            suffix = 'divide'
        elif operator in ['^', 'pow', 'power']:
            suffix = 'pow'
        else:
            raise ValueError('unrecognized operator: {}'.format(operator))
        out_file = '{}_{}{}{}'.format(
            os.path.splitext(fname)[0], suffix, str(operand),
            os.path.splitext(fname)[1])

    dsNames = readfile.get_dataset_list(fname)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(fname, datasetName=dsName)[0]
        data = data_operation(data, operator, operand)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fname)
    return out_file
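
A minimal usage sketch (the file name is a placeholder):

out = file_operation('timeseries.h5', '+', 0.5)
# The default output name becomes 'timeseries_plus0.5.h5'.
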
Example #5
File: mask.py Project: fossabot/PySAR
def mask_file(fname, mask_file, out_file=None, inps=None):
    """ Mask input fname with mask_file
    Inputs:
        fname/mask_file - string, path of input / mask file
        inps - namespace, including the following options:
                    subset_x/y - list of 2 ints, subset in x/y direction
                    threshold - float, threshold/minValue to generate mask
    Output:
        out_file - string
    """
    if not inps:
        inps = cmd_line_parse()

    if not out_file:
        out_file = '{}_masked{}'.format(
            os.path.splitext(fname)[0],
            os.path.splitext(fname)[1])

    # read mask_file
    mask = readfile.read(mask_file)[0]
    mask = update_mask_with_inps(mask, inps)

    # masking input file
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(fname, datasetName=dsName, print_msg=False)[0]
        if dsName not in ['coherence']:
            print('masking {d:<{w}} from {f} ...'.format(d=dsName,
                                                         w=maxDigit,
                                                         f=fname))
            data = mask_matrix(data, mask, fill_value=inps.fill_value)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, ref_file=fname)
    return out_file
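
A minimal usage sketch (file names are placeholders; with inps=None the masking options, e.g. fill_value, come from the module's cmd_line_parse()):

out = mask_file('velocity.h5', 'maskTempCoh.h5')
# Default output: 'velocity_masked.h5'.
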
Example #6
File: geocode.py Project: wchch1010/PySAR
def run_geocode(inps):
    """geocode all input files"""
    start_time = time.time()

    # Prepare geometry for geocoding
    res_obj = resample(lookupFile=inps.lookupFile,
                       dataFile=inps.file[0],
                       SNWE=inps.SNWE,
                       laloStep=inps.laloStep,
                       processor=inps.processor)
    res_obj.open()

    if not inps.nprocs:
        inps.nprocs = multiprocessing.cpu_count()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50+'\nresampling file: {}'.format(infile))
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        outfile = auto_output_filename(infile, inps)
        if inps.updateMode and ut.run_or_skip(outfile, in_file=[infile, inps.lookupFile]) == 'skip':
            print('update mode is ON, skip geocoding.')
            continue

        # read source data and resample
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        dsResDict = dict()
        for dsName in dsNames:
            print('reading {d:<{w}} from {f} ...'.format(d=dsName,
                                                         w=maxDigit,
                                                         f=os.path.basename(infile)))
            data = readfile.read(infile,
                                 datasetName=dsName,
                                 print_msg=False)[0]

            if atr['FILE_TYPE'] == 'timeseries' and len(data.shape) == 2:
                data = np.reshape(data, (1, data.shape[0], data.shape[1]))
            res_data = res_obj.run_resample(src_data=data,
                                            interp_method=inps.interpMethod,
                                            fill_value=inps.fillValue,
                                            nprocs=inps.nprocs,
                                            print_msg=True)
            dsResDict[dsName] = res_data

        # update metadata
        if inps.radar2geo:
            atr = metadata_radar2geo(atr, res_obj)
        else:
            atr = metadata_geo2radar(atr, res_obj)
        #if len(dsNames) == 1 and dsName not in ['timeseries']:
        #    atr['FILE_TYPE'] = dsNames[0]
        #    infile = None

        writefile.write(dsResDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return outfile
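
run_geocode expects a fully parsed namespace; a sketch of the attributes read in the function body above, filled with placeholder values (the real values normally come from the script's argument parser, and auto_output_filename() may read further attributes):

import numpy as np
from argparse import Namespace

inps = Namespace(file=['velocity.h5'],                # files to geocode (placeholder)
                 lookupFile='INPUTS/geometryRadar.h5',
                 dset=None, SNWE=None, laloStep=None,
                 processor='isce',
                 nprocs=None,                         # None -> use all CPU cores
                 updateMode=True, interpMethod='nearest',
                 fillValue=np.nan, radar2geo=True)
run_geocode(inps)
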
Example #7
File: pysarApp.py Project: wchch1010/PySAR
    def run_load_data(self, step_name):
        """Load InSAR stacks into HDF5 files in ./INPUTS folder.
        It 1) copies auxiliary files into the work directory (for Univ. of Miami only)
           2) loads all interferogram stack files into the PYSAR/INPUTS directory
           3) checks the loading result
           4) adds custom metadata (optional, for HDF-EOS5 format only)
        """
        # 1) copy aux files (optional)
        self._copy_aux_file()

        # 2) loading data
        scp_args = '--template {}'.format(self.templateFile)
        if self.customTemplateFile:
            scp_args += ' {}'.format(self.customTemplateFile)
        if self.projectName:
            scp_args += ' --project {}'.format(self.projectName)
        # run
        print("load_data.py", scp_args)
        pysar.load_data.main(scp_args.split())
        os.chdir(self.workDir)

        # 3) check loading result
        load_complete, stack_file, geom_file = ut.check_loaded_dataset(self.workDir, print_msg=True)[0:3]

        # 3.1) output waterMask.h5
        water_mask_file = 'waterMask.h5'
        if 'waterMask' in readfile.get_dataset_list(geom_file):
            print('generate {} from {} for convenience'.format(water_mask_file, geom_file))
            if ut.run_or_skip(out_file=water_mask_file, in_file=geom_file) == 'run':
                water_mask, atr = readfile.read(geom_file, datasetName='waterMask')
                atr['FILE_TYPE'] = 'waterMask'
                writefile.write(water_mask, out_file=water_mask_file, metadata=atr)

        # 4) add custom metadata (optional)
        if self.customTemplateFile:
            print('updating {}, {} metadata based on custom template file: {}'.format(
                os.path.basename(stack_file),
                os.path.basename(geom_file),
                os.path.basename(self.customTemplateFile)))
            # use ut.add_attribute() instead of add_attribute.py because of
            # better control of special metadata, such as SUBSET_X/YMIN
            ut.add_attribute(stack_file, self.customTemplate)
            ut.add_attribute(geom_file, self.customTemplate)

        # 5) if not load_complete, plot and raise exception
        if not load_complete:
            # plot result if an error occurred
            # (note: 'plot' relies on a module-level flag in the original script)
            self.plot_result(print_aux=False, plot=plot)

            # go back to original directory
            print('Go back to directory:', self.cwd)
            os.chdir(self.cwd)

            # raise error
            msg = 'step {}: NOT all required dataset found, exit.'.format(step_name)
            raise RuntimeError(msg)
        return
Example #8
def filter_file(fname, filter_type, filter_par=None, fname_out=None):
    """Filter 2D matrix with selected filter
    Inputs:
        fname       : string, name/path of file to be filtered
        filter_type : string, filter type
        filter_par  : string, optional, parameter for low/high pass filter
                      for low/highpass_avg, it's kernel size in int
                      for low/highpass_gaussian, it's sigma in float
    Output:
        fname_out   : string, output file name/path
    """
    # Info
    filter_type = filter_type.lower()
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    msg = 'filtering {} file: {} using {} filter'.format(k, fname, filter_type)
    if filter_type.endswith('avg'):
        if not filter_par:
            filter_par = 5
        msg += ' with kernel size of {}'.format(filter_par)
    elif filter_type.endswith('gaussian'):
        if not filter_par:
            filter_par = 3.0
        msg += ' with sigma of {:.1f}'.format(filter_par)
    print(msg)

    # output filename
    if not fname_out:
        fname_out = '{}_{}{}'.format(
            os.path.splitext(fname)[0], filter_type,
            os.path.splitext(fname)[1])

    # filtering file
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = dict()
    for dsName in dsNames:
        msg = 'filtering {d:<{w}} from {f} '.format(d=dsName,
                                                    w=maxDigit,
                                                    f=os.path.basename(fname))
        data = readfile.read(fname, datasetName=dsName, print_msg=False)[0]
        if len(data.shape) == 3:
            num_loop = data.shape[0]
            for i in range(num_loop):
                data[i, :, :] = filter_data(data[i, :, :], filter_type,
                                            filter_par)
                sys.stdout.write('\r{} {}/{} ...'.format(msg, i + 1, num_loop))
                sys.stdout.flush()
            print('')
        else:
            data = filter_data(data, filter_type, filter_par)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=fname_out, metadata=atr, ref_file=fname)
    return fname_out
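
A minimal usage sketch (the file name is a placeholder):

out = filter_file('velocity.h5', 'lowpass_gaussian')
# Uses the default sigma of 3.0 and writes 'velocity_lowpass_gaussian.h5'.
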
Example #9
File: info_gui.py Project: xuubing/PySAR
def plot_data():

    atr = readfile.read_attribute(file.get())
    file_type = atr['FILE_TYPE']

    datasets = readfile.get_dataset_list(file.get(), file_type)

    item = tree.focus()
    the_item = tree.item(item)
    epoch_num = the_item['text']
    if epoch_num in datasets:
        view.main([file.get(), epoch_num])
Example #10
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.file)[0] + '.h5'

    atr = readfile.read_attribute(inps.file)
    dsNames = readfile.get_dataset_list(inps.file)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(inps.file, datasetName=dsName)[0]
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=inps.outfile, metadata=atr)
    return inps.outfile
Example #11
File: geocode.py Project: fossabot/PySAR
def run_resample(inps):
    """resample all input files"""
    start_time = time.time()

    # Prepare geometry for geocoding
    res_obj = resample(lookupFile=inps.lookupFile, dataFile=inps.file[0],
                       SNWE=inps.SNWE, laloStep=inps.laloStep)
    res_obj.get_geometry_definition()

    inps.nprocs = multiprocessing.cpu_count()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50+'\nresampling file: {}'.format(infile))
        outfile = auto_output_filename(infile, inps)
        if inps.updateMode and not ut.update_file(outfile, [infile, inps.lookupFile]):
            print('update mode is ON, skip geocoding.')
            continue

        # read source data and resample
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        dsResDict = dict()
        for dsName in dsNames:
            print('resampling {d:<{w}} from {f} using {n} processor cores ...'.format(
                d=dsName, w=maxDigit, f=os.path.basename(infile), n=inps.nprocs))
            data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
            res_data = resample_data(data, inps, res_obj)
            dsResDict[dsName] = res_data

        # update metadata
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        if inps.radar2geo:
            atr = metadata_radar2geo(atr, res_obj)
        else:
            atr = metadata_geo2radar(atr, res_obj)
        if len(dsNames) == 1 and dsName not in ['timeseries']:
            atr['FILE_TYPE'] = dsNames[0]
            infile = None

        writefile.write(dsResDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time()-start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs\nDone.'.format(m, s))
    return outfile
Example #12
def split_ifgram_file(ifgram_file, chunk_size=100e6):
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    metadata = dict(stack_obj.metadata)

    # get reference phase
    ref_phase = get_ifgram_reference_phase(ifgram_file)

    # get list of boxes
    box_list = split_into_boxes(ifgram_file,
                                chunk_size=chunk_size,
                                print_msg=True)
    num_box = len(box_list)

    # read/write each patch file
    outfile_list = []
    for i in range(num_box):
        box = box_list[i]
        outfile = '{}_{:03d}{}'.format(
            os.path.splitext(ifgram_file)[0], i + 1,
            os.path.splitext(ifgram_file)[1])

        # datasets
        print('-' * 50)
        print('reading all datasets in {} from file: {} ...'.format(
            box, ifgram_file))
        dsNames = readfile.get_dataset_list(ifgram_file)
        dsDict = {}
        dsDict['refPhase'] = ref_phase
        for dsName in dsNames:
            data = stack_obj.read(datasetName=dsName, box=box, print_msg=False)
            dsDict[dsName] = data

        # metadata
        metadata['LENGTH'] = box[3] - box[1]
        metadata['WIDTH'] = box[2] - box[0]
        writefile.write(dsDict,
                        out_file=outfile,
                        metadata=metadata,
                        ref_file=ifgram_file)
        outfile_list.append(outfile)
    return outfile_list
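
A minimal usage sketch (the path is a placeholder):

patch_files = split_ifgram_file('INPUTS/ifgramStack.h5', chunk_size=100e6)
# e.g. ['INPUTS/ifgramStack_001.h5', 'INPUTS/ifgramStack_002.h5', ...]
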
Example #13
File: utils1.py Project: wchch1010/PySAR
def get_geometry_file(dset, geocoded=False, abspath=True, print_msg=True):
    """Find geometry file containing input specific dataset"""
    if dset not in geometryDatasetNames:
        raise ValueError('unrecognized geometry dataset name: {}'.format(dset))

    if geocoded:
        geom_file = './INPUTS/geometryGeo.h5'
    else:
        geom_file = './INPUTS/geometryRadar.h5'

    if not os.path.isfile(geom_file):
        print('geometry file {} does not exist.'.format(geom_file))
        return None

    if dset not in readfile.get_dataset_list(geom_file):
        print('dataset {} not found in file {}'.format(dset, geom_file))
        return None

    if abspath:
        geom_file = os.path.abspath(geom_file)
    return geom_file
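
A minimal usage sketch ('height' is assumed to be a valid entry of geometryDatasetNames; the ./INPUTS paths are the defaults hard-coded above):

geom_file = get_geometry_file('height', geocoded=False)
# -> absolute path of ./INPUTS/geometryRadar.h5, or None if the file/dataset is missing.
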
Example #14
File: subset.py Project: xuubing/PySAR
def subset_file(fname, subset_dict_input, out_file=None):
    """Subset file with
    Inputs:
        fname        : str, path/name of file
        out_file     : str, path/name of output file
        subset_dict : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. fill value for area outside of data coverage. default=None
                                   None/absent to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        out_file : str, path/name of output file;
                   out_file = 'subset_'+fname, if fname is in the current directory;
                   out_file = fname, if fname is not in the current directory.
    """

    # Input File Info
    try:
        atr = readfile.read_attribute(fname)
    except Exception:
        return None

    width = int(atr['WIDTH'])
    length = int(atr['LENGTH'])
    k = atr['FILE_TYPE']
    print('subset ' + k + ' file: ' + fname + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr)

    coord = ut.coordinate(atr)
    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    outfill = bool(subset_dict.get('fill_value'))
    if not outfill:
        pix_box = coord.check_box_within_data_coverage(pix_box)
        subset_dict['fill_value'] = np.nan

    geo_box = coord.box_pixel2geo(pix_box)
    data_box = (0, 0, width, length)
    print('data   range in y/x: ' + str(data_box))
    print('subset range in y/x: ' + str(pix_box))
    print('data   range in lat/lon: ' + str(coord.box_pixel2geo(data_box)))
    print('subset range in lat/lon: ' + str(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return fname

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not out_file:
        if os.getcwd() == os.path.dirname(os.path.abspath(fname)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                out_file = '{}_tight{}'.format(
                    os.path.splitext(fname)[0],
                    os.path.splitext(fname)[1])
            else:
                out_file = 'subset_' + os.path.basename(fname)
        else:
            out_file = os.path.basename(fname)
    print('writing >>> ' + out_file)

    # subset datasets one by one
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = dict()
    for dsName in dsNames:
        print('subsetting {d:<{w}} from {f} ...'.format(
            d=dsName, w=maxDigit, f=os.path.basename(fname)))
        data = readfile.read(fname, datasetName=dsName, print_msg=False)[0]

        # subset 2D data
        if len(data.shape) == 2:
            data_overlap = data[pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]
            data = np.ones((pix_box[3] - pix_box[1], pix_box[2] - pix_box[0]),
                           data.dtype) * subset_dict['fill_value']
            data[pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        # subset 3D data
        elif len(data.shape) == 3:
            data_overlap = data[:, pix_box4data[1]:pix_box4data[3],
                                pix_box4data[0]:pix_box4data[2]]
            data = np.ones(
                (data.shape[0], pix_box[3] - pix_box[1], pix_box[2] -
                 pix_box[0]), data.dtype) * subset_dict['fill_value']
            data[:, pix_box4subset[1]:pix_box4subset[3],
                 pix_box4subset[0]:pix_box4subset[2]] = data_overlap

        dsDict[dsName] = data

    atr = ut.subset_attribute(atr, pix_box)
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fname)
    return out_file
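
A minimal usage sketch (file name and pixel ranges are placeholders; the dict keys follow the docstring above):

sub_dict = {'subset_x': [100, 600], 'subset_y': [200, 800]}
out = subset_file('velocity.h5', sub_dict)
# Run from the file's own directory, this writes 'subset_velocity.h5'.
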
Example #15
def ifgram_inversion_patch(ifgram_file,
                           box=None,
                           ref_phase=None,
                           weight_func='fim',
                           mask_dataset_name=None,
                           mask_threshold=0.4,
                           water_mask_file=None,
                           skip_zero_phase=True):
    """Invert one patch of an ifgram stack into timeseries.
    Parameters: ifgram_file       : str, interferograms stack HDF5 file, e.g. ./INPUTS/ifgramStack.h5
                box               : tuple of 4 int, indicating (x0, y0, x1, y1) pixel coordinate of area of interest
                                    or None, to process the whole file and write output file
                ref_phase         : 1D array in size of (num_ifgram) 
                                    or None
                weight_func       : str, weight function, choose in ['sbas', 'fim', 'var', 'coh']
                mask_dataset_name : str, dataset name in ifgram_file used to mask unwrapPhase pixel-wise
                mask_threshold    : float, min coherence of pixels if mask_dataset_name='coherence'
                water_mask_file   : str, water mask filename if available,
                                    skip inversion on water to speed up the process
                skip_zero_phase   : bool, whether to skip zero values of unwrapped phase (default: True), kept for comparison
    Returns:    ts             : 3D array in size of (num_date, num_row, num_col)
                temp_coh       : 2D array in size of (num_row, num_col)
                ts_std         : 3D array in size of (num_date, num_row, num_col)
                num_inv_ifgram : 2D array in size of (num_row, num_col)
    Example:    ifgram_inversion_patch('ifgramStack.h5', box=(0,200,1316,400), ref_phase=np.array(),
                                       weight_func='fim', mask_dataset_name='coherence')
                ifgram_inversion_patch('ifgramStack_001.h5', box=None, ref_phase=None,
                                       weight_func='fim', mask_dataset_name='coherence')
    """

    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)

    # Size Info - Patch
    if box:
        print('processing %8d/%d lines ...' % (box[3], stack_obj.length))
        num_row = box[3] - box[1]
        num_col = box[2] - box[0]
    else:
        num_row = stack_obj.length
        num_col = stack_obj.width
    num_pixel = num_row * num_col

    # Design matrix
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    A, B = stack_obj.get_design_matrix(date12_list=date12_list)
    num_ifgram = len(date12_list)
    num_date = A.shape[1] + 1
    try:
        ref_date = str(
            np.loadtxt('reference_date.txt', dtype=bytes).astype(str))
    except Exception:
        ref_date = stack_obj.dateList[0]
    ref_idx = stack_obj.dateList.index(ref_date)
    time_idx = [i for i in range(num_date)]
    time_idx.remove(ref_idx)
    Astd = stack_obj.get_design_matrix(refDate=ref_date, dropIfgram=True)[0]

    # Initialization of output matrix
    print('number of interferograms: {}'.format(num_ifgram))
    print('number of acquisitions  : {}'.format(num_date))
    print('number of lines  : {}'.format(stack_obj.length))
    print('number of columns: {}'.format(stack_obj.width))
    ts = np.zeros((num_date, num_pixel), np.float32)
    ts_std = np.zeros((num_date, num_pixel), np.float32)
    temp_coh = np.zeros(num_pixel, np.float32)
    num_inv_ifgram = np.zeros(num_pixel, np.int16)

    # Read/Mask unwrapPhase
    pha_data = read_unwrap_phase(stack_obj,
                                 box,
                                 ref_phase,
                                 skip_zero_phase=skip_zero_phase)

    pha_data = mask_unwrap_phase(pha_data,
                                 stack_obj,
                                 box,
                                 mask_ds_name=mask_dataset_name,
                                 mask_threshold=mask_threshold)

    # Mask for pixels to invert
    mask = np.ones(num_pixel, np.bool_)
    # 1 - Water Mask
    if water_mask_file:
        print(('skip pixels on water with mask from'
               ' file: {}').format(os.path.basename(water_mask_file)))
        dsNames = readfile.get_dataset_list(water_mask_file)
        dsName = [i for i in dsNames if i in ['waterMask', 'mask']][0]
        waterMask = readfile.read(water_mask_file, datasetName=dsName,
                                  box=box)[0].flatten()
        mask *= np.array(waterMask, np.bool_)
        del waterMask

    # 2 - Mask for Zero Phase in ALL ifgrams
    print('skip pixels with zero/nan value in all interferograms')
    phase_stack = np.nanmean(pha_data, axis=0)
    mask *= np.multiply(~np.isnan(phase_stack), phase_stack != 0.)
    del phase_stack

    # Invert pixels on mask 1+2
    num_pixel2inv = int(np.sum(mask))
    idx_pixel2inv = np.where(mask)[0]
    print(('number of pixels to invert: {} out of {}'
           ' ({:.1f}%)').format(num_pixel2inv, num_pixel,
                                num_pixel2inv / num_pixel * 100))
    if num_pixel2inv < 1:
        ts = ts.reshape(num_date, num_row, num_col)
        temp_coh = temp_coh.reshape(num_row, num_col)
        ts_std = ts_std.reshape(num_date, num_row, num_col)
        num_inv_ifgram = num_inv_ifgram.reshape(num_row, num_col)
        return ts, temp_coh, ts_std, num_inv_ifgram

    # Inversion - SBAS
    if weight_func == 'sbas':
        # get tbase_diff (for SBAS approach)
        date_list = stack_obj.get_date_list(dropIfgram=True)
        tbase = np.array(ptime.date_list2tbase(date_list)[0],
                         np.float32) / 365.25
        tbase_diff = np.diff(tbase).reshape(-1, 1)

        # Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
        mask_all_net = np.all(pha_data, axis=0)
        mask_all_net *= mask
        mask_part_net = mask ^ mask_all_net

        if np.sum(mask_all_net) > 0:
            print(('inverting pixels with valid phase in all  ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_all_net)))
            # num_all_net = int(np.sum(mask_all_net))
            # pha_data_temp = pha_data[:, mask_all_net]
            # ts1 = np.zeros((num_date-1, num_all_net))
            # temp_coh1 = np.zeros(num_all_net)
            # step = 1000
            # loop_num = int(np.floor(num_all_net/step))
            # prog_bar = ptime.progressBar(maxValue=loop_num)
            # for i in range(loop_num):
            #     [i0, i1] = [i * step, min((i + 1) * step, num_all_net)]
            #     ts1[:, i0:i1], temp_coh1[i0:i1] = network_inversion_sbas(B,
            #                                                              ifgram=pha_data_temp[:, i0:i1],
            #                                                              tbase_diff=tbase_diff,
            #                                                              skip_zero_phase=False)
            #     prog_bar.update(i+1, suffix=i0)
            # prog_bar.close()
            ts1, temp_coh1, ifg_num1 = network_inversion_sbas(
                B,
                ifgram=pha_data[:, mask_all_net],
                tbase_diff=tbase_diff,
                skip_zero_phase=False)
            ts[1:, mask_all_net] = ts1
            temp_coh[mask_all_net] = temp_coh1
            num_inv_ifgram[mask_all_net] = ifg_num1

        if np.sum(mask_part_net) > 0:
            print(('inverting pixels with valid phase in some ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_part_net)))
            num_pixel2inv = int(np.sum(mask_part_net))
            idx_pixel2inv = np.where(mask_part_net)[0]
            prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
            for i in range(num_pixel2inv):
                idx = idx_pixel2inv[i]
                ts1, temp_coh1, ifg_num1 = network_inversion_sbas(
                    B,
                    ifgram=pha_data[:, idx],
                    tbase_diff=tbase_diff,
                    skip_zero_phase=skip_zero_phase)
                ts[1:, idx] = ts1.flatten()
                temp_coh[idx] = temp_coh1
                num_inv_ifgram[idx] = ifg_num1
                prog_bar.update(i + 1,
                                every=100,
                                suffix='{}/{} pixels'.format(
                                    i + 1, num_pixel2inv))
            prog_bar.close()

    # Inversion - WLS
    else:
        weight = read_coherence2weight(stack_obj,
                                       box=box,
                                       weight_func=weight_func)

        # Converting to 32 bit floats leads to 2X speedup
        # (comment it out as we now convert it beforehand)
        # A = np.array(A, np.float32)
        # pha_data = np.array(pha_data, np.float32)
        # weight = np.array(weight, np.float32)
        # Astd = np.array(Astd, np.float32)

        # Weighted Inversion pixel by pixel
        print('inverting network of interferograms into time series ...')
        prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
        for i in range(num_pixel2inv):
            idx = idx_pixel2inv[i]
            ts1, temp_coh1, ts_std1, ifg_numi = network_inversion_wls(
                A,
                ifgram=pha_data[:, idx],
                weight=weight[:, idx],
                skip_zero_phase=skip_zero_phase,
                Astd=Astd)
            ts[1:, idx] = ts1.flatten()
            temp_coh[idx] = temp_coh1
            ts_std[time_idx, idx] = ts_std1.flatten()
            num_inv_ifgram[idx] = ifg_numi
            prog_bar.update(i + 1,
                            every=100,
                            suffix='{}/{} pixels'.format(i + 1, num_pixel2inv))
        prog_bar.close()

    ts = ts.reshape(num_date, num_row, num_col)
    ts_std = ts_std.reshape(num_date, num_row, num_col)
    temp_coh = temp_coh.reshape(num_row, num_col)
    num_inv_ifgram = num_inv_ifgram.reshape(num_row, num_col)

    # write output files if input file is splitted (box == None)
    if box is None:
        # metadata
        metadata = dict(stack_obj.metadata)
        metadata[key_prefix + 'weightFunc'] = weight_func
        suffix = re.findall(r'_\d{3}', ifgram_file)[0]
        write2hdf5_file(ifgram_file, metadata, ts, temp_coh, ts_std,
                        num_inv_ifgram, suffix)
        return
    else:
        return ts, temp_coh, ts_std, num_inv_ifgram
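
The box argument makes the inversion tileable; a sketch of a caller sweeping patches over the full stack (split_into_boxes and get_ifgram_reference_phase are assumed available from this module, as used in Example #12):

ifgram_file = 'INPUTS/ifgramStack.h5'   # placeholder path
ref_phase = get_ifgram_reference_phase(ifgram_file)
for box in split_into_boxes(ifgram_file, chunk_size=100e6):
    ts, temp_coh, ts_std, num_inv_ifgram = ifgram_inversion_patch(
        ifgram_file, box=box, ref_phase=ref_phase,
        weight_func='fim', mask_dataset_name='coherence')
    # ... paste each patch's result into full-size output arrays here ...
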
Example #16
def ifgram_inversion_patch(ifgram_file,
                           box=None,
                           ref_phase=None,
                           unwDatasetName='unwrapPhase',
                           weight_func='var',
                           min_norm_velocity=True,
                           mask_dataset_name=None,
                           mask_threshold=0.4,
                           min_redundancy=1.0,
                           water_mask_file=None,
                           skip_zero_phase=True):
    """Invert one patch of an ifgram stack into timeseries.
    Parameters: ifgram_file       : str, interferograms stack HDF5 file, e.g. ./INPUTS/ifgramStack.h5
                box               : tuple of 4 int, indicating (x0, y0, x1, y1) pixel coordinate of area of interest
                                    or None, to process the whole file and write output file
                ref_phase         : 1D array in size of (num_ifgram) 
                                    or None
                weight_func       : str, weight function, choose in ['no', 'fim', 'var', 'coh']
                mask_dataset_name : str, dataset name in ifgram_file used to mask unwrapPhase pixel-wise
                mask_threshold    : float, min coherence of pixels if mask_dataset_name='coherence'
                water_mask_file   : str, water mask filename if available,
                                    skip inversion on water to speed up the process
                skip_zero_phase   : bool, whether to skip zero values of unwrapped phase (default: True), kept for comparison
    Returns:    ts             : 3D array in size of (num_date, num_row, num_col)
                temp_coh       : 2D array in size of (num_row, num_col)
                ts_std         : 3D array in size of (num_date, num_row, num_col)
                num_inv_ifg : 2D array in size of (num_row, num_col)
    Example:    ifgram_inversion_patch('ifgramStack.h5', box=(0,200,1316,400), ref_phase=np.array(),
                                       weight_func='var', min_norm_velocity=True, mask_dataset_name='coherence')
                ifgram_inversion_patch('ifgramStack_001.h5', box=None, ref_phase=None,
                                       weight_func='var', min_norm_velocity=True, mask_dataset_name='coherence')
    """

    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)

    ## debug
    #y, x = 258, 454
    #box = (x, y, x+1, y+1)

    # Size Info - Patch
    if box:
        #print('processing \t %d-%d / %d lines ...' % (box[1], box[3], stack_obj.length))
        num_row = box[3] - box[1]
        num_col = box[2] - box[0]
    else:
        num_row = stack_obj.length
        num_col = stack_obj.width
    num_pixel = num_row * num_col

    # get tbase_diff
    date_list = stack_obj.get_date_list(dropIfgram=True)
    num_date = len(date_list)
    tbase = np.array(ptime.date_list2tbase(date_list)[0], np.float32) / 365.25
    tbase_diff = np.diff(tbase).reshape(-1, 1)

    # Design matrix
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    A, B = stack_obj.get_design_matrix4timeseries_estimation(
        date12_list=date12_list)[0:2]
    num_ifgram = len(date12_list)

    # prep for decor std time-series
    try:
        ref_date = str(
            np.loadtxt('reference_date.txt', dtype=bytes).astype(str))
    except Exception:
        ref_date = date_list[0]
    Astd = stack_obj.get_design_matrix4timeseries_estimation(
        refDate=ref_date, dropIfgram=True)[0]
    #ref_idx = date_list.index(ref_date)
    #time_idx = [i for i in range(num_date)]
    #time_idx.remove(ref_idx)

    # Initialization of output matrix
    ts = np.zeros((num_date, num_pixel), np.float32)
    ts_std = np.zeros((num_date, num_pixel), np.float32)
    temp_coh = np.zeros(num_pixel, np.float32)
    num_inv_ifg = np.zeros(num_pixel, np.int16)

    # Read/Mask unwrapPhase
    pha_data = read_unwrap_phase(stack_obj,
                                 box,
                                 ref_phase,
                                 unwDatasetName=unwDatasetName,
                                 dropIfgram=True,
                                 skip_zero_phase=skip_zero_phase)

    pha_data = mask_unwrap_phase(pha_data,
                                 stack_obj,
                                 box,
                                 dropIfgram=True,
                                 mask_ds_name=mask_dataset_name,
                                 mask_threshold=mask_threshold)

    # Mask for pixels to invert
    mask = np.ones(num_pixel, np.bool_)
    # 1 - Water Mask
    if water_mask_file:
        print(('skip pixels on water with mask from'
               ' file: {}').format(os.path.basename(water_mask_file)))
        atr_msk = readfile.read_attribute(water_mask_file)
        if (int(atr_msk['LENGTH']), int(
                atr_msk['WIDTH'])) != (stack_obj.length, stack_obj.width):
            raise ValueError(
                'Input water mask file has different size from ifgramStack file.'
            )
        del atr_msk
        dsName = [
            i for i in readfile.get_dataset_list(water_mask_file)
            if i in ['waterMask', 'mask']
        ][0]
        waterMask = readfile.read(water_mask_file, datasetName=dsName,
                                  box=box)[0].flatten()
        mask *= np.array(waterMask, np.bool_)
        del waterMask

    # 2 - Mask for Zero Phase in ALL ifgrams
    print('skip pixels with zero/nan value in all interferograms')
    phase_stack = np.nanmean(pha_data, axis=0)
    mask *= np.multiply(~np.isnan(phase_stack), phase_stack != 0.)
    del phase_stack

    # Invert pixels on mask 1+2
    num_pixel2inv = int(np.sum(mask))
    idx_pixel2inv = np.where(mask)[0]
    print(('number of pixels to invert: {} out of {}'
           ' ({:.1f}%)').format(num_pixel2inv, num_pixel,
                                num_pixel2inv / num_pixel * 100))
    if num_pixel2inv < 1:
        ts = ts.reshape(num_date, num_row, num_col)
        ts_std = ts_std.reshape(num_date, num_row, num_col)
        temp_coh = temp_coh.reshape(num_row, num_col)
        num_inv_ifg = num_inv_ifg.reshape(num_row, num_col)
        return ts, temp_coh, ts_std, num_inv_ifg

    # Inversion - SBAS
    if weight_func in ['no', 'sbas']:
        # Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
        mask_all_net = np.all(pha_data, axis=0)
        mask_all_net *= mask
        mask_part_net = mask ^ mask_all_net

        if np.sum(mask_all_net) > 0:
            print(('inverting pixels with valid phase in all  ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_all_net)))
            tsi, tcohi, num_ifgi = estimate_timeseries(
                A,
                B,
                tbase_diff,
                ifgram=pha_data[:, mask_all_net],
                weight_sqrt=None,
                min_norm_velocity=min_norm_velocity,
                skip_zero_phase=skip_zero_phase,
                min_redundancy=min_redundancy)
            ts[:, mask_all_net] = tsi
            temp_coh[mask_all_net] = tcohi
            num_inv_ifg[mask_all_net] = num_ifgi

        if np.sum(mask_part_net) > 0:
            print(('inverting pixels with valid phase in some ifgrams'
                   ' ({:.0f} pixels) ...').format(np.sum(mask_part_net)))
            num_pixel2inv = int(np.sum(mask_part_net))
            idx_pixel2inv = np.where(mask_part_net)[0]
            prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
            for i in range(num_pixel2inv):
                idx = idx_pixel2inv[i]
                tsi, tcohi, num_ifgi = estimate_timeseries(
                    A,
                    B,
                    tbase_diff,
                    ifgram=pha_data[:, idx],
                    weight_sqrt=None,
                    min_norm_velocity=min_norm_velocity,
                    skip_zero_phase=skip_zero_phase,
                    min_redundancy=min_redundancy)
                ts[:, idx] = tsi.flatten()
                temp_coh[idx] = tcohi
                num_inv_ifg[idx] = num_ifgi
                prog_bar.update(i + 1,
                                every=1000,
                                suffix='{}/{} pixels'.format(
                                    i + 1, num_pixel2inv))
            prog_bar.close()

    # Inversion - WLS
    else:
        L = int(stack_obj.metadata['ALOOKS']) * int(
            stack_obj.metadata['RLOOKS'])
        weight = read_coherence(stack_obj, box=box, dropIfgram=True)
        weight = coherence2weight(weight,
                                  weight_func=weight_func,
                                  L=L,
                                  epsilon=5e-2)
        weight = np.sqrt(weight)

        # Weighted Inversion pixel by pixel
        print('inverting network of interferograms into time-series ...')
        prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
        for i in range(num_pixel2inv):
            idx = idx_pixel2inv[i]
            tsi, tcohi, num_ifgi = estimate_timeseries(
                A,
                B,
                tbase_diff,
                ifgram=pha_data[:, idx],
                weight_sqrt=weight[:, idx],
                min_norm_velocity=min_norm_velocity,
                skip_zero_phase=skip_zero_phase,
                min_redundancy=min_redundancy)
            ts[:, idx] = tsi.flatten()
            temp_coh[idx] = tcohi
            num_inv_ifg[idx] = num_ifgi
            prog_bar.update(i + 1,
                            every=1000,
                            suffix='{}/{} pixels'.format(i + 1, num_pixel2inv))
        prog_bar.close()

    ts = ts.reshape(num_date, num_row, num_col)
    ts_std = ts_std.reshape(num_date, num_row, num_col)
    temp_coh = temp_coh.reshape(num_row, num_col)
    num_inv_ifg = num_inv_ifg.reshape(num_row, num_col)

    # write output files if input file is splitted (box == None)
    if box is None:
        # metadata
        metadata = dict(stack_obj.metadata)
        metadata[key_prefix + 'weightFunc'] = weight_func
        suffix = re.findall(r'_\d{3}', ifgram_file)[0]
        write2hdf5_file(ifgram_file,
                        metadata,
                        ts,
                        temp_coh,
                        ts_std,
                        num_inv_ifg,
                        suffix,
                        inps=inps)  # note: 'inps' is not defined locally; the original relies on an outer-scope variable
        return
    else:
        return ts, temp_coh, ts_std, num_inv_ifg
Example #17
def run_unwrap_error_patch(ifgram_file, box=None, mask_file=None, ref_phase=None, fast_mode=False,
                           thres=0.1, dsNameIn='unwrapPhase'):
    """Estimate/Correct unwrapping error in ifgram stack on area defined by box.
    Parameters: ifgram_file : string, ifgramStack file
                box : tuple of 4 int, indicating areas to be read and analyzed
                mask_file : string, file name of mask file for pixels to be analyzed
                ref_phase : 1D np.array in size of (num_ifgram,), phase value on the reference pixel, needed because:
                    1) phase values stored in pysar are not referenced yet
                    2) reference point may be out of box definition
                fast_mode : bool, apply zero jump constraint on ifgrams without unwrapping error.
                thres : float, threshold of non-zero phase closure to be identified as unwrapping error.
    Returns:    pha_data : 3D np.array in size of (num_ifgram_all, box[3]-box[1], box[2]-box[0]),
                    unwrapped phase value after error correction
    """
    # Basic info
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    num_ifgram = stack_obj.numIfgram

    # Size Info - Patch
    if box:
        num_row = box[3] - box[1]
        num_col = box[2] - box[0]
    else:
        num_row = stack_obj.length
        num_col = stack_obj.width
    num_pixel = num_row * num_col

    C = stack_obj.get_design_matrix4ifgram_triangle(dropIfgram=True)
    print('number of interferograms: {}'.format(C.shape[1]))
    print('number of triangles: {}'.format(C.shape[0]))

    # read unwrapPhase
    pha_data_all = ifginv.read_unwrap_phase(stack_obj, box, ref_phase,
                                            unwDatasetName=dsNameIn,
                                            dropIfgram=False)
    pha_data = np.array(pha_data_all[stack_obj.dropIfgram, :])

    # mask of pixels to analyze
    mask = np.ones((num_pixel), np.bool_)
    print('number of pixels read: {}'.format(num_pixel))
    # mask 1. mask of water or area of interest
    if mask_file:
        dsNames = readfile.get_dataset_list(mask_file)
        dsName = [i for i in dsNames if i in ['waterMask', 'mask']][0]
        waterMask = readfile.read(mask_file, datasetName=dsName, box=box)[0].flatten()
        mask *= np.array(waterMask, np.bool_)
        del waterMask
        print('number of pixels left after mask: {}'.format(np.sum(mask)))

    # mask 2. exclude pixels without unwrap error, i.e. zero phase closure on all triangles
    print('calculating phase closure of all possible triangles ...')
    pha_closure = np.dot(C, pha_data)
    pha_closure = np.abs(pha_closure - ut.wrap(pha_closure))       # Eq 4.2 (Fattahi, 2015)
    num_nonzero_closure = np.sum(pha_closure >= thres, axis=0)
    mask *= (num_nonzero_closure != 0.)
    del pha_closure
    print('number of pixels left after checking phase closure: {}'.format(np.sum(mask)))

    # mask summary
    num_pixel2proc = int(np.sum(mask))
    if num_pixel2proc > 0:
        ifgram = pha_data[:, mask]
        ifgram_cor = np.array(ifgram, np.float32)
        print('number of pixels to process: {} out of {} ({:.2f}%)'.format(num_pixel2proc, num_pixel,
                                                                           num_pixel2proc/num_pixel*100))

        # correcting unwrap error based on phase closure
        print('correcting unwrapping error ...')
        if fast_mode:
            ifgram_cor = correct_unwrap_error(ifgram, C, Dconstraint=False)[0]

        else:
            prog_bar = ptime.progressBar(maxValue=num_pixel2proc)
            for i in range(num_pixel2proc):
                ifgram_cor[:, i] = correct_unwrap_error(ifgram[:, i], C, Dconstraint=True)[0].flatten()
                prog_bar.update(i+1, every=10, suffix='{}/{}'.format(i+1, num_pixel2proc))
            prog_bar.close()

        pha_data[:, mask] = ifgram_cor
        pha_data_all[stack_obj.dropIfgram, :] = pha_data

    pha_data_all = pha_data_all.reshape(num_ifgram, num_row, num_col)
    num_nonzero_closure = num_nonzero_closure.reshape(num_row, num_col)
    return pha_data_all, num_nonzero_closure
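
A minimal usage sketch (paths are placeholders; ref_phase can be obtained as in Example #12):

ref_phase = get_ifgram_reference_phase('INPUTS/ifgramStack.h5')
pha_data, num_closure = run_unwrap_error_patch('INPUTS/ifgramStack.h5',
                                               mask_file='waterMask.h5',
                                               ref_phase=ref_phase,
                                               fast_mode=True)
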