Example #1
def file_operation(fname, operator, operand, out_file=None):
    """Mathmathic operation of file"""

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is '+k+' file: '+fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not out_file:
        if operator in ['+', 'plus',  'add',      'addition']:
            suffix = 'plus'
        elif operator in ['-', 'minus', 'subtract', 'subtraction']:
            suffix = 'minus'
        elif operator in ['*', 'times', 'multiply', 'multiplication']:
            suffix = 'multiply'
        elif operator in ['/', 'obelus', 'divide',   'division']:
            suffix = 'divide'
        elif operator in ['^', 'pow', 'power']:
            suffix = 'pow'
        else:
            raise ValueError('unrecognized operator: {}'.format(operator))
        out_file = '{}_{}{}{}'.format(os.path.splitext(fname)[0], suffix,
                                      str(operand), os.path.splitext(fname)[1])

    dsNames = readfile.get_dataset_list(fname)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(fname, datasetName=dsName)[0]
        data = data_operation(data, operator, operand)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fname)
    return out_file
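A minimal usage sketch for the function above; the file name and operand are hypothetical:

# add 0.5 to every dataset in velocity.h5; with out_file=None,
# the result is written to velocity_plus0.5.h5
out_file = file_operation('velocity.h5', '+', 0.5)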
Example #2
def read_subset_box(inpsDict):
    # Read subset info from template
    inpsDict['box'] = None
    inpsDict['box4geo_lut'] = None
    pix_box, geo_box = subset.read_subset_template2box(inpsDict['template_file'][0])

    # Grab required info to convert the input geo_box into pix_box
    try:
        lookupFile = [glob.glob(str(inpsDict['mintpy.load.lookupYFile']))[0],
                      glob.glob(str(inpsDict['mintpy.load.lookupXFile']))[0]]
    except:
        lookupFile = None

    try:
        pathKey = [i for i in datasetName2templateKey.values()
                   if i in inpsDict.keys()][0]
        file = glob.glob(str(inpsDict[pathKey]))[0]
        atr = readfile.read_attribute(file)
    except:
        atr = dict()

    geocoded = 'Y_FIRST' in atr.keys()

    # Check conflict
    if geo_box and not geocoded and lookupFile is None:
        geo_box = None
        print(('WARNING: mintpy.subset.lalo is not supported'
               ' if 1) no lookup file AND'
               '    2) radar/unknown coded dataset'))
        print('\tignore it and continue.')
    if not geo_box and not pix_box:
        return inpsDict

    # geo_box --> pix_box
    coord = ut.coordinate(atr, lookup_file=lookupFile)
    if geo_box is not None:
        pix_box = coord.bbox_geo2radar(geo_box)
        pix_box = coord.check_box_within_data_coverage(pix_box)
        print('input bounding box of interest in lalo: {}'.format(geo_box))
    print('box to read for datasets in y/x: {}'.format(pix_box))

    # Get box for geocoded lookup table (for gamma/roipac)
    box4geo_lut = None
    if lookupFile is not None:
        atrLut = readfile.read_attribute(lookupFile[0])
        if not geocoded and 'Y_FIRST' in atrLut.keys():
            geo_box = coord.bbox_radar2geo(pix_box)
            box4geo_lut = ut.coordinate(atrLut).bbox_geo2radar(geo_box)
            print('box to read for geocoded lookup file in y/x: {}'.format(box4geo_lut))

    inpsDict['box'] = pix_box
    inpsDict['box4geo_lut'] = box4geo_lut
    return inpsDict
Example #3
def is_file_exist(file_list, abspath=True):
    """Check if any file in the file list 1) exists and 2) readable
    Parameters: file_list : list of string, file name with/without wildcards
                abspath   : bool, return absolute file name/path or not
    Returns:    file_path : string, the first found file name/path; None if not found.
    """
    try:
        file = get_file_list(file_list, abspath=abspath)[0]
        readfile.read_attribute(file)
    except:
        file = None
    return file
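A usage sketch, assuming a hypothetical wildcard pattern that may match nothing:

# returns the first existing, readable match, or None
geom_file = is_file_exist(['inputs/geometry*.h5'], abspath=True)
if geom_file is None:
    print('no readable geometry file found')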
Example #4
def add_file(fnames, out_file=None):
    """Generate sum of all input files
    Parameters: fnames : list of str, path/name of input files to be added
                out_file : str, optional, path/name of output file
    Returns:    out_file : str, path/name of output file
    Example:    'mask_all.h5' = add_file(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    """
    # Default output file name
    ext = os.path.splitext(fnames[0])[1]
    if not out_file:
        out_file = os.path.splitext(fnames[0])[0]
        for i in range(1, len(fnames)):
            out_file += '_plus_' + os.path.splitext(os.path.basename(fnames[i]))[0]
        out_file += ext

    atr = readfile.read_attribute(fnames[0])
    dsNames = readfile.get_dataset_list(fnames[0])
    dsDict = {}
    for dsName in dsNames:
        print('adding {} ...'.format(dsName))
        data = readfile.read(fnames[0], datasetName=dsName)[0]
        for i in range(1, len(fnames)):
            d = readfile.read(fnames[i], datasetName=dsName)[0]
            data = add_matrix(data, d)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fnames[0])
    return out_file
Example #5
def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
    # read time-series
    atr = readfile.read_attribute(ts_file)
    range2phase = -4.*np.pi / float(atr['WAVELENGTH'])
    print('reading timeseries data from file {} ...'.format(ts_file))
    ts_data = readfile.read(ts_file)[0] * range2phase
    num_date, length, width = ts_data.shape
    ts_data = ts_data.reshape(num_date, -1)

    # reconstruct unwrapPhase
    print('reconstructing the interferograms from timeseries')
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A1 = stack_obj.get_design_matrix4timeseries(stack_obj.get_date12_list(dropIfgram=False))[0]
    num_ifgram = A1.shape[0]
    A0 = -1.*np.ones((num_ifgram, 1))
    A = np.hstack((A0, A1))
    ifgram_est = np.dot(A, ts_data).reshape(num_ifgram, length, width)
    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
    del ts_data

    # write to ifgram file
    dsDict = {}
    dsDict['unwrapPhase'] = ifgram_est
    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
    return ifgram_file
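The column of -1s (A0) absorbs the unknown phase at the reference date, so each row of the product A * ts yields phase(date2) - phase(date1) for one interferometric pair. A self-contained toy sketch of the shapes involved, assuming (as in MintPy) that the pair design matrix has one column per non-reference date:

import numpy as np

num_ifgram, num_date, num_pix = 3, 4, 10        # toy dimensions
A1 = np.random.rand(num_ifgram, num_date - 1)   # one column per non-reference date
A0 = -1. * np.ones((num_ifgram, 1))             # column absorbing the reference-date phase
A = np.hstack((A0, A1))                         # (num_ifgram, num_date)
ts = np.random.rand(num_date, num_pix)          # flattened time-series
ifg = np.dot(A, ts)                             # (num_ifgram, num_pix)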
Example #6
def main(argv):
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    lat = float(argv[0])
    lon = float(argv[1])

    try:
        trans_file = argv[2]
    except:
        trans_file = ut.get_lookup_file()

    try:
        radar_file = argv[3]
    except:
        radar_file = 'inputs/ifgramStack.h5'

    atr_rdr = readfile.read_attribute(radar_file)

    print('input geo coord: lat=%.4f, lon=%.4f' % (lat, lon))

    coord = ut.coordinate(atr_rdr, lookup_file=trans_file)
    y, x = coord.geo2radar(np.array(lat), np.array(lon))[0:2]
    print('corresponding radar coord: y=%d, x=%d' % (y, x))
    return
Example #7
def get_date12_list(fname, dropIfgram=False):
    """Read Date12 info from input file: Pairs.list or multi-group hdf5 file
    Inputs:
        fname       - string, path/name of input multi-group hdf5 file or text file
        dropIfgram  - bool, check the "DROP_IFGRAM" attribute or not for multi-group hdf5 file
    Output:
        date12_list - list of string in YYMMDD-YYMMDD format
    Example:
        date12List = get_date12_list('ifgramStack.h5')
        date12List = get_date12_list('ifgramStack.h5', dropIfgram=True)
        date12List = get_date12_list('Pairs.list')
    """
    date12_list = []
    ext = os.path.splitext(fname)[1].lower()
    if ext == '.h5':
        k = readfile.read_attribute(fname)['FILE_TYPE']
        if k == 'ifgramStack':
            date12_list = ifgramStack(fname).get_date12_list(dropIfgram=dropIfgram)
        else:
            return None
    else:
        txtContent = np.loadtxt(fname, dtype=bytes).astype(str)
        if len(txtContent.shape) == 1:
            txtContent = txtContent.reshape(-1, 1)
        date12_list = [i for i in txtContent[:, 0]]
    date12_list = sorted(date12_list)
    return date12_list
Example #8
def _check_inps(inps):
    inps.file = ut.get_file_list(inps.file)
    if not inps.file:
        raise Exception('ERROR: no input file found!')
    elif len(inps.file) > 1:
        inps.outfile = None

    atr = readfile.read_attribute(inps.file[0])
    if 'Y_FIRST' in atr.keys() and inps.radar2geo:
        print('input file is already geocoded')
        print('to resample geocoded files into radar coordinates, use --geo2radar option')
        print('exit without doing anything.')
        sys.exit(0)
    elif 'Y_FIRST' not in atr.keys() and not inps.radar2geo:
        print('input file is already in radar coordinates, exit without doing anything')
        sys.exit(0)

    inps.lookupFile = ut.get_lookup_file(inps.lookupFile)
    if not inps.lookupFile:
        raise FileNotFoundError('No lookup table found! Can not geocode without it.')

    if inps.SNWE:
        inps.SNWE = tuple(inps.SNWE)

    inps.laloStep = [inps.latStep, inps.lonStep]
    if None in inps.laloStep:
        inps.laloStep = None

    inps.nprocs = check_num_processor(inps.nprocs)
    return inps
Example #9
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    # input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking {} {} file: {}'.format(atr['PROCESSOR'], k, infile))
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    # output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0]+'_'+str(lks_y)+'alks_'+str(lks_x)+'rlks'+ext
        else:
            outfile = os.path.basename(infile)
    #print('writing >>> '+outfile)

    # read source data and multilooking
    dsNames = readfile.get_dataset_list(infile)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = dict()
    for dsName in dsNames:
        print('multilooking {d:<{w}} from {f} ...'.format(
            d=dsName, w=maxDigit, f=os.path.basename(infile)))
        data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
        data = multilook_data(data, lks_y, lks_x)
        dsDict[dsName] = data
    atr = multilook_attribute(atr, lks_y, lks_x)
    writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=infile)
    return outfile
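A usage sketch with a hypothetical file name: average 15 looks in azimuth and 5 looks in range. When run from the file's own directory, the default output name becomes <base>_15alks_5rlks<ext>:

out = multilook_file('filt_fine.int', lks_y=15, lks_x=5)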
Example #10
def read_network_info(inps):
    k = readfile.read_attribute(inps.ifgram_file)['FILE_TYPE']
    if k != 'ifgramStack':
        raise ValueError('input file {} is not ifgramStack: {}'.format(inps.ifgram_file, k))

    obj = ifgramStack(inps.ifgram_file)
    obj.open(print_msg=inps.print_msg)
    inps.date12_list    = obj.get_date12_list(dropIfgram=False)
    date12_kept = obj.get_date12_list(dropIfgram=True)
    inps.ex_date12_list = sorted(list(set(inps.date12_list) - set(date12_kept)))
    inps.date_list = obj.get_date_list(dropIfgram=False)
    vprint('number of all     interferograms: {}'.format(len(inps.date12_list)))
    vprint('number of dropped interferograms: {}'.format(len(inps.ex_date12_list)))
    vprint('number of kept    interferograms: {}'.format(len(inps.date12_list) - len(inps.ex_date12_list)))
    vprint('number of acquisitions: {}'.format(len(inps.date_list)))

    if inps.lalo:
        if not inps.lookup_file:            
            lookup_file = os.path.join(os.path.dirname(inps.ifgram_file), 'geometry*.h5')
            inps.lookup_file = ut.get_lookup_file(filePattern=lookup_file)
        coord = ut.coordinate(obj.metadata, lookup_file=inps.lookup_file)
        inps.yx = coord.geo2radar(inps.lalo[0], inps.lalo[1])[0:2]

    if not inps.yx:
        inps.yx = (obj.refY, obj.refX)
        vprint('plot initial coherence matrix at reference pixel: {}'.format(inps.yx))
    return inps
Example #11
def prepare_stack(inputDir, filePattern, metadata=dict(), baseline_dict=dict(), update_mode=True):
    print('prepare .rsc file for ', filePattern)
    isce_files = sorted(glob.glob(os.path.join(os.path.abspath(inputDir), '*', filePattern)))
    if len(isce_files) == 0:
        raise FileNotFoundError('no file found in pattern: {}'.format(filePattern))

    # write .rsc file for each interferogram file
    num_file = len(isce_files)
    prog_bar = ptime.progressBar(maxValue=num_file)
    for i in range(num_file):
        isce_file = isce_files[i]
        # prepare metadata for current file
        ifg_metadata = readfile.read_attribute(isce_file, metafile_ext='.xml')
        ifg_metadata.update(metadata)
        dates = os.path.basename(os.path.dirname(isce_file)).split('_')
        ifg_metadata = add_ifgram_metadata(ifg_metadata, dates, baseline_dict)

        # write .rsc file
        rsc_file = isce_file+'.rsc'
        writefile.write_roipac_rsc(ifg_metadata, rsc_file,
                                   update_mode=update_mode,
                                   print_msg=False)
        prog_bar.update(i+1, suffix='{}_{}'.format(dates[0], dates[1]))
    prog_bar.close()
    return
Example #12
    def get_metadata(self, family='unwrapPhase'):
        self.file = self.datasetDict[family]
        self.metadata = readfile.read_attribute(self.file)
        self.length = int(self.metadata['LENGTH'])
        self.width = int(self.metadata['WIDTH'])

        # if self.processor is None:
        #    ext = self.file.split('.')[-1]
        #    if 'PROCESSOR' in self.metadata.keys():
        #        self.processor = self.metadata['PROCESSOR']
        #    elif os.path.exists(self.file+'.xml'):
        #        self.processor = 'isce'
        #    elif os.path.exists(self.file+'.rsc'):
        #        self.processor = 'roipac'
        #    elif os.path.exists(self.file+'.par'):
        #        self.processor = 'gamma'
        #    elif ext == 'grd':
        #        self.processor = 'gmtsar'
        #    #what for DORIS/SNAP
        #    else:
        #        self.processor = 'isce'
        #self.metadata['PROCESSOR'] = self.processor

        if self.track:
            self.metadata['TRACK'] = self.track

        if self.platform:
            self.metadata['PLATFORM'] = self.platform

        return self.metadata
Example #13
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    ext = os.path.splitext(inps.file)[1].lower()

    # --date option
    if inps.disp_date:
        print_date_list(inps.file,
                        disp_ifgram=inps.disp_ifgram,
                        disp_num=inps.disp_num,
                        print_msg=True)
        return

    # --slice option
    if inps.disp_slice:
        if inps.disp_ifgram != 'all':
            raise ValueError('--show-ifgram option is not applicable to --slice.')
        print_slice_list(inps.file,
                         disp_num=inps.disp_num,
                         print_msg=True)
        return

    # Basic info
    print_aux_info(inps.file)

    # Generic Attribute/Structure of all files
    if ext in ['.h5', '.he5']:
        print('\n{} {:*<40}'.format('*'*20, 'HDF5 File Structure '))
        print_hdf5_structure(inps.file, max_meta_num=inps.max_meta_num)
    else:
        print('\n{} {:*<40}'.format('*'*20, 'Binary File Attributes '))
        atr = readfile.read_attribute(inps.file)
        print_attributes(atr, max_meta_num=inps.max_meta_num)

    return
Example #14
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'rangeDistance.h5'

    # Calculate look angle
    range_dis = ut.range_distance(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print('Input file is geocoded, only center range distance is calculated: ')
        print(range_dis)
        length = int(atr['LENGTH'])
        width = int(atr['WIDTH'])
        range_dis_mat = np.zeros((length, width), np.float32)
        range_dis_mat[:] = range_dis
        range_dis = range_dis_mat

    print('writing >>> '+outFile)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'm'
    try:
        atr.pop('REF_DATE')
    except:
        pass
    writefile.write(range_dis, out_file=outFile, metadata=atr)
    return outFile
Example #15
    def get_perp_baseline(self, family='unwrapPhase'):
        self.file = self.datasetDict[family]
        metadata = readfile.read_attribute(self.file)
        self.bperp_top = float(metadata['P_BASELINE_TOP_HDR'])
        self.bperp_bottom = float(metadata['P_BASELINE_BOTTOM_HDR'])
        self.bperp = (self.bperp_top + self.bperp_bottom) / 2.0
        return self.bperp
Example #16
def get_giant_ifg_list(fnames):
    m_date_list = []
    s_date_list = []
    pbase_list = []

    ext = os.path.splitext(fnames[0])[1]
    if ext == '.h5':
        obj = ifgramStack(fnames[0])
        obj.open()
        m_date_list = obj.mDates[obj.dropIfgram].tolist()
        s_date_list = obj.sDates[obj.dropIfgram].tolist()
        pbase_list = obj.pbaseIfgram[obj.dropIfgram].tolist()

    else:
        ifgramNum = len(fnames)
        print('Number of interferograms: %d' % (ifgramNum))
        for fname in fnames:
            atr = readfile.read_attribute(fname)
            m_date, s_date = ptime.yymmdd(atr['DATE12'].split('-'))
            pbase = (float(atr['P_BASELINE_TOP_HDR']) +
                     float(atr['P_BASELINE_BOTTOM_HDR'])) / 2.
            m_date_list.append(m_date)
            s_date_list.append(s_date)
            pbase_list.append(pbase)
    return m_date_list, s_date_list, pbase_list
Example #17
def read_timeseries_yx(timeseries_file, y, x, ref_yx=None):
    '''Read time-series displacement on point (y,x) from timeseries_file
    Inputs:
        timeseries_file : string, name/path of timeseries hdf5 file
        y/x : int, row/column number of point of interest
    Output:
        dis_ts : list of float, displacement time-series of point of interest
    '''
    atr = readfile.read_attribute(timeseries_file)
    k = atr['FILE_TYPE']
    dis_ts = []

    if k in ['GIANT_TS']:
        h5 = h5py.File(timeseries_file, 'r')
        date_list = [dt.fromordinal(int(i)).strftime('%Y%m%d') for i in h5['dates'][:].tolist()]
        dname = [i for i in ['rawts','recons'] if i in list(h5.keys())][0]
        dis_ts = h5[dname][:,y,x]
        if ref_yx is not None:
            dis_ts = h5[dname][:,ref_yx[0],ref_yx[1]]
        h5.close()
    else:
        box = [x, y, x+1, y+1]
        dis_ts = timeseries(timeseries_file).read(box=box, print_msg=False)
        #date_list = list(h5[k].keys())
        #for date in date_list:
        #    dis = h5[k].get(date)[y,x]
        #    if inps.ref_yx:
        #        dis -= h5[k].get(date)[ref_yx[0], ref_yx[1]]
        #    dis_ts.append(dis)
        #dis_ts = np.array(dis_ts)

    return dis_ts
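A usage sketch with a hypothetical file name and pixel location; note that in this snippet ref_yx is only honored for GIANT_TS files:

dis_ts = read_timeseries_yx('timeseries.h5', y=100, x=200)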
Example #18
def add_attribute(File, atr_new=dict(), print_msg=False):
    """Add/update input attribute into File
    Parameters: File - string, path/name of file
                atr_new - dict, attributes to be added/updated
                    if value is None, delete the item from input File attributes
    Returns:    File - string, path/name of updated file
    """
    atr = readfile.read_attribute(File)

    # Compare new attributes with existing ones
    update = update_attribute_or_not(atr_new, atr)
    if not update:
        print(('All attributes to be updated (removed) already exist (do not exist)'
               ' with the same value, skip update.'))
        return File

    # Update attributes
    f = h5py.File(File, 'r+')
    for key, value in iter(atr_new.items()):
        # delete the item if the new value is None
        if value == 'None' or value is None:
            try:
                f.attrs.pop(key)
                if print_msg:
                    print('remove {}'.format(key))
            except:
                pass
        else:
            f.attrs[key] = str(value)
            if print_msg:
                print('{} = {}'.format(key, str(value)))
    f.close()
    return File
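A usage sketch with a hypothetical file name and keys; passing None as a value deletes that attribute:

add_attribute('velocity.h5', {'UNIT': 'm/year', 'REF_DATE': None}, print_msg=True)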
Example #19
    def run_save2google_earth(self, step_name):
        """Save velocity file in geo coordinates into Google Earth raster image."""
        if self.template['mintpy.save.kmz'] is True:
            print('creating Google Earth KMZ file for geocoded velocity file: ...')
            # input
            vel_file = 'velocity.h5'
            atr = readfile.read_attribute(vel_file)
            if 'Y_FIRST' not in atr.keys():
                vel_file = os.path.join(self.workDir, 'geo/geo_velocity.h5')

            # output
            kmz_file = '{}.kmz'.format(os.path.splitext(vel_file)[0])
            scp_args = '{} -o {}'.format(vel_file, kmz_file)
            print('save_kmz.py', scp_args)

            # update mode
            try:
                fbase = os.path.basename(kmz_file)
                kmz_file = [i for i in [fbase, './geo/{}'.format(fbase), './pic/{}'.format(fbase)] 
                            if os.path.isfile(i)][0]
            except:
                kmz_file = None
            if ut.run_or_skip(out_file=kmz_file, in_file=vel_file, check_readable=False) == 'run':
                mintpy.save_kmz.main(scp_args.split())
        else:
            print('save velocity to Google Earth format is OFF.')
        return
Example #20
    def run_geocode(self, step_name):
        """geocode data files in radar coordinates into ./geo folder."""
        if self.template['mintpy.geocode']:
            ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
            atr = readfile.read_attribute(ts_file)
            if 'Y_FIRST' not in atr.keys():
                # 1. geocode
                out_dir = os.path.join(self.workDir, 'geo')
                if not os.path.isdir(out_dir):
                    os.makedirs(out_dir)
                    print('create directory:', out_dir)

                geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
                in_files = [geom_file, 'temporalCoherence.h5', ts_file, 'velocity.h5']
                scp_args = '-l {l} -t {t} --outdir {o} --update '.format(l=lookup_file,
                                                                         t=self.templateFile,
                                                                         o=out_dir)
                for in_file in in_files:
                    scp_args += ' {}'.format(in_file)
                print('geocode.py', scp_args)
                mintpy.geocode.main(scp_args.split())

                # 2. generate reliable pixel mask in geo coordinate
                geom_file = os.path.join(out_dir, 'geo_{}'.format(os.path.basename(geom_file)))
                tcoh_file = os.path.join(out_dir, 'geo_temporalCoherence.h5')
                mask_file = os.path.join(out_dir, 'geo_maskTempCoh.h5')
                tcoh_min = self.template['mintpy.networkInversion.minTempCoh']
                scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
                print('generate_mask.py', scp_args)
                if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file) == 'run':
                    mintpy.generate_mask.main(scp_args.split())
        else:
            print('geocoding is OFF')
        return
Example #21
    def get_metadata(self, family=None):
        if not family:
            family = [i for i in self.datasetDict.keys() if i != 'bperp'][0]
        self.file = self.datasetDict[family]
        self.metadata = readfile.read_attribute(self.file)
        self.length = int(self.metadata['LENGTH'])
        self.width = int(self.metadata['WIDTH'])

        # if self.processor is None:
        #    ext = self.file.split('.')[-1]
        #    if 'PROCESSOR' in self.metadata.keys():
        #        self.processor = self.metadata['PROCESSOR']
        #    elif os.path.exists(self.file+'.xml'):
        #        self.processor = 'isce'
        #    elif os.path.exists(self.file+'.rsc'):
        #        self.processor = 'roipac'
        #    elif os.path.exists(self.file+'.par'):
        #        self.processor = 'gamma'
        #    elif ext == 'grd':
        #        self.processor = 'gmtsar'
        #    #what for DORIS/SNAP
        #    else:
        #        self.processor = 'isce'
        #self.metadata['PROCESSOR'] = self.processor
        return self.metadata
Example #22
def read_aux_subset2inps(inps):
    # Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > tight
    if all(not i for i in [inps.subset_x,
                           inps.subset_y,
                           inps.subset_lat,
                           inps.subset_lon]):
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print('using subset info from '+inps.reference)

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print('using subset info from '+inps.template_file)

        # 3. Use subset from tight info
        elif inps.tight:
            inps.lookup_file = ut.get_lookup_file(inps.lookup_file)
            if not inps.lookup_file:
                raise Exception('No lookup file found! Can not use --tight option without it.')

            atr_lut = readfile.read_attribute(inps.lookup_file)
            coord = ut.coordinate(atr_lut)
            if 'Y_FIRST' in atr_lut.keys():
                rg_lut = readfile.read(inps.lookup_file, datasetName='range')[0]
                rg_unique, rg_pos = np.unique(rg_lut, return_inverse=True)
                idx_row, idx_col = np.where(rg_lut != rg_unique[np.bincount(rg_pos).argmax()])
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = coord.box_pixel2geo(pix_box)
                del rg_lut
            else:
                lat = readfile.read(inps.lookup_file, datasetName='latitude')[0]
                lon = readfile.read(inps.lookup_file, datasetName='longitude')[0]
                geo_box = (np.nanmin(lon), np.nanmax(lat),
                           np.nanmax(lon), np.nanmin(lat))
                pix_box = None
                del lat, lon
        else:
            raise Exception('No subset inputs found!')

        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)
    return inps
Example #23
    def get_dataset_data_type(self, dsName):
        ifgramObj = [v for v in self.pairsDict.values()][0]
        dsFile = ifgramObj.datasetDict[dsName]
        metadata = readfile.read_attribute(dsFile)
        dsDataType = dataType  # fall back to the module-level default data type
        if 'DATA_TYPE' in metadata.keys():
            dsDataType = dataTypeDict[metadata['DATA_TYPE'].lower()]
        return dsDataType
Example #24
def main(argv):
    try:
        file = argv[0]
    except:
        usage()
        sys.exit(1)

    outfile = os.path.splitext(file)[0]+'_wrap'+os.path.splitext(file)[1]
    one_cycle = 2*np.pi
    one_cycle = 0.05    # overrides the 2*pi default; used only for the timeseries branch below

    atr = readfile.read_attribute(file)
    k = atr['FILE_TYPE']

    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5 = h5py.File(file, 'r')
        epochList = sorted(h5[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progressBar(maxValue=epoch_num)

        print('writing >>> '+outfile)
        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            date12_list = ptime.list_ifgram2date12(epochList)
            print('number of interferograms: '+str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k][epoch].get(epoch)[:]

                data_wrap = rewrap(data)

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data_wrap)
                for key, value in h5[k][epoch].attrs.items():
                    gg.attrs[key] = value
                prog_bar.update(i+1, suffix=date12_list[i])

        elif k == 'timeseries':
            print('number of acquisitions: '+str(len(epochList)))
            for i in range(epoch_num):
                epoch = epochList[i]
                data = h5[k].get(epoch)[:]

                data_wrap = rewrap(data, one_cycle)

                dset = group.create_dataset(epoch, data=data_wrap)
                prog_bar.update(i+1, suffix=epoch)
            for key, value in h5[k].attrs.items():
                group.attrs[key] = value

        h5.close()
        h5out.close()
        prog_bar.close()

    print('Done.')
    return outfile
Example #25
def run_or_skip(out_file, in_file=None, check_readable=True, print_msg=True):
    """Check whether to update out_file or not.
    return run if any of the following meets:
        1. out_file is empty, e.g. None, []
        2. out_file is not existed
        3. out_file is not readable by readfile.read_attribute() when check_readable=True
        4. out_file is older than in_file, if in_file is not None
    Otherwise, return skip.

    If in_file=None and out_file exists and readable, return skip

    Parameters: out_file : string or list of string, output file(s)
                in_file  : string or list of string, input file(s)
                check_readable : bool, check if the 1st output file has attribute 'WIDTH'
                print_msg      : bool, print message
    Returns:    run/skip : str, whether to update output file or not
    Example:    if ut.run_or_skip(out_file='timeseries_ECMWF_demErr.h5', in_file='timeseries_ECMWF.h5'):
                if ut.run_or_skip(out_file='exclude_date.txt',
                                  in_file=['timeseries_ECMWF_demErrInvResid.h5',
                                           'maskTempCoh.h5',
                                           'smallbaselineApp.cfg'],  
                                  check_readable=False):
    """
    # 1 - check existance of output files
    if not out_file:
        return 'run'
    else:
        if isinstance(out_file, str):
            out_file = [out_file]
        if not all(os.path.isfile(i) for i in out_file):
            return 'run'

    # 2 - check readability of output files
    if check_readable:
        try:
            atr = readfile.read_attribute(out_file[0])
            width = atr['WIDTH']
        except:
            if print_msg:
                print('{} exists, but can not read, remove it.'.format(out_file[0]))
            rmCmd = 'rm {}'.format(out_file[0])
            print(rmCmd)
            os.system(rmCmd)
            return 'run'

    # 3 - check modification time of output and input files
    if in_file:
        in_file = get_file_list(in_file)
        # Check modification time
        if in_file:
            t_in  = max([os.path.getmtime(i) for i in in_file])
            t_out = min([os.path.getmtime(i) for i in out_file])
            if t_in > t_out:
                return 'run'
            elif print_msg:
                print('{} exists and is newer than {} --> skip.'.format(out_file, in_file))
    return 'skip'
Example #26
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    # check input file type
    atr = readfile.read_attribute(inps.timeseries_file[0])
    if 'timeseries' not in atr['FILE_TYPE'].lower():
        raise ValueError('input file type: {} is not timeseries.'.format(atr['FILE_TYPE']))
    return inps
Example #27
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
    print('-'*50)
    print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
    print('-'*50)
    atr = readfile.read_attribute(fname)

    # Check Sensor Type
    platform = atr['PLATFORM']
    print('platform: '+platform)
    if not platform.lower() in ['env', 'envisat']:
        print('No need to correct LOD for '+platform)
        return

    # output file name
    if not out_file:
        out_file = '{}_LODcor{}'.format(os.path.splitext(fname)[0], os.path.splitext(fname)[1])

    # Get LOD ramp rate from empirical model
    if not rg_dist_file:
        print('calculate range distance from file metadata')
        rg_dist = get_relative_range_distance(atr)
    else:
        print('read range distance from file: %s' % (rg_dist_file))
        rg_dist = readfile.read(rg_dist_file, datasetName='slantRangeDistance', print_msg=False)[0]
        rg_dist -= rg_dist[int(atr['REF_Y']), int(atr['REF_X'])]
    ramp_rate = np.array(rg_dist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input fname
    range2phase = -4*np.pi / float(atr['WAVELENGTH'])
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        # read
        obj = timeseries(fname)
        obj.open()
        data = obj.read()

        # correct LOD
        diff_year = np.array(obj.yearList)
        diff_year -= diff_year[obj.refIndex]
        for i in range(data.shape[0]):
            data[i, :, :] -= ramp_rate * diff_year[i]

        # write
        obj_out = timeseries(out_file)
        obj_out.write2hdf5(data, refFile=fname)

    elif k in ['.unw']:
        data, atr = readfile.read(fname)

        dates = ptime.yyyymmdd2years(ptime.yyyymmdd(atr['DATE12'].split('-')))
        dt = dates[1] - dates[0]
        data -= ramp_rate * range2phase * dt

        writefile.write(data, out_file=out_file, metadata=atr)
    else:
        print('No need to correct for LOD for %s file' % (k))
    return out_file
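A usage sketch with a hypothetical file name; the range distance is derived from the file metadata when rg_dist_file is not given:

out = correct_local_oscilator_drift('timeseries.h5')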
Example #28
def plot_data():

    atr = readfile.read_attribute(file.get())
    file_type = atr['FILE_TYPE']

    datasets = readfile.get_dataset_list(file.get(), file_type)

    item = tree.focus()
    the_item = tree.item(item)
    epoch_num = the_item['text']
    if epoch_num in datasets:
        view.main([file.get(), epoch_num])
Example #29
def read_reference_file2inps(reference_file, inps=None):
    """Read reference info from reference file and update input namespace"""
    if not inps:
        inps = cmd_line_parse([''])
    atrRef = readfile.read_attribute(reference_file)
    if (not inps.ref_y or not inps.ref_x) and 'REF_X' in atrRef.keys():
        inps.ref_y = int(atrRef['REF_Y'])
        inps.ref_x = int(atrRef['REF_X'])
    if (not inps.ref_lat or not inps.ref_lon) and 'REF_LON' in atrRef.keys():
        inps.ref_lat = float(atrRef['REF_LAT'])
        inps.ref_lon = float(atrRef['REF_LON'])
    return inps
Example #30
    def open(self):
        """Prepare aux data before interpolation operation"""
        self.lut_metadata = readfile.read_attribute(self.file)
        self.src_metadata = readfile.read_attribute(self.dataFile)

        if not self.processor:
            if 'Y_FIRST' in self.lut_metadata.keys():
                self.processor = 'scipy'
            else:
                self.processor = 'pyresample'

        # get src_def and dest_def, update SNWE and laloStep
        if self.processor == 'pyresample':
            if 'Y_FIRST' not in self.lut_metadata.keys():
                self.prepare_geometry_definition_radar()
            else:
                self.prepare_geometry_definition_geo()
            self.length, self.width = self.dest_def.lats.shape

        elif self.processor == 'scipy' and 'Y_FIRST' in self.lut_metadata.keys():
            self.prepare_regular_grid_interpolator()
Example #31
def run_or_skip(inps):
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('1) output file {} NOT found.'.format(inps.outfile))
    else:
        print('1) output file {} already exists.'.format(inps.outfile))
        ti = os.path.getmtime(inps.timeseries_file)
        to = os.path.getmtime(inps.outfile)
        if ti > to:
            flag = 'run'
            print('2) output file is NOT newer than input file: {}.'.format(
                inps.timeseries_file))
        else:
            print('2) output file is newer than input file: {}.'.format(
                inps.timeseries_file))

    # check configuration
    if flag == 'skip':
        atr = readfile.read_attribute(inps.outfile)
        if any(
                str(vars(inps)[key]) != atr.get(key_prefix + key, 'None')
                for key in configKeys):
            flag = 'run'
            print('3) NOT all key configuration parameters are the same: {}.'.
                  format(configKeys))
        else:
            print(
                '3) all key configuration parameters are the same: {}.'.format(
                    configKeys))

    # result
    print('run or skip: {}.'.format(flag))
    return flag
Example #32
def update_box4files_with_inconsistent_size(fnames):
    """Check the size (row / column number) of a list of files
    For SNAP geocoded products, one line may be missing in some interferograms (Andre, 2019-07-16).
    Parameters: fnames  : list of path for interferogram files
    Returns:    pix_box : None if all files are in same size
                          (0, 0, min_width, min_length) if not.
    """
    atr_list = [readfile.read_attribute(fname) for fname in fnames]
    length_list = [int(atr['LENGTH']) for atr in atr_list]
    width_list = [int(atr['WIDTH']) for atr in atr_list]
    if any(len(set(i)) > 1 for i in [length_list, width_list]):
        min_length = min(length_list)
        min_width = min(width_list)
        pix_box = (0, 0, min_width, min_length)

        # print out warning message
        msg = '\n' + '*' * 80
        msg += '\nWARNING: NOT all input unwrapped interferograms have the same row/column number!'
        msg += '\nMinimum size is: ({}, {})'.format(min_length, min_width)
        msg += '\n' + '-' * 30
        msg += '\nThe following dates have different size:'

        for i in range(len(fnames)):
            if length_list[i] != min_length or width_list[i] != min_width:
                msg += '\n\t{}\t({}, {})'.format(atr_list[i]['DATE12'],
                                                 length_list[i], width_list[i])

        msg += '\n' + '-' * 30
        msg += '\nAssuming the interferograms above have:'
        msg += '\n\textra line(s) at the bottom OR'
        msg += '\n\textra column(s) at the right'
        msg += '\nContinue to load data using subset of the minimum size.'
        msg += '\n' + '*' * 80 + '\n'
        print(msg)
    else:
        pix_box = None
    return pix_box
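A usage sketch with a hypothetical glob pattern (assuming the glob module is imported, as in the snippets above):

fnames = sorted(glob.glob('interferograms/*/filt*.unw'))
box = update_box4files_with_inconsistent_size(fnames)
if box is not None:
    print('reading all files within the common box: {}'.format(box))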
Example #33
def ll2xy(inps):
    """transfer lat/lon to local coordination"""
    inps.metadata = readfile.read_attribute(inps.file[0])

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    #inps.inc_angle = readfile.read(inps.geometry[0], datasetName='incidenceAngle')[0]
    #inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    #inps.height = readfile.read(inps.geometry[0], datasetName='height')[0]

    # read mask file
    inps.mask = readfile.read(inps.file[0])[0]
    # mask data
    #inps.lat[inps.mask==0] = np.nan
    #inps.lon[inps.mask==0] = np.nan
    #inps.inc_angle[inps.mask==0] = np.nan
    #inps.head_angle[inps.mask==0] = np.nan
    #inps.height[inps.mask==0] = np.nan

    # convert lat/lon to x/y
    # origin point
    origin_lat = (inps.lat[0, 0] + inps.lat[-1, 0]) / 2
    origin_lon = (inps.lon[0, 0] + inps.lon[0, -1]) / 2

    lat = np.transpose(inps.lat.reshape(-1, 1))
    lon = np.transpose(inps.lon.reshape(-1, 1))

    llh = np.vstack((lon, lat))
    origin = np.array([origin_lon, origin_lat], dtype=float)
    XY = np.transpose(
        mut.llh2xy(llh, origin)
    ) * 1000  # X/Y in meters; XY is an [N, 2] array with column 0 = X and column 1 = Y
    X = XY[:, 0]
    Y = XY[:, 1]

    return X, Y, origin
Example #34
def cmd_line_parse(iargs=None):
    """Command line parser."""
    parser = create_parser()
    inps = parser.parse_args(args=iargs)
    inps.key = readfile.read_attribute(inps.timeseries_file)['FILE_TYPE']
    if inps.key not in ['timeseries', 'giantTimeseries', 'HDFEOS']:
        raise Exception('input file is {}, NOT timeseries!'.format(inps.key))

    # check bootstrap count number
    if inps.bootstrap and inps.bootstrapCount <= 1:
        inps.bootstrap = False
        print(
            'bootstrap-count should be larger than 1; bootstrapping is meaningless otherwise.'
        )
        print('turn OFF bootstrapping and continue without it.')

    if inps.bootstrap:
        print('bootstrapping is turned ON.')
        if (inps.polynomial != 1 or inps.periodic or inps.step):
            raise ValueError(
                'bootstrapping currently supports polynomial fitting ONLY, and ONLY with order 1!'
            )

    return inps
Example #35
def run_or_skip(inps):
    print('-'*50)
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('1) output file {} NOT found.'.format(inps.outfile))
    else:
        print('1) output file {} already exists.'.format(inps.outfile))
        infiles = [inps.timeseries_file]
        if inps.geom_file:
            infiles.append(inps.geom_file)
        ti = max(os.path.getmtime(i) for i in infiles)
        to = os.path.getmtime(inps.outfile)
        if ti > to:
            flag = 'run'
            print('2) output file is NOT newer than input file: {}.'.format(infiles))
        else:
            print('2) output file is newer than input file: {}.'.format(infiles))

    # check configuration
    if flag == 'skip':
        date_list_all = timeseries(inps.timeseries_file).get_date_list()
        inps.excludeDate = read_exclude_date(inps.excludeDate, date_list_all, print_msg=False)[1]
        atr = readfile.read_attribute(inps.outfile)
        if any(str(vars(inps)[key]) != atr.get(key_prefix+key, 'None') for key in configKeys):
            flag = 'run'
            print('3) NOT all key configuration parameters are the same: {}'.format(configKeys))
        else:
            print('3) all key configuration parameters are the same: {}'.format(configKeys))

    # result
    print('run or skip: {}.'.format(flag))
    return flag
Example #36
    def run_geocode(self, step_name):
        """geocode data files in radar coordinates into ./geo folder."""
        if self.template['mintpy.geocode']:
            ts_file = self.get_timeseries_filename(self.template)[step_name]['input']
            atr = readfile.read_attribute(ts_file)
            if 'Y_FIRST' not in atr.keys():
                # 1. geocode
                out_dir = os.path.join(self.workDir, 'geo')
                if not os.path.isdir(out_dir):
                    os.makedirs(out_dir)
                    print('create directory:', out_dir)

                geom_file, lookup_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2:4]
                in_files = [geom_file, 'temporalCoherence.h5', ts_file, 'velocity.h5']
                scp_args = '-l {l} -t {t} --outdir {o} --update '.format(l=lookup_file,
                                                                         t=self.templateFile,
                                                                         o=out_dir)
                for in_file in in_files:
                    scp_args += ' {}'.format(in_file)
                print('geocode.py', scp_args)
                mintpy.geocode.main(scp_args.split())

                # 2. generate reliable pixel mask in geo coordinate
                geom_file = os.path.join(out_dir, 'geo_{}'.format(os.path.basename(geom_file)))
                tcoh_file = os.path.join(out_dir, 'geo_temporalCoherence.h5')
                mask_file = os.path.join(out_dir, 'geo_maskTempCoh.h5')
                tcoh_min = self.template['mintpy.networkInversion.minTempCoh']
                scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min, mask_file, geom_file)
                print('generate_mask.py', scp_args)
                if ut.run_or_skip(out_file=mask_file, in_file=tcoh_file) == 'run':
                    mintpy.generate_mask.main(scp_args.split())
            else:
                print('dataset is geocoded, skip geocoding and continue.')
        else:
            print('geocoding is OFF')
        return
Example #37
def add_file(fnames, out_file=None):
    """Generate sum of all input files
    Parameters: fnames : list of str, path/name of input files to be added
                out_file : str, optional, path/name of output file
    Returns:    out_file : str, path/name of output file
    Example:    'mask_all.h5' = add_file(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    """
    # Default output file name
    ext = os.path.splitext(fnames[0])[1]
    if not out_file:
        out_file = os.path.splitext(fnames[0])[0]
        for i in range(1, len(fnames)):
            out_file += '_plus_' + os.path.splitext(os.path.basename(fnames[i]))[0]
        out_file += ext

    dsDict = {}
    dsNames = readfile.get_dataset_list(fnames[0])
    for dsName in dsNames:
        # ignore dsName if input file has single dataset
        if len(dsNames) == 1:
            dsName2read = None
        else:
            dsName2read = dsName

        print('adding {} ...'.format(dsName))
        data = readfile.read(fnames[0], datasetName=dsName2read)[0]
        for i in range(1, len(fnames)):
            d = readfile.read(fnames[i], datasetName=dsName2read)[0]
            data = add_matrix(data, d)
        dsDict[dsName] = data

    # output
    atr = readfile.read_attribute(fnames[0])
    print('use metadata from the 1st file: {}'.format(fnames[0]))
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fnames[0])
    return out_file
Example #38
def main(argv):
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    y = int(argv[0])
    x = int(argv[1])
    print('input radar coord: y/azimuth=%d, x/range=%d' % (y, x))

    try:
        trans_file = argv[2]
    except:
        trans_file = ut.get_lookup_file()

    try:
        radar_file = argv[3]
    except:
        radar_file = 'inputs/ifgramStack.h5'
    atr_rdr = readfile.read_attribute(radar_file)

    coord = ut.coordinate(atr_rdr, lookup_file=trans_file)
    lat, lon = coord.radar2geo(np.array(y), np.array(x))[0:2]
    print('corresponding geo coord: lat=%.4f, lon=%.4f' % (lat, lon))
    return
Example #39
def update_file_attribute(fname, atr_new):
    # Read Original Attributes
    atr = readfile.read_attribute(fname)
    print('update {} file attribute: {}'.format(atr['FILE_TYPE'], fname))

    ext = os.path.splitext(fname)[1]
    if ext in ['.h5', '.he5']:
        fname = ut.add_attribute(fname, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print(
                'All attributes to be updated (removed) already exist (do not exist) with the same value, skip update.'
            )
        else:
            for key, value in iter(atr_new.items()):
                if value == 'None' and key in atr.keys():
                    atr.pop(key)
                else:
                    atr[key] = value

            rsc_file = '{}.rsc'.format(fname)
            print('writing >>> {}'.format(rsc_file))
            writefile.write_roipac_rsc(atr, out_file=rsc_file)
    return fname
Example #40
def run_2to3_timeseries(py2_file, py3_file):
    """Convert timeseries file from py2-MintPy format to py3-MintPy format"""
    # read data from py2_file
    atr = readfile.read_attribute(py2_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    with h5py.File(py2_file, 'r') as f:
        date_list = list(f['timeseries'].keys())
        num_date = len(date_list)
        ts_data = np.zeros((num_date, length, width), np.float32)
        print('reading time-series ...')
        prog_bar = ptime.progressBar(maxValue=num_date)
        for i in range(num_date):
            ts_data[i, :, :] = f['timeseries/{}'.format(date_list[i])][:]
            prog_bar.update(i + 1, suffix=date_list[i])
        prog_bar.close()

    # prepare metadata
    dates = np.array(date_list, np.string_)
    atr['REF_DATE'] = date_list[0]
    if 'P_BASELINE_TIMESERIES' in atr.keys():
        bperp = np.array(
            [float(i) for i in atr['P_BASELINE_TIMESERIES'].split()],
            dtype=np.float32)
    else:
        bperp = None
    for key in [
            'P_BASELINE_TIMESERIES', 'P_BASELINE_TOP_TIMESERIES',
            'P_BASELINE_BOTTOM_TIMESERIES'
    ]:
        if key in atr.keys():
            atr.pop(key)

    # write to py3_file
    ts_obj = timeseries(py3_file)
    ts_obj.write2hdf5(data=ts_data, dates=dates, bperp=bperp, metadata=atr)
    return py3_file
Example #41
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    # check 1 input file type
    k = readfile.read_attribute(inps.ifgram_file)['FILE_TYPE']
    if k not in ['ifgramStack']:
        raise ValueError('input file is not ifgramStack: {}'.format(k))

    # check 2 cc_mask_file
    if not os.path.isfile(inps.cc_mask_file):
        raise FileNotFoundError(inps.cc_mask_file)

    if not inps.datasetNameOut:
        inps.datasetNameOut = '{}_phaseClosure'.format(inps.datasetNameIn)

    # discard water mask file if not found
    if inps.waterMaskFile and not os.path.isfile(inps.waterMaskFile):
        inps.waterMaskFile = None

    return inps
Example #42
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # check reference date
    print('read metadata from file: {}'.format(inps.file))
    attr = readfile.read_attribute(inps.file)
    if attr['FILE_TYPE'] == 'timeseries' and inps.dset and '_' in inps.dset:
        inps.ref_date, inps.dset = inps.dset.split('_')
    else:
        inps.ref_date = None

    # read data
    print('read data     from file: {}'.format(inps.file))
    array, attr = readfile.read(inps.file, datasetName=inps.dset)
    if attr['FILE_TYPE'] == 'timeseries' and inps.ref_date:
        array -= readfile.read(inps.file, datasetName=inps.ref_date)[0]

    # output filename
    if not inps.outfile:
        fbase = pp.auto_figure_title(inps.file,
                                     datasetNames=inps.dset,
                                     inps_dict=vars(inps))
        inps.outfile = fbase + GDAL_DRIVER2EXT.get(inps.out_format, '')
    else:
        inps.outfile = os.path.abspath(inps.outfile)

    # coordinate info
    rasterOrigin = (float(attr['X_FIRST']), float(attr['Y_FIRST']))
    xStep = float(attr['X_STEP'])
    yStep = float(attr['Y_STEP'])

    # convert array to raster
    array2raster(array, inps.outfile, inps.out_format, rasterOrigin, xStep,
                 yStep)

    return
Example #43
def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
    # read time-series
    atr = readfile.read_attribute(ts_file)
    range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
    print('reading timeseries data from file {} ...'.format(ts_file))
    ts_data = readfile.read(ts_file)[0] * range2phase
    num_date, length, width = ts_data.shape
    ts_data = ts_data.reshape(num_date, -1)

    # reconstruct unwrapPhase
    print('reconstructing the interferograms from timeseries')
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    date12_list = stack_obj.get_date12_list(dropIfgram=False)
    A = stack_obj.get_design_matrix4timeseries(date12_list, refDate='no')[0]
    ifgram_est = np.dot(A, ts_data).reshape(A.shape[0], length, width)
    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
    del ts_data

    # write to ifgram file
    dsDict = {}
    dsDict['unwrapPhase'] = ifgram_est
    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
    return ifgram_file
Example #44
def calc_igs_iono_ramp(tec_dir, date_str, geom_file, box=None, print_msg=True):
    """Get 2D ionospheric delay from IGS TEC data for one acquisition.
    due to the variation of the incidence angle along LOS.

    Parameters: tec_dir        - str, path of the local TEC directory, i.e. ~/data/aux/IGS_TEC
                date_str       - str, date of interest in YYYYMMDD format
                geom_file      - str, path of the geometry file including incidenceAngle data
                box            - tuple of 4 int, subset in (x0, y0, x1, y1)
    Returns:    range_delay    - 2D np.ndarray for the range delay in meters
                vtec           - float, TEC value in zenith direction in TECU
                iono_lat/lon   - float, latitude/longitude in degrees of LOS vector in ionosphere shell
                iono_height    - float, height             in meter   of LOS vector in ionosphere shell
                iono_inc_angle - float, incidence angle    in degrees of LOS vector in ionosphere shell
    """
    # geometry
    tec_file = dload_igs_tec(date_str, tec_dir, print_msg=print_msg)
    iono_height = grab_ionex_height(tec_file)
    (iono_inc_angle, iono_lat,
     iono_lon) = prep_geometry_iono(geom_file,
                                    box=box,
                                    iono_height=iono_height)[:3]

    # time
    meta = readfile.read_attribute(geom_file)
    utc_sec = float(meta['CENTER_LINE_UTC'])
    if print_msg:
        h, s = divmod(utc_sec, 3600)
        m, s = divmod(s, 60)
        print('UTC time: {:02.0f}:{:02.0f}:{:02.1f}'.format(h, m, s))

    # extract zenith TEC
    freq = SPEED_OF_LIGHT / float(meta['WAVELENGTH'])
    vtec = get_igs_tec_value(tec_file, utc_sec, iono_lon, iono_lat)
    range_delay = vtec2range_delay(vtec, iono_inc_angle, freq)

    return range_delay, vtec, iono_lat, iono_lon, iono_height, iono_inc_angle
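A usage sketch with hypothetical paths, grabbing only the range delay and zenith TEC from the returned tuple:

delay, vtec = calc_igs_iono_ramp('~/data/aux/IGS_TEC', '20141115',
                                 'inputs/geometryRadar.h5')[:2]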
Example #45
def check_loaded_dataset(work_dir='./', print_msg=True, relpath=False):
    """Check the result of loading data for the following two rules:
        1. file existence
        2. file attribute readability

    Parameters: work_dir  : string, MintPy working directory
                print_msg : bool, print out message
    Returns:    load_complete : bool, True if all required files and datasets exist; raises an error otherwise.
                    If True, the PROCESS and SLC folders could be removed.
                stack_file  : str, path of the interferogram stack file
                geom_file   : str, path of the geometry file
                lookup_file : str, path of the lookup table file
    Example:    work_dir = os.path.expandvars('./FernandinaSenDT128/mintpy')
                ut.check_loaded_dataset(work_dir)
    """
    load_complete = True

    if not work_dir:
        work_dir = os.getcwd()
    work_dir = os.path.abspath(work_dir)

    # 1. interferograms stack file: unwrapPhase, coherence
    ds_list = ['unwrapPhase', 'rangeOffset', 'azimuthOffset']
    flist = [os.path.join(work_dir, 'inputs/ifgramStack.h5')]
    stack_file = is_file_exist(flist, abspath=True)
    if stack_file is not None:
        obj = ifgramStack(stack_file)
        obj.open(print_msg=False)
        if all(x not in obj.datasetNames for x in ds_list):
            msg = 'required dataset is missing in file {}:'.format(stack_file)
            msg += '\n' + ' OR '.join(ds_list)
            raise ValueError(msg)
        # check coherence for phase stack
        if 'unwrapPhase' in obj.datasetNames and 'coherence' not in obj.datasetNames:
            print('WARNING: "coherence" is missing in file {}'.format(
                stack_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './inputs/ifgramStack.h5')

    atr = readfile.read_attribute(stack_file)

    # 2. geom_file: height
    if 'X_FIRST' in atr.keys():
        flist = [os.path.join(work_dir, 'inputs/geometryGeo.h5')]
    else:
        flist = [os.path.join(work_dir, 'inputs/geometryRadar.h5')]
    geom_file = is_file_exist(flist, abspath=True)
    if geom_file is not None:
        obj = geometry(geom_file)
        obj.open(print_msg=False)
        dname = geometryDatasetNames[0]
        if dname not in obj.datasetNames:
            raise ValueError(
                'required dataset "{}" is missing in file {}'.format(
                    dname, geom_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './inputs/geometry*.h5')

    # 3. lookup_file: latitude,longitude or rangeCoord,azimuthCoord
    # could be different than geometry file in case of roipac and gamma
    flist = [os.path.join(work_dir, 'inputs/geometry*.h5')]
    lookup_file = get_lookup_file(flist, abspath=True, print_msg=print_msg)
    if 'X_FIRST' not in atr.keys():
        if lookup_file is not None:
            obj = geometry(lookup_file)
            obj.open(print_msg=False)

            if atr['PROCESSOR'] in ['isce', 'doris']:
                dnames = geometryDatasetNames[1:3]
            elif atr['PROCESSOR'] in ['gamma', 'roipac']:
                dnames = geometryDatasetNames[3:5]
            else:
                raise AttributeError('InSAR processor: {}'.format(
                    atr['PROCESSOR']))

            for dname in dnames:
                if dname not in obj.datasetNames:
                    load_complete = False
                    raise Exception(
                        'required dataset "{}" is missing in file {}'.format(
                            dname, lookup_file))
        else:
            raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                    './inputs/geometry*.h5')
    else:
        print("Input data seems to be geocoded. Lookup file not needed.")

    if relpath:
        stack_file = os.path.relpath(stack_file) if stack_file else stack_file
        geom_file = os.path.relpath(geom_file) if geom_file else geom_file
        lookup_file = os.path.relpath(
            lookup_file) if lookup_file else lookup_file

    # print message
    if print_msg:
        print(('Loaded dataset was processed by '
               'InSAR software: {}'.format(atr['PROCESSOR'])))
        if 'X_FIRST' in atr.keys():
            print('Loaded dataset is in GEO coordinates')
        else:
            print('Loaded dataset is in RADAR coordinates')
        print('Interferograms Stack: {}'.format(stack_file))
        print('Geometry File       : {}'.format(geom_file))
        print('Lookup Table File   : {}'.format(lookup_file))
        if load_complete:
            print('-' * 50)
            print(
                'All data needed found/loaded/copied. Processed 2-pass InSAR data can be removed.'
            )
        print('-' * 50)

    return load_complete, stack_file, geom_file, lookup_file
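A typical call unpacks the flag and the three file paths; a minimal usage sketch (the directory name is hypothetical):

import os

work_dir = os.path.expandvars('./FernandinaSenDT128/mintpy')  # hypothetical path
load_complete, stack_file, geom_file, lookup_file = check_loaded_dataset(work_dir)
if load_complete:
    print('all inputs loaded; intermediate interferogram products can be removed')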
Example #46
0
def get_dataset_size(fname):
    atr = readfile.read_attribute(fname)
    shape = (int(atr['LENGTH']), int(atr['WIDTH']))
    return shape
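Usage is a one-liner; for example (the file name is hypothetical):

length, width = get_dataset_size('velocity.h5')  # hypothetical file
print('grid size: {} rows x {} columns'.format(length, width))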
Example #47
0
def read_inps_dict2ifgram_stack_dict_object(inpsDict):
    """Read input arguments into dict of ifgramStackDict object"""
    # inpsDict --> dsPathDict
    print('-'*50)
    print('searching interferometric pairs info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    dsNumDict = {}
    for dsName in [i for i in ifgramDatasetNames
                   if i in datasetName2templateKey.keys()]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key])))
            if len(files) > 0:
                dsPathDict[dsName] = files
                dsNumDict[dsName] = len(files)
                print('{:<{width}}: {path}'.format(dsName,
                                                   width=maxDigit,
                                                   path=inpsDict[key]))

    # Check 1: required dataset
    dsName0 = 'unwrapPhase'
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))
        return None

    # Check 2: number of files for all dataset types
    for key, value in dsNumDict.items():
        print('number of {:<{width}}: {num}'.format(key, width=maxDigit, num=value))

    dsNumList = list(dsNumDict.values())
    if any(i != dsNumList[0] for i in dsNumList):
        msg = 'WARNING: NOT all types of dataset have the same number of files.'
        msg += ' -> skip interferograms with missing files and continue.'
        print(msg)
        #raise Exception(msg)

    # Check 3: data dimension for all files

    # dsPathDict --> pairsDict --> stackObj
    dsNameList = list(dsPathDict.keys())
    pairsDict = {}
    for dsPath in dsPathDict[dsName0]:
        dates = ptime.yyyymmdd(readfile.read_attribute(dsPath)['DATE12'].split('-'))

        #####################################
        # A dictionary of data files for a given pair.
        # One pair may have several types of dataset.
        # example: ifgramPathDict = {'unwrapPhase': '/pathToFile/filt.unw', 'iono': '/pathToFile/iono.bil'}
        # All data file paths must contain the master and slave dates, either in the file name or the folder name.

        ifgramPathDict = {}
        for i in range(len(dsNameList)):
            dsName = dsNameList[i]
            dsPath1 = dsPathDict[dsName][0]
            if all(d[2:8] in dsPath1 for d in dates):
                ifgramPathDict[dsName] = dsPath1
            else:
                dsPath2 = [i for i in dsPathDict[dsName]
                           if all(d[2:8] in i for d in dates)]
                if len(dsPath2) > 0:
                    ifgramPathDict[dsName] = dsPath2[0]
                else:
                    print('WARNING: {} file missing for pair {}'.format(dsName, dates))
        ifgramObj = ifgramDict(dates=tuple(dates),
                               datasetDict=ifgramPathDict)
        pairsDict[tuple(dates)] = ifgramObj

    if len(pairsDict) > 0:
        stackObj = ifgramStackDict(pairsDict=pairsDict)
    else:
        stackObj = None
    return stackObj
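The pairing step above matches each file to its interferometric pair by checking that both acquisition dates appear in the path as YYMMDD substrings. A self-contained toy check of that matching logic (the path is made up):

dates = ('20150601', '20150713')                       # pair dates in YYYYMMDD
path = '/data/ifgrams/150601_150713/filt_fine.unw'     # hypothetical file path
print(all(d[2:8] in path for d in dates))              # True: both YYMMDD found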
Example #48
0
def read_subset_box(inpsDict):
    # Read subset info from template
    inpsDict['box'] = None
    inpsDict['box4geo_lut'] = None
    pix_box, geo_box = subset.read_subset_template2box(inpsDict['template_file'][0])

    # Grab required info to read input geo_box into pix_box
    try:
        lookupFile = [glob.glob(str(inpsDict['mintpy.load.lookupYFile']))[0],
                      glob.glob(str(inpsDict['mintpy.load.lookupXFile']))[0]]
    except:
        lookupFile = None

    try:
        pathKey = [i for i in datasetName2templateKey.values()
                   if i in inpsDict.keys()][0]
        file = glob.glob(str(inpsDict[pathKey]))[0]
        atr = readfile.read_attribute(file)
    except:
        atr = dict()

    geocoded = None
    if 'Y_FIRST' in atr.keys():
        geocoded = True
    else:
        geocoded = False

    # Check conflict
    if geo_box and not geocoded and lookupFile is None:
        geo_box = None
        print(('WARNING: mintpy.subset.lalo is not supported'
               ' if 1) no lookup file AND'
               '    2) radar/unknown coded dataset'))
        print('\tignore it and continue.')

    if not geo_box and not pix_box:
        # adjust for the size inconsistency problem in SNAP geocoded products
        # ONLY IF there is no input subset
        # Use the min bbox if files size are different
        if inpsDict['processor'] == 'snap':
            fnames = ut.get_file_list(inpsDict['mintpy.load.unwFile'])
            pix_box = check_files_size(fnames)

        if not pix_box:
            return inpsDict

    # geo_box --> pix_box
    coord = ut.coordinate(atr, lookup_file=lookupFile)
    if geo_box is not None:
        pix_box = coord.bbox_geo2radar(geo_box)
        pix_box = coord.check_box_within_data_coverage(pix_box)
        print('input bounding box of interest in lalo: {}'.format(geo_box))
    print('box to read for datasets in y/x: {}'.format(pix_box))

    # Get box for geocoded lookup table (for gamma/roipac)
    box4geo_lut = None
    if lookupFile is not None:
        atrLut = readfile.read_attribute(lookupFile[0])
        if not geocoded and 'Y_FIRST' in atrLut.keys():
            geo_box = coord.bbox_radar2geo(pix_box)
            box4geo_lut = ut.coordinate(atrLut).bbox_geo2radar(geo_box)
            print('box to read for geocoded lookup file in y/x: {}'.format(box4geo_lut))

    inpsDict['box'] = pix_box
    inpsDict['box4geo_lut'] = box4geo_lut
    return inpsDict
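For geocoded inputs, the geo-to-pixel box conversion used above reduces to simple linear georeferencing. A minimal sketch of what coord.bbox_geo2radar amounts to in that case (assuming ROI_PAC-style X_FIRST/X_STEP/Y_FIRST/Y_STEP metadata and a north-up grid with Y_STEP < 0):

def bbox_geo2pix_sketch(geo_box, atr):
    """Convert a (W, N, E, S) box in degrees to (x0, y0, x1, y1) in pixels.
    Illustrative sketch for geocoded files only, not the MintPy implementation."""
    west, north, east, south = geo_box
    x0 = int((west  - float(atr['X_FIRST'])) / float(atr['X_STEP']))
    x1 = int((east  - float(atr['X_FIRST'])) / float(atr['X_STEP']))
    y0 = int((north - float(atr['Y_FIRST'])) / float(atr['Y_STEP']))
    y1 = int((south - float(atr['Y_FIRST'])) / float(atr['Y_STEP']))
    return (x0, y0, x1, y1)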
Example #49
0
def read_data(inps):
    # metadata
    atr = readfile.read_attribute(inps.file)

    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']
    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0] * range2phase
        print(
            "converting velocity to an interferogram with one year temporal baseline"
        )
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    elif k == 'timeseries':
        # date1 and date2
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)

        # read/prepare data
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]

        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError(
                    "No '-' in input dataset! It is required for {}".format(
                        dname))
        else:
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            # read/prepare data
            slice_name1 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date2))[0][0]
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(
                inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')

        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(
                    atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]

        if inps.outfile:
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'

            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # get rid of starting . if output as hdf5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
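The range2phase factor above is -4π/λ, i.e. radians of two-way phase per meter of range change. A quick numeric illustration (the C-band wavelength value is an assumption for the example):

import numpy as np

wavelength = 0.0555                      # meters, assumed C-band value
range2phase = -4 * np.pi / wavelength
phase = 0.01 * range2phase               # phase for 1 cm of range change
print('{:.2f} rad = {:.2f} fringes'.format(phase, abs(phase) / (2 * np.pi)))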
Example #50
0
def read_init_info(inps):
    # Time Series Info
    ts_file0 = inps.file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)

    if not inps.file_label:
        inps.file_label = [str(i) for i in list(range(len(inps.file)))]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) >= int(inps.start_date)
        ]
    if inps.end_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) <= int(inps.end_date)
        ]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)
    (inps.ex_date_list, inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    #if atr['REF_DATE'] in inps.date_list:
    #    inps.ref_idx = inps.date_list.index(atr['REF_DATE'])
    #else:
    #    inps.ref_idx = 0

    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = 3

    if not inps.idx:
        if inps.ref_idx < inps.num_date / 2.:
            inps.idx = inps.num_date - 3
        else:
            inps.idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign plot function
        inps.ts_plot_func = plot_ts_errorbar
        # read error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(float) * inps.unit_fac
        # update error file with excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: ' + str(data_box))
    vprint('subset coverage in y/x: ' + str(inps.pix_box))
    vprint('data   coverage in lat/lon: ' +
           str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: ' + str(inps.geo_box))
    vprint(
        '------------------------------------------------------------------------'
    )

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0],
                                           inps.ref_lalo[1],
                                           print_msg=False)[0:2]
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0],
                                       inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0],
                                         inps.yx[1],
                                         print_msg=False)[0:2]
    except:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(
            atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # Auto adjust transparency value when showing shaded relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(
                inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)
    return inps, atr
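With inps.wrap set, the displayed values are effectively re-wrapped into inps.wrap_range. A minimal sketch of wrapping data into a [vmin, vmax) interval such as [-π, π):

import numpy as np

def rewrap_sketch(data, wrap_range=(-np.pi, np.pi)):
    """Wrap values into [vmin, vmax); illustrative display-side re-wrapping."""
    vmin, vmax = wrap_range
    return (data - vmin) % (vmax - vmin) + vmin

print(rewrap_sketch(np.array([4.0])))    # ~ -2.28, i.e. 4.0 - 2*pi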
Example #51
0
def run_unwrap_error_bridge(ifgram_file, water_mask_file, ramp_type=None, radius=50, 
                            ccName='connectComponent', dsNameIn='unwrapPhase',
                            dsNameOut='unwrapPhase_bridging'):
    """Run unwrapping error correction with bridging
    Parameters: ifgram_file     : str, path of ifgram stack file
                water_mask_file : str, path of water mask file
                ramp_type       : str, name of phase ramp to be removed during the phase jump estimation
                ccName          : str, dataset name of connected components
                dsNameIn        : str, dataset name of unwrap phase to be corrected
                dsNameOut       : str, dataset name of unwrap phase to be saved after correction
    Returns:    ifgram_file     : str, path of ifgram stack file
    """
    print('-'*50)
    print('correct unwrapping error in {} with bridging ...'.format(ifgram_file))
    if ramp_type is not None:
        print('estimate and remove a {} ramp while calculating phase offset'.format(ramp_type))

    # read water mask
    if water_mask_file and os.path.isfile(water_mask_file):
        print('read water mask from file:', water_mask_file)
        water_mask = readfile.read(water_mask_file)[0]
    else:
        water_mask = None

    # file info
    atr = readfile.read_attribute(ifgram_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    k = atr['FILE_TYPE']

    # correct unwrap error ifgram by ifgram
    if k == 'ifgramStack':
        date12_list = ifgramStack(ifgram_file).get_date12_list(dropIfgram=False)
        num_ifgram = len(date12_list)
        shape_out = (num_ifgram, length, width)

        # prepare output data writing
        print('open {} with r+ mode'.format(ifgram_file))
        f = h5py.File(ifgram_file, 'r+')
        print('input  dataset:', dsNameIn)
        print('output dataset:', dsNameOut)
        if dsNameOut in f.keys():
            ds = f[dsNameOut]
            print('access /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))
        else:
            ds = f.create_dataset(dsNameOut,
                                  shape_out,
                                  maxshape=(None, None, None),
                                  chunks=True,
                                  compression=None)
            print('create /{d} of np.float32 in size of {s}'.format(d=dsNameOut, s=shape_out))

        # correct unwrap error ifgram by ifgram
        prog_bar = ptime.progressBar(maxValue=num_ifgram)
        for i in range(num_ifgram):
            # read unwrapPhase and connectComponent
            date12 = date12_list[i]
            unw = np.squeeze(f[dsNameIn][i, :, :])
            cc = np.squeeze(f[ccName][i, :, :])
            if water_mask is not None:
                cc[water_mask == 0] = 0

            # bridging
            cc_obj = connectComponent(conncomp=cc, metadata=atr)
            cc_obj.label()
            cc_obj.find_mst_bridge()
            unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

            # write to hdf5 file
            ds[i, :, :] = unw_cor
            prog_bar.update(i+1, suffix=date12)
        prog_bar.close()
        ds.attrs['MODIFICATION_TIME'] = str(time.time())
        f.close()
        print('close {} file.'.format(ifgram_file))

    if k == '.unw':
        # read unwrap phase
        unw = readfile.read(ifgram_file)[0]

        # read connected components
        cc_files0 = [ifgram_file+'.conncomp', os.path.splitext(ifgram_file)[0]+'_snap_connect.byt']
        cc_files = [i for i in cc_files0 if os.path.isfile(i)]
        if len(cc_files) == 0:
            raise FileNotFoundError(cc_files0)
        cc = readfile.read(cc_files[0])[0]
        if water_mask is not None:
            cc[water_mask == 0] = 0

        # bridging
        cc_obj = connectComponent(conncomp=cc, metadata=atr)
        cc_obj.label()
        cc_obj.find_mst_bridge()
        unw_cor = cc_obj.unwrap_conn_comp(unw, ramp_type=ramp_type)

        # write to hdf5 file
        out_file = '{}_unwCor{}'.format(os.path.splitext(ifgram_file)[0],
                                        os.path.splitext(ifgram_file)[1])
        print('writing >>> {}'.format(out_file))
        writefile.write(unw_cor, out_file=out_file, ref_file=ifgram_file)

    return ifgram_file
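Conceptually, the bridging correction shifts each non-reference connected component by an integer number of 2π so its phase agrees with the reference component. A toy sketch of that idea (not the actual connectComponent internals):

import numpy as np

def bridge_offset_sketch(unw, cc, ref_label, cc_label):
    """Shift one connected component by the integer-2pi offset that best
    aligns it with the reference component (illustrative only)."""
    diff = np.nanmean(unw[cc == ref_label]) - np.nanmean(unw[cc == cc_label])
    num_jump = np.rint(diff / (2 * np.pi))   # nearest integer number of cycles
    unw[cc == cc_label] += 2 * np.pi * num_jump
    return unw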
Example #52
0
def write(datasetDict,
          out_file,
          metadata=None,
          ref_file=None,
          compression=None):
    """ Write one file.
    Parameters: datasetDict : dict of dataset, with key = datasetName and value = 2D/3D array, e.g.:
                    {'height'        : np.ones((   200,300), dtype=np.int16),
                     'incidenceAngle': np.ones((   200,300), dtype=np.float32),
                     'bperp'         : np.ones((80,200,300), dtype=np.float32),
                     ...}
                out_file : str, output file name
                metadata : dict of attributes
                ref_file : str, reference file to get auxiliary info
                compression : str, compression while writing to HDF5 file, None, "lzf", "gzip"
    Returns:    out_file : str
    Examples:   dsDict = dict()
                dsDict['velocity'] = np.ones((200,300), dtype=np.float32)
                write(datasetDict=dsDict, out_file='velocity.h5', metadata=atr)
    """
    # copy metadata to meta
    if metadata:
        meta = {key: value for key, value in metadata.items()}
    elif ref_file:
        meta = readfile.read_attribute(ref_file)
    else:
        raise ValueError('No metadata or reference file input.')

    # convert ndarray input into dict type
    if isinstance(datasetDict, np.ndarray):
        data = np.array(datasetDict, datasetDict.dtype)
        datasetDict = dict()
        datasetDict[meta['FILE_TYPE']] = data

    ext = os.path.splitext(out_file)[1].lower()
    # HDF5 File
    if ext in ['.h5', '.he5']:
        # grab info from reference h5 file
        if ref_file and os.path.splitext(ref_file)[1] in ['.h5', '.he5']:
            # compression
            if compression is None:
                compression = readfile.get_hdf5_compression(ref_file)

            # list of auxiliary datasets
            shape2d = (int(meta['LENGTH']), int(meta['WIDTH']))
            with h5py.File(ref_file, 'r') as fr:
                auxDsNames = [
                    i for i in fr.keys()
                    if (i not in list(datasetDict.keys()) and isinstance(
                        fr[i], h5py.Dataset) and fr[i].shape[-2:] != shape2d)
                ]
        else:
            auxDsNames = []

        # check required datasets
        dsNames = list(datasetDict.keys()) + auxDsNames
        if meta['FILE_TYPE'] in ['timeseries', 'ifgramStack']:
            if 'date' not in dsNames:
                raise Exception(
                    "Can not write {} file without 'date' dataset!".format(
                        meta['FILE_TYPE']))

        # remove existing file
        if os.path.isfile(out_file):
            print('delete existing file: {}'.format(out_file))
            os.remove(out_file)

        # writing
        print('create HDF5 file: {} with w mode'.format(out_file))
        maxDigit = max([len(i) for i in dsNames])
        with h5py.File(out_file, 'w') as f:
            # 1. write input datasets
            for dsName in datasetDict.keys():
                data = datasetDict[dsName]
                print(
                    ('create dataset /{d:<{w}} of {t:<10} in size of {s:<20} '
                     'with compression={c}').format(d=dsName,
                                                    w=maxDigit,
                                                    t=str(data.dtype),
                                                    s=str(data.shape),
                                                    c=compression))
                ds = f.create_dataset(dsName,
                                      data=data,
                                      chunks=True,
                                      compression=compression)

            # 2. Write extra/auxiliary datasets from ref_file
            if len(auxDsNames) > 0:
                with h5py.File(ref_file, 'r') as fr:
                    for dsName in auxDsNames:
                        ds = fr[dsName]
                        print((
                            'create dataset /{d:<{w}} of {t:<10} in size of {s:<10} '
                            'with compression={c}').format(d=dsName,
                                                           w=maxDigit,
                                                           t=str(ds.dtype),
                                                           s=str(ds.shape),
                                                           c=compression))
                        f.create_dataset(dsName,
                                         data=ds[:],
                                         chunks=True,
                                         compression=compression)

            # 3. metadata
            for key, value in meta.items():
                try:
                    f.attrs[key] = str(value)
                except:
                    f.attrs[key] = str(value.encode('utf-8'))
            print('finished writing to {}'.format(out_file))

    # ISCE / ROI_PAC GAMMA / Image product
    else:
        key_list = list(datasetDict.keys())
        data_list = []
        for key in key_list:
            data_list.append(datasetDict[key])
        data_type = meta.get('DATA_TYPE', str(data_list[0].dtype)).lower()

        # Write Data File
        print('write {}'.format(out_file))
        # determined by ext
        if ext in ['.unw', '.cor', '.hgt']:
            write_float32(data_list[0], out_file)
            meta['DATA_TYPE'] = 'float32'

        elif ext == '.dem':
            write_real_int16(data_list[0], out_file)
            meta['DATA_TYPE'] = 'int16'

        elif ext in ['.trans']:
            write_float32(data_list[0], data_list[1], out_file)
            meta['DATA_TYPE'] = 'float32'

        elif ext in ['.utm_to_rdc', '.UTM_TO_RDC']:
            data = np.zeros(data_list[0].shape, dtype=np.complex64)
            data.real = datasetDict['rangeCoord']
            data.imag = datasetDict['azimuthCoord']
            data.astype('>c8').tofile(out_file)

        elif ext in ['.mli', '.flt']:
            write_real_float32(data_list[0], out_file)

        elif ext == '.slc':
            write_complex_int16(data_list[0], out_file)

        elif ext == '.int':
            write_complex64(data_list[0], out_file)

        elif ext == '.msk':
            write_byte(data_list[0], out_file)
            meta['DATA_TYPE'] = 'byte'

        # determined by DATA_TYPE
        elif data_type in ['float64']:
            write_real_float64(data_list[0], out_file)

        elif data_type in ['float32', 'float']:
            if len(data_list) == 1:
                write_real_float32(data_list[0], out_file)

            elif len(data_list) == 2 and meta['scheme'] == 'BIL':
                write_float32(data_list[0], data_list[1], out_file)

        elif data_type in ['int16', 'short']:
            write_real_int16(data_list[0], out_file)

        elif data_type in ['int8', 'byte']:
            write_byte(data_list[0], out_file)

        elif data_type in ['bool']:
            write_bool(data_list[0], out_file)

        else:
            print('Un-supported file type: ' + ext)
            return 0

        # write metadata file
        write_roipac_rsc(meta, out_file + '.rsc', print_msg=True)
    return out_file
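Besides the HDF5 usage in the docstring, the same function writes binary products based on the output extension; a hedged usage sketch (the metadata dict here is a minimal assumption):

import numpy as np

data = np.zeros((200, 300), dtype=np.float32)
meta = {'LENGTH': 200, 'WIDTH': 300, 'FILE_TYPE': '.unw'}   # minimal, assumed
write(data, out_file='test.unw', metadata=meta)             # also writes test.unw.rsc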
Example #53
0
def run_timeseries2time_func(inps):

    # basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    num_date = inps.numDate
    dates = np.array(inps.dateList)
    seconds = atr.get('CENTER_LINE_UTC', 0)

    # use the 1st date as reference if not found, e.g. timeseriesResidual.h5 file
    if "REF_DATE" not in atr.keys() and not inps.ref_date:
        inps.ref_date = inps.dateList[0]
        print(
            'WARNING: No REF_DATE found in the time-series file or the command-line input.'
        )
        print('  Set "--ref-date {}" and continue.'.format(inps.dateList[0]))

    # get deformation model from parsers
    model, num_param = read_inps2model(inps)

    ## output preparation

    # time_func_param: attributes
    atrV = dict(atr)
    atrV['FILE_TYPE'] = 'velocity'
    atrV['UNIT'] = 'm/year'
    atrV['START_DATE'] = inps.dateList[0]
    atrV['END_DATE'] = inps.dateList[-1]
    atrV['DATE12'] = '{}_{}'.format(inps.dateList[0], inps.dateList[-1])
    if inps.ref_yx:
        atrV['REF_Y'] = inps.ref_yx[0]
        atrV['REF_X'] = inps.ref_yx[1]
    if inps.ref_date:
        atrV['REF_DATE'] = inps.ref_date

    # time_func_param: config parameter
    print('add/update the following configuration metadata:\n{}'.format(
        configKeys))
    for key in configKeys:
        atrV[key_prefix + key] = str(vars(inps)[key])

    # time_func_param: instantiate output file
    ds_name_dict, ds_unit_dict = model2hdf5_dataset(model,
                                                    ds_shape=(length,
                                                              width))[1:]
    writefile.layout_hdf5(inps.outfile,
                          metadata=atrV,
                          ds_name_dict=ds_name_dict,
                          ds_unit_dict=ds_unit_dict)

    # timeseries_res: attributes + instantiate output file
    if inps.save_res:
        atrR = dict(atr)
        for key in ['REF_DATE']:
            if key in atrR.keys():
                atrR.pop(key)
        writefile.layout_hdf5(inps.res_file,
                              metadata=atrR,
                              ref_file=inps.timeseries_file)

    ## estimation

    # calc number of box based on memory limit
    memoryAll = (num_date + num_param * 2 + 2) * length * width * 4
    if inps.bootstrap:
        memoryAll += inps.bootstrapCount * num_param * length * width * 4
    num_box = int(np.ceil(memoryAll * 3 / (inps.maxMemory * 1024**3)))
    box_list = cluster.split_box2sub_boxes(box=(0, 0, width, length),
                                           num_split=num_box,
                                           dimension='y',
                                           print_msg=True)

    # loop for block-by-block IO
    for i, box in enumerate(box_list):
        box_wid = box[2] - box[0]
        box_len = box[3] - box[1]
        num_pixel = box_len * box_wid
        if num_box > 1:
            print('\n------- processing patch {} out of {} --------------'.
                  format(i + 1, num_box))
            print('box width:  {}'.format(box_wid))
            print('box length: {}'.format(box_len))

        # initiate output
        m = np.zeros((num_param, num_pixel), dtype=dataType)
        m_std = np.zeros((num_param, num_pixel), dtype=dataType)

        # read input
        print('reading data from file {} ...'.format(inps.timeseries_file))
        ts_data = readfile.read(inps.timeseries_file, box=box)[0]

        # referencing in time and space
        # for file w/o reference info. e.g. ERA5.h5
        if inps.ref_date:
            print('referencing to date: {}'.format(inps.ref_date))
            ref_ind = inps.dateList.index(inps.ref_date)
            ts_data -= np.tile(ts_data[ref_ind, :, :],
                               (ts_data.shape[0], 1, 1))

        if inps.ref_yx:
            print('referencing to point (y, x): ({}, {})'.format(
                inps.ref_yx[0], inps.ref_yx[1]))
            ref_box = (inps.ref_yx[1], inps.ref_yx[0], inps.ref_yx[1] + 1,
                       inps.ref_yx[0] + 1)
            ref_val = readfile.read(inps.timeseries_file, box=ref_box)[0]
            ts_data -= np.tile(ref_val.reshape(ts_data.shape[0], 1, 1),
                               (1, ts_data.shape[1], ts_data.shape[2]))

        ts_data = ts_data[inps.dropDate, :, :].reshape(inps.numDate, -1)
        if atrV['UNIT'] == 'mm':
            ts_data *= 1. / 1000.

        ts_std = None
        if inps.ts_std_file:
            ts_std = readfile.read(inps.ts_std_file, box=box)[0]
            ts_std = ts_std[inps.dropDate, :, :].reshape(inps.numDate, -1)
            # set zero values to a fixed small value to avoid divide-by-zero
            epsilon = 1e-5
            ts_std[ts_std < epsilon] = epsilon

        # mask invalid pixels
        print('skip pixels with zero/nan value in all acquisitions')
        ts_stack = np.nanmean(ts_data, axis=0)
        mask = np.multiply(~np.isnan(ts_stack), ts_stack != 0.)
        del ts_stack

        if ts_std is not None:
            print('skip pixels with nan STD value in any acquisition')
            num_std_nan = np.sum(np.isnan(ts_std), axis=0)
            mask *= num_std_nan == 0
            del num_std_nan

        ts_data = ts_data[:, mask]
        num_pixel2inv = int(np.sum(mask))
        idx_pixel2inv = np.where(mask)[0]
        print('number of pixels to invert: {} out of {} ({:.1f}%)'.format(
            num_pixel2inv, num_pixel, num_pixel2inv / num_pixel * 100))

        # go to next if no valid pixel found
        if num_pixel2inv == 0:
            continue

        ### estimation / solve Gm = d
        print('estimating time functions via linalg.lstsq ...')

        if inps.bootstrap:
            ## option 1 - least squares with bootstrapping
            # Bootstrapping is a resampling method which can be used to estimate properties
            # of an estimator. The method relies on independently sampling the data set with
            # replacement.
            print(
                'estimating time function STD with bootstrap resampling ({} times) ...'
                .format(inps.bootstrapCount))

            # calc model of all bootstrap sampling
            rng = np.random.default_rng()
            m_boot = np.zeros((inps.bootstrapCount, num_param, num_pixel2inv),
                              dtype=dataType)
            prog_bar = ptime.progressBar(maxValue=inps.bootstrapCount)
            for i in range(inps.bootstrapCount):
                # bootstrap resampling
                boot_ind = rng.choice(inps.numDate,
                                      size=inps.numDate,
                                      replace=True)
                boot_ind.sort()

                # estimation
                m_boot[i] = time_func.estimate_time_func(
                    model=model,
                    date_list=dates[boot_ind].tolist(),
                    dis_ts=ts_data[boot_ind],
                    seconds=seconds)[1]

                prog_bar.update(i + 1,
                                suffix='iteration {} / {}'.format(
                                    i + 1, inps.bootstrapCount))
            prog_bar.close()
            #del ts_data

            # get mean/std among all bootstrap sampling
            m[:, mask] = m_boot.mean(axis=0).reshape(num_param, -1)
            m_std[:, mask] = m_boot.std(axis=0).reshape(num_param, -1)
            del m_boot

        else:
            ## option 2 - least squares with uncertainty propagation
            G, m[:, mask], e2 = time_func.estimate_time_func(
                model=model,
                date_list=inps.dateList,
                dis_ts=ts_data,
                seconds=seconds)
            #del ts_data

            ## Compute the covariance matrix for model parameters: Gm = d
            # C_m_hat = (G.T * C_d^-1 * G)^-1  # linear propagation from the TS covariance matrix. (option 2.1)
            #         = sigma^2 * (G.T * G)^-1  # assuming obs errors are normally dist. in time.   (option 2.2a)
            # Based on the law of integrated expectation, we estimate the obs sigma^2 using
            # the OLS estimation residual e_hat_i = d_i - d_hat_i
            # sigma^2 = sigma_hat^2 * N / (N - P)                                                   (option 2.2b)
            #         = (e_hat.T * e_hat) / (N - P)  # sigma_hat^2 = (e_hat.T * e_hat) / N

            if ts_std is not None:
                # option 2.1 - linear propagation from time-series covariance matrix
                print(
                    'estimating time function STD from time-series STD pixel-by-pixel ...'
                )
                prog_bar = ptime.progressBar(maxValue=num_pixel2inv)
                for i in range(num_pixel2inv):
                    idx = idx_pixel2inv[i]

                    try:
                        C_ts_inv = np.diag(1. /
                                           np.square(ts_std[:, idx].flatten()))
                        m_var = np.diag(linalg.inv(
                            G.T.dot(C_ts_inv).dot(G))).astype(np.float32)
                        m_std[:, idx] = np.sqrt(m_var)
                    except linalg.LinAlgError:
                        m_std[:, idx] = np.nan

                    prog_bar.update(i + 1,
                                    every=200,
                                    suffix='{}/{} pixels'.format(
                                        i + 1, num_pixel2inv))
                prog_bar.close()

            else:
                # option 2.2a - assume obs errors following normal dist. in time
                print(
                    'estimating time function STD from time-series fitting residual ...'
                )
                G_inv = linalg.inv(np.dot(G.T, G))
                m_var = e2.reshape(1, -1) / (num_date - num_param)
                m_std[:, mask] = np.sqrt(
                    np.dot(np.diag(G_inv).reshape(-1, 1), m_var))

                # option 2.2b - simplified form for linear velocity (without matrix linear algebra)
                # The STD can also be calculated using Eq. (10) from Fattahi and Amelung (2015, JGR)
                # ts_diff = ts_data - np.dot(G, m)
                # t_diff = G[:, 1] - np.mean(G[:, 1])
                # vel_std = np.sqrt(np.sum(ts_diff ** 2, axis=0) / np.sum(t_diff ** 2)  / (num_date - 2))

        # write - time func params
        block = [box[1], box[3], box[0], box[2]]
        ds_dict = model2hdf5_dataset(model, m, m_std, mask=mask)[0]
        for ds_name, data in ds_dict.items():
            writefile.write_hdf5_block(inps.outfile,
                                       data=data.reshape(box_len, box_wid),
                                       datasetName=ds_name,
                                       block=block)

        # write - residual file
        if inps.save_res:
            block = [0, num_date, box[1], box[3], box[0], box[2]]
            ts_res = np.ones(
                (num_date, box_len * box_wid), dtype=np.float32) * np.nan
            ts_res[:, mask] = ts_data - np.dot(G, m)[:, mask]
            writefile.write_hdf5_block(inps.res_file,
                                       data=ts_res.reshape(
                                           num_date, box_len, box_wid),
                                       datasetName='timeseries',
                                       block=block)

    return inps.outfile
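At its core, the estimation above solves Gm = d per pixel. A minimal sketch for the simplest time function, a linear velocity (design matrix with an offset and a rate column), on synthetic data:

import numpy as np

t = np.array([0.0, 0.5, 1.0, 1.5, 2.0])            # acquisition times [years]
d = 0.02 * t + 0.001 * np.random.randn(t.size)     # synthetic 2 cm/yr signal
G = np.stack([np.ones_like(t), t], axis=1)         # design matrix [1, t]
m = np.linalg.lstsq(G, d, rcond=None)[0]           # m = [offset, velocity]
print('estimated velocity: {:.4f} m/year'.format(m[1]))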
Example #54
0
def read_init_info(inps):
    # Time Series Info
    atr = readfile.read_attribute(inps.file[0])
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(inps.file[0])
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(inps.file[0])
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(inps.file[0])
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open(print_msg=inps.print_msg)
    inps.seconds = atr.get('CENTER_LINE_UTC', 0)

    if not inps.file_label:
        inps.file_label = []
        for fname in inps.file:
            fbase = os.path.splitext(os.path.basename(fname))[0]
            fbase = fbase.replace('timeseries', '')
            inps.file_label.append(fbase)

    # default mask file
    if not inps.mask_file and 'msk' not in inps.file[0]:
        dir_name = os.path.dirname(inps.file[0])
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    ## date info
    inps.date_list = obj.dateList
    inps.num_date = len(inps.date_list)
    if inps.start_date:
        inps.date_list = [i for i in inps.date_list if int(i) >= int(inps.start_date)]
    if inps.end_date:
        inps.date_list = [i for i in inps.date_list if int(i) <= int(inps.end_date)]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)

    (inps.ex_date_list,
     inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # reference date/index
    if not inps.ref_date:
        inps.ref_date = atr.get('REF_DATE', None)
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    else:
        inps.ref_idx = None

    # date/index of interest for initial display
    if not inps.idx:
        if (not inps.ref_idx) or (inps.ref_idx < inps.num_date / 2.):
            inps.idx = inps.num_date - 2
        else:
            inps.idx = 2

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr, disp_unit=inps.disp_unit)[1:3]

    # Map info - coordinate unit
    inps.coord_unit = atr.get('Y_UNIT', 'degrees').lower()

    # Read Error List
    inps.ts_plot_func = plot_ts_scatter
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        # assign plot function
        inps.ts_plot_func = plot_ts_errorbar

        # read error file
        error_fc = np.loadtxt(inps.error_file, dtype=bytes).astype(str)
        inps.error_ts = error_fc[:, 1].astype(float)*inps.unit_fac

        # update error file with excluded dates
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file and coordinate object
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./inputs/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    ## size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    vprint('data   coverage in y/x: '+str(data_box))
    vprint('subset coverage in y/x: '+str(inps.pix_box))
    vprint('data   coverage in lat/lon: '+str(inps.coord.box_pixel2geo(data_box)))
    vprint('subset coverage in lat/lon: '+str(inps.geo_box))
    vprint('------------------------------------------------------------------------')

    # calculate multilook_num
    # ONLY IF:
    #   inps.multilook is True (no --nomultilook input) AND
    #   inps.multilook_num ==1 (no --multilook-num input)
    # Note: inps.multilook is used for this check ONLY
    # Note: multilooking is only applied to the 3D data cubes and their related operations:
    # e.g. spatial indexing, referencing, etc. All the other variables are in the original grid
    # so that users get the same result as the non-multilooked version.
    if inps.multilook and inps.multilook_num == 1:
        inps.multilook_num = pp.auto_multilook_num(inps.pix_box, inps.num_date,
                                                   max_memory=inps.maxMemory,
                                                   print_msg=inps.print_msg)

    ## reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        # set longitude to [-180, 180)
        if inps.coord_unit.lower().startswith('deg') and inps.ref_lalo[1] >= 180.:
            inps.ref_lalo[1] -= 360.
        # ref_lalo --> ref_yx if not set in cmd
        if not inps.ref_yx:
            inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1], print_msg=False)[0:2]

    # use REF_Y/X if ref_yx not set in cmd
    if not inps.ref_yx and 'REF_Y' in atr.keys():
        inps.ref_yx = (int(atr['REF_Y']), int(atr['REF_X']))

    # ref_yx --> ref_lalo if in geo-coord
    # for plotting purpose only
    if inps.ref_yx and 'Y_FIRST' in atr.keys():
        inps.ref_lalo = inps.coord.radar2geo(inps.ref_yx[0], inps.ref_yx[1], print_msg=False)[0:2]

    # do not plot native reference point if it's out of the coverage due to subset
    if (inps.ref_yx and 'Y_FIRST' in atr.keys()
        and inps.ref_yx == (int(atr.get('REF_Y',-999)), int(atr.get('REF_X',-999)))
        and not (    inps.pix_box[0] <= inps.ref_yx[1] < inps.pix_box[2]
                 and inps.pix_box[1] <= inps.ref_yx[0] < inps.pix_box[3])):
        inps.disp_ref_pixel = False
        print('the native REF_Y/X is outside the subset box, thus not displayed')

    ## initial pixel coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0], inps.lalo[1], print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0], inps.yx[1], print_msg=False)[0:2]
    except:
        inps.lalo = None

    ## figure settings
    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr, print_msg=inps.print_msg)

    # Transparency - Alpha
    if not inps.transparency:
        # Auto adjust transparency value when showing shaded relief DEM
        if inps.dem_file and inps.disp_dem_shade:
            inps.transparency = 0.7
        else:
            inps.transparency = 1.0

    ## display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_img = radian;
    # otherwise set disp_unit_img = disp_unit
    inps.disp_unit_img = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if   'cm' == inps.disp_unit.split('/')[0]:   inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]:   inps.range2phase /= 1000.
        elif 'm'  == inps.disp_unit.split('/')[0]:   inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2*np.pi:
            inps.disp_unit_img = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_img)

    ## fit a suite of time func to the time series
    inps.model, inps.num_param = ts2vel.read_inps2model(inps, date_list=inps.date_list)

    # dense TS for plotting
    inps.date_list_fit = ptime.get_date_range(inps.date_list[0], inps.date_list[-1])
    inps.dates_fit = ptime.date_list2vector(inps.date_list_fit)[0]
    inps.G_fit = time_func.get_design_matrix4time_func(
        date_list=inps.date_list_fit,
        model=inps.model,
        seconds=inps.seconds)

    return inps, atr
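The multilook_num chosen above is applied to the 3D cube by spatial averaging for display speed. A minimal numpy sketch of mean-based multilooking on a 2D slice (cropping to a multiple of the look number; not the pp.auto_multilook_num internals):

import numpy as np

def multilook_sketch(data, num_look):
    """Average a 2D array over num_look x num_look windows (illustrative)."""
    rows = (data.shape[0] // num_look) * num_look
    cols = (data.shape[1] // num_look) * num_look
    d = data[:rows, :cols].reshape(rows // num_look, num_look,
                                   cols // num_look, num_look)
    return d.mean(axis=(1, 3))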
Example #55
0
def subset_file(fname, subset_dict_input, out_file=None):
    """Subset file with
    Inputs:
        fname        : str, path/name of file
        out_file     : str, path/name of output file
        subset_dict_input : dict, subset parameters, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. fill value for areas outside the data coverage. default=None
                                   None or absent to subset within the data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        out_file : str, path/name of output file;
                   out_file = 'sub_'+fname, if fname is in the current directory;
                   out_file = fname, if fname is not in the current directory.
    """

    # Input File Info
    atr = readfile.read_attribute(fname)
    width = int(atr['WIDTH'])
    length = int(atr['LENGTH'])
    k = atr['FILE_TYPE']
    print('subset ' + k + ' file: ' + fname + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr)

    coord = ut.coordinate(atr)
    # if fill_value exists and not None, subset data and fill assigned value for area out of its coverage.
    # otherwise, re-check subset to make sure it's within data coverage and initialize the matrix with np.nan
    if 'fill_value' in subset_dict.keys() and subset_dict['fill_value']:
        outfill = True
    else:
        outfill = False
    if not outfill:
        pix_box = coord.check_box_within_data_coverage(pix_box)
        subset_dict['fill_value'] = np.nan

    geo_box = coord.box_pixel2geo(pix_box)
    data_box = (0, 0, width, length)
    print('data   range in (x0,y0,x1,y1): {}'.format(data_box))
    print('subset range in (x0,y0,x1,y1): {}'.format(pix_box))
    print('data   range in (W, N, E, S): {}'.format(
        coord.box_pixel2geo(data_box)))
    print('subset range in (W, N, E, S): {}'.format(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return fname

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not out_file:
        if os.getcwd() == os.path.dirname(os.path.abspath(fname)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                out_file = '{}_tight{}'.format(
                    os.path.splitext(fname)[0],
                    os.path.splitext(fname)[1])
            else:
                out_file = 'sub_' + os.path.basename(fname)
        else:
            out_file = os.path.basename(fname)
    print('writing >>> ' + out_file)

    # update metadata
    atr = attr.update_attribute4subset(atr, pix_box)

    # subset datasets one by one
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])

    ext = os.path.splitext(out_file)[1]
    if ext in ['.h5', '.he5']:
        # initiate the output file
        writefile.layout_hdf5(out_file, metadata=atr, ref_file=fname)

        # subset dataset one-by-one
        for dsName in dsNames:
            with h5py.File(fname, 'r') as fi:
                ds = fi[dsName]
                ds_shape = ds.shape
                ds_ndim = ds.ndim
                print('cropping {d} in {b} from {f} ...'.format(
                    d=dsName, b=pix_box4data, f=os.path.basename(fname)))

                if ds_ndim == 2:
                    # read
                    data = ds[pix_box4data[1]:pix_box4data[3],
                              pix_box4data[0]:pix_box4data[2]]

                    # crop
                    data_out = np.ones(
                        (pix_box[3] - pix_box[1], pix_box[2] - pix_box[0]),
                        data.dtype) * subset_dict['fill_value']
                    data_out[pix_box4subset[1]:pix_box4subset[3],
                             pix_box4subset[0]:pix_box4subset[2]] = data
                    data_out = np.array(data_out, dtype=data.dtype)

                    # write
                    block = [0, int(atr['LENGTH']), 0, int(atr['WIDTH'])]
                    writefile.write_hdf5_block(out_file,
                                               data=data_out,
                                               datasetName=dsName,
                                               block=block,
                                               print_msg=True)

                elif ds_ndim == 3:
                    prog_bar = ptime.progressBar(maxValue=ds_shape[0])
                    for i in range(ds_shape[0]):
                        # read
                        data = ds[i, pix_box4data[1]:pix_box4data[3],
                                  pix_box4data[0]:pix_box4data[2]]

                        # crop
                        data_out = np.ones(
                            (1, pix_box[3] - pix_box[1],
                             pix_box[2] - pix_box[0]),
                            data.dtype) * subset_dict['fill_value']
                        data_out[:, pix_box4subset[1]:pix_box4subset[3],
                                 pix_box4subset[0]:pix_box4subset[2]] = data

                        # write
                        block = [
                            i, i + 1, 0,
                            int(atr['LENGTH']), 0,
                            int(atr['WIDTH'])
                        ]
                        writefile.write_hdf5_block(out_file,
                                                   data=data_out,
                                                   datasetName=dsName,
                                                   block=block,
                                                   print_msg=False)

                        prog_bar.update(i + 1,
                                        suffix='{}/{}'.format(
                                            i + 1, ds_shape[0]))
                    prog_bar.close()
                    print('finished writing to file: {}'.format(out_file))

    else:
        # IO for binary files
        dsDict = dict()
        for dsName in dsNames:
            dsDict[dsName] = subset_dataset(
                fname,
                dsName,
                pix_box,
                pix_box4data,
                pix_box4subset,
                fill_value=subset_dict['fill_value'])
        writefile.write(dsDict,
                        out_file=out_file,
                        metadata=atr,
                        ref_file=fname)

        # write extra metadata files for ISCE data files
        if os.path.isfile(fname + '.xml') or os.path.isfile(fname + '.aux.xml'):
            # write ISCE XML file
            dtype_gdal = readfile.NUMPY2GDAL_DATATYPE[atr['DATA_TYPE']]
            dtype_isce = readfile.GDAL2ISCE_DATATYPE[dtype_gdal]
            writefile.write_isce_xml(out_file,
                                     width=int(atr['WIDTH']),
                                     length=int(atr['LENGTH']),
                                     bands=len(dsDict.keys()),
                                     data_type=dtype_isce,
                                     scheme=atr['scheme'],
                                     image_type=atr['FILE_TYPE'])
            print(f'write file: {out_file}.xml')

            # write GDAL VRT file
            if os.path.isfile(fname + '.vrt'):
                from isceobj.Util.ImageUtil import ImageLib as IML
                img = IML.loadImage(out_file)[0]
                img.renderVRT()
                print(f'write file: {out_file}.vrt')

    return out_file
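A minimal, self-contained sketch of the crop-and-fill pattern above: the requested box may extend beyond the data extent, so only the overlapping part is read and the remainder is padded with the fill value. The inline overlap computation stands in for get_box_overlap_index and assumes the same (x0, y0, x1, y1) box convention used in the function.
import numpy as np

def crop_with_fill(data, box, fill_value=np.nan):
    """Crop a 2D array to box=(x0, y0, x1, y1), padding out-of-bounds areas."""
    length, width = data.shape
    x0, y0, x1, y1 = box
    # overlap of the requested box with the data extent
    ox0, oy0 = max(x0, 0), max(y0, 0)
    ox1, oy1 = min(x1, width), min(y1, length)
    out = np.full((y1 - y0, x1 - x0), fill_value, dtype=np.float32)
    out[oy0 - y0:oy1 - y0, ox0 - x0:ox1 - x0] = data[oy0:oy1, ox0:ox1]
    return out

data = np.arange(12, dtype=np.float32).reshape(3, 4)
print(crop_with_fill(data, (-1, -1, 3, 2)))   # top row / left column are NaN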
Example #56
0
def read_inps_dict2geometry_dict_object(iDict):

    # eliminate lookup table dsName for input files in radar-coordinates
    if iDict['processor'] in ['isce', 'doris']:
        # for processors with lookup table in radar-coordinates, remove azimuth/rangeCoord
        iDict['ds_name2key'].pop('azimuthCoord')
        iDict['ds_name2key'].pop('rangeCoord')
    elif iDict['processor'] in ['roipac', 'gamma']:
        # for processors with lookup table in geo-coordinates, remove latitude/longitude
        iDict['ds_name2key'].pop('latitude')
        iDict['ds_name2key'].pop('longitude')
    elif iDict['processor'] in ['aria', 'gmtsar', 'snap', 'hyp3']:
        # for processors supporting geocoded products only, do nothing for now;
        # check again when adding support for products in radar coordinates
        pass
    else:
        print('Unrecognized InSAR processor: {}'.format(iDict['processor']))

    # iDict --> dsPathDict
    print('-'*50)
    print('searching geometry files info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(iDict['ds_name2key'].keys())])
    dsPathDict = {}
    for dsName in [i for i in geometryDatasetNames
                   if i in iDict['ds_name2key'].keys()]:
        key = iDict['ds_name2key'][dsName]
        if key in iDict.keys():
            files = sorted(glob.glob(str(iDict[key])))
            if len(files) > 0:
                if dsName == 'bperp':
                    bperpDict = {}
                    for file in files:
                        date = ptime.yyyymmdd(os.path.basename(os.path.dirname(file)))
                        bperpDict[date] = file
                    dsPathDict[dsName] = bperpDict
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=iDict[key]))
                    print('number of bperp files: {}'.format(len(list(bperpDict.keys()))))
                else:
                    dsPathDict[dsName] = files[0]
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=files[0]))

    # Check required dataset
    dsName0 = geometryDatasetNames[0]
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))

    # metadata
    ifgramRadarMetadata = None
    ifgramKey = iDict['ds_name2key']['unwrapPhase']
    if ifgramKey in iDict.keys():
        ifgramFiles = glob.glob(str(iDict[ifgramKey]))
        if len(ifgramFiles) > 0:
            atr = readfile.read_attribute(ifgramFiles[0])
            if 'Y_FIRST' not in atr.keys():
                ifgramRadarMetadata = atr.copy()

    # dsPathDict --> dsGeoPathDict + dsRadarPathDict
    dsNameList = list(dsPathDict.keys())
    dsGeoPathDict = {}
    dsRadarPathDict = {}
    for dsName in dsNameList:
        if dsName == 'bperp':
            atr = readfile.read_attribute(next(iter(dsPathDict[dsName].values())))
        else:
            atr = readfile.read_attribute(dsPathDict[dsName])
        if 'Y_FIRST' in atr.keys():
            dsGeoPathDict[dsName] = dsPathDict[dsName]
        else:
            dsRadarPathDict[dsName] = dsPathDict[dsName]

    geomRadarObj = None
    geomGeoObj = None
    if len(dsRadarPathDict) > 0:
        geomRadarObj = geometryDict(processor=iDict['processor'],
                                    datasetDict=dsRadarPathDict,
                                    extraMetadata=ifgramRadarMetadata)
    if len(dsGeoPathDict) > 0:
        geomGeoObj = geometryDict(processor=iDict['processor'],
                                  datasetDict=dsGeoPathDict,
                                  extraMetadata=None)
    return geomRadarObj, geomGeoObj
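The split into geo- vs radar-coordinate geometry files above hinges on a single metadata key: geocoded products carry 'Y_FIRST' (and 'X_FIRST'). A standalone sketch of that dispatch, with made-up attribute dicts standing in for readfile.read_attribute():
geo_atr   = {'FILE_TYPE': 'dem', 'Y_FIRST': '35.0', 'X_FIRST': '-118.0'}
radar_atr = {'FILE_TYPE': 'dem'}

def is_geocoded(atr):
    """Mirror the 'Y_FIRST' check used to route files into dsGeoPathDict / dsRadarPathDict."""
    return 'Y_FIRST' in atr.keys()

for atr in (geo_atr, radar_atr):
    print(atr['FILE_TYPE'], '-> geo' if is_geocoded(atr) else '-> radar')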
Example #57
0
def get_size(self, family=ifgramDatasetNames[0]):
    self.file = self.datasetDict[family]
    metadata = readfile.read_attribute(self.file)
    self.length = int(metadata['LENGTH'])
    self.width = int(metadata['WIDTH'])
    return self.length, self.width
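For context, a hedged usage sketch of get_size(): the attribute dict below is hypothetical, standing in for what readfile.read_attribute() returns, and length/width are parsed from the string-valued metadata exactly as the method does.
metadata = {'LENGTH': '1800', 'WIDTH': '2400'}   # hypothetical attributes
length, width = int(metadata['LENGTH']), int(metadata['WIDTH'])
print(length, width)                             # 1800 2400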
Example #58
0
def read_data(inps):
    """
    Returns: defo: 2D np.ndarray with invalid / masked-out pixels set to NaN
    """
    # metadata
    inps.metadata = readfile.read_attribute(inps.file)
    k = inps.metadata['FILE_TYPE']
    inps.range2phase = -4. * np.pi / float(inps.metadata['WAVELENGTH'])

    # mask
    if inps.mask_file:
        inps.mask = readfile.read(inps.mask_file)[0]
    else:
        inps.mask = np.ones((int(inps.metadata['LENGTH']),
                             int(inps.metadata['WIDTH'])), dtype=np.bool_)

    # data
    if k in ['.unw', 'velocity']:
        inps.phase = readfile.read(inps.file)[0]
        if k == 'velocity':
            # velocity to displacement
            date1, date2 = inps.metadata['DATE12'].split('_')
            dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
            inps.phase *= (dt2 - dt1).days / 365.25
            # displacement to phase
            inps.phase *= inps.range2phase

        # update mask to exclude pixel with NaN value
        inps.mask *= ~np.isnan(inps.phase)
        # set all masked out pixel to NaN
        inps.phase[inps.mask==0] = np.nan
    else:
        raise ValueError("input file not support yet: {}".format(k))
    print('number of pixels: {}'.format(np.sum(inps.mask)))

    # change reference point
    if inps.ref_lalo:
        coord = ut.coordinate(inps.metadata)
        ref_lat, ref_lon = inps.ref_lalo
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        # update data
        inps.phase -= inps.phase[ref_y, ref_x]
        # update metadata
        inps.metadata['REF_LAT'] = ref_lat
        inps.metadata['REF_LON'] = ref_lon
        inps.metadata['REF_Y'] = ref_y
        inps.metadata['REF_X'] = ref_x

    # read geometry
    inps.lat, inps.lon = ut.get_lat_lon(inps.metadata)
    inps.inc_angle = readfile.read(inps.geom_file, datasetName='incidenceAngle')[0]
    inps.head_angle = np.ones(inps.inc_angle.shape, dtype=np.float32) * float(inps.metadata['HEADING'])
    inps.height = readfile.read(inps.geom_file, datasetName='height')[0]

    # convert the height of ellipsoid to geoid (mean sea level)
    # ref: https://github.com/vandry/geoidheight
    if inps.ellipsoid2geoid:
        # import geoid module
        try:
            import geoid
        except ImportError:
            raise ImportError('Cannot import the geoidheight module!')

        # calculate offset and correct height
        egm_file = os.path.join(os.path.dirname(geoid.__file__), 'geoids/egm2008-1.pgm')
        gh_obj = geoid.GeoidHeight(egm_file)
        h_offset = gh_obj.get(lat=np.nanmean(inps.lat), lon=np.nanmean(inps.lon))
        inps.height -= h_offset

        # print message
        msg = 'convert height from ellipsoid to geoid'
        msg += '\n\tby subtracting a constant offset of {:.2f} m'.format(h_offset)
        print(msg)

    inps.lat[inps.mask==0] = np.nan
    inps.lon[inps.mask==0] = np.nan
    inps.inc_angle[inps.mask==0] = np.nan
    inps.head_angle[inps.mask==0] = np.nan
    inps.height[inps.mask==0] = np.nan

    # output filename
    if not inps.outfile:
        proj_name = sensor.project_name2sensor_name(inps.file)[1]
        if not proj_name:
            raise ValueError('No custom/auto output filename found.')
        inps.outfile = '{}_{}.mat'.format(proj_name, inps.metadata['DATE12'])

        if not inps.outdir:
            inps.outdir = os.path.dirname(inps.file)
        inps.outfile = os.path.join(inps.outdir, inps.outfile)
    inps.outfile = os.path.abspath(inps.outfile)
    return
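A standalone sketch of the velocity-to-phase conversion applied in read_data(), with illustrative numbers (a C-band wavelength of ~0.0555 m is assumed here, not taken from the source): velocity [m/yr] is scaled by the time span in years to displacement [m], then by range2phase = -4*pi/wavelength to radians.
import numpy as np
from datetime import datetime

wavelength = 0.0555                       # radar wavelength [m], illustrative
range2phase = -4. * np.pi / wavelength    # [rad/m], as in read_data()

date1, date2 = datetime(2020, 1, 1), datetime(2021, 1, 1)
tspan_yr = (date2 - date1).days / 365.25  # date span in years

velocity = 0.02                           # 2 cm/yr line-of-sight velocity
displacement = velocity * tspan_yr        # [m] accumulated over the span
phase = displacement * range2phase        # [rad]
print('{:.4f} m -> {:.2f} rad'.format(displacement, phase))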
Example #59
0
def read_inps_dict2geometry_dict_object(inpsDict):
    # eliminate dsName by processor
    if 'processor' not in inpsDict and 'PROCESSOR' in inpsDict:
        inpsDict['processor'] = inpsDict['PROCESSOR']
    if inpsDict['processor'] in ['isce', 'doris']:
        datasetName2templateKey.pop('azimuthCoord')
        datasetName2templateKey.pop('rangeCoord')
    elif inpsDict['processor'] in ['roipac', 'gamma']:
        datasetName2templateKey.pop('latitude')
        datasetName2templateKey.pop('longitude')
    elif inpsDict['processor'] in ['snap']:
        # check again when there is a SNAP product in radar coordinates
        pass
    else:
        print('Unrecognized InSAR processor: {}'.format(inpsDict['processor']))

    # inpsDict --> dsPathDict
    print('-' * 50)
    print('searching geometry files info')
    print('input data files:')

    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    for dsName in [i for i in geometryDatasetNames
                   if i in datasetName2templateKey.keys()]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key]) + '.xml'))
            files = [item.split('.xml')[0] for item in files]
            if len(files) > 0:
                if dsName == 'bperp':
                    bperpDict = {}
                    for file in files:
                        date = ptime.yyyymmdd(os.path.basename(os.path.dirname(file)))
                        bperpDict[date] = file
                    dsPathDict[dsName] = bperpDict
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=inpsDict[key]))
                    print('number of bperp files: {}'.format(len(list(bperpDict.keys()))))
                else:
                    dsPathDict[dsName] = files[0]
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=files[0]))

    # Check required dataset
    dsName0 = geometryDatasetNames[0]
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))

    # metadata
    slcRadarMetadata = None
    slcKey = datasetName2templateKey['slc']
    if slcKey in inpsDict.keys():
        slcFiles = glob.glob(str(inpsDict[slcKey]))
        if len(slcFiles) > 0:
            atr = readfile.read_attribute(slcFiles[0])
            if 'Y_FIRST' not in atr.keys():
                slcRadarMetadata = atr.copy()

    # dsPathDict --> dsGeoPathDict + dsRadarPathDict
    dsNameList = list(dsPathDict.keys())
    dsGeoPathDict = {}
    dsRadarPathDict = {}
    for dsName in dsNameList:
        if dsName == 'bperp':
            atr = readfile.read_attribute(next(iter(dsPathDict[dsName].values())))
        else:
            atr = mut.read_attribute(dsPathDict[dsName].split('.xml')[0], metafile_ext='.xml')
        if 'Y_FIRST' in atr.keys():
            dsGeoPathDict[dsName] = dsPathDict[dsName]
        else:
            dsRadarPathDict[dsName] = dsPathDict[dsName]

    geomRadarObj = None
    geomGeoObj = None

    if len(dsRadarPathDict) > 0:
        geomRadarObj = geometryDict(processor=inpsDict['processor'],
                                    datasetDict=dsRadarPathDict,
                                    extraMetadata=slcRadarMetadata)
    if len(dsGeoPathDict) > 0:
        geomGeoObj = geometryDict(processor=inpsDict['processor'],
                                  datasetDict=dsGeoPathDict,
                                  extraMetadata=None)
    return geomRadarObj, geomGeoObj
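Example #59 discovers ISCE-style files through their '.xml' sidecars: it globs for the pattern plus '.xml', then strips the extension to recover the data file paths. A standalone sketch of that trick (the file layout below is illustrative):
import glob
import os
import tempfile

# fake ISCE-style layout: one data file with an .xml sidecar, one without
tmp_dir = tempfile.mkdtemp()
for name in ['hgt.rdr', 'hgt.rdr.xml', 'lat.rdr']:
    open(os.path.join(tmp_dir, name), 'w').close()

pattern = os.path.join(tmp_dir, '*.rdr')
files = sorted(glob.glob(pattern + '.xml'))        # match only files with sidecars
files = [item.split('.xml')[0] for item in files]  # strip back to the data paths
print(files)                                       # only .../hgt.rdr survives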
Example #60
0
def get_dataset_data_type(self, dsName):
    ifgramObj = next(iter(self.pairsDict.values()))
    dsFile = ifgramObj.datasetDict[dsName]
    metadata = readfile.read_attribute(dsFile)
    dataType = dataTypeDict[metadata.get('DATA_TYPE', 'float32').lower()]
    return dataType
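A minimal sketch of the dataTypeDict lookup used in get_dataset_data_type(), under the assumption that it maps lower-cased metadata strings to numpy dtypes (the mapping below is illustrative, not the source's):
import numpy as np

dataTypeDict = {
    'bool'     : np.bool_,
    'int16'    : np.int16,
    'float32'  : np.float32,
    'float64'  : np.float64,
    'complex64': np.complex64,
}

metadata = {'DATA_TYPE': 'FLOAT32'}   # hypothetical file attributes
dataType = dataTypeDict[metadata.get('DATA_TYPE', 'float32').lower()]
print(dataType)                       # <class 'numpy.float32'>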