def cmd_line_parse(iargs=None):
    """Parse the command line arguments and enrich the namespace with file info.

    Parameters: iargs - list of str or None, arguments to parse (None -> sys.argv)
    Returns:    inps  - argparse.Namespace with extra attributes:
                        file / atr / coord / dset / start_yx / end_yx / save_fig
    Side effects: sets the module-level `vprint` function and may switch the
                  matplotlib backend to 'Agg' when no figure is displayed.
    """
    inps = create_parser().parse_args(args=iargs)

    # saving the figure is implied when an output file is given or display is off
    if inps.outfile or not inps.disp_fig:
        inps.save_fig = True

    # input file info
    inps.file = ut.get_file_list(inps.file)
    inps.atr = readfile.read_attribute(inps.file[0])
    inps.coord = ut.coordinate(inps.atr)

    # default dataset: first slice of the first file
    if not inps.dset:
        inps.dset = readfile.get_slice_list(inps.file[0])[0]

    # lola_file --> start/end_lalo
    if inps.lola_file:
        inps.start_lalo, inps.end_lalo = read_lonlat_file(inps.lola_file)

    # start/end_lalo --> start/end_yx (convert lat/lon endpoints to row/col)
    if inps.start_lalo and inps.end_lalo:
        [y0, y1] = inps.coord.lalo2yx([inps.start_lalo[0], inps.end_lalo[0]], coord_type='lat')
        [x0, x1] = inps.coord.lalo2yx([inps.start_lalo[1], inps.end_lalo[1]], coord_type='lon')
        inps.start_yx = [y0, x0]
        inps.end_yx = [y1, x1]

    # verbose print using --noverbose option
    global vprint
    vprint = print if inps.print_msg else lambda *args, **kwargs: None

    # non-interactive backend when the figure is not displayed
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    return inps
def print_slice_list(fname, print_msg=False):
    """Return the slice names of a file, optionally printing one per line."""
    slices = readfile.get_slice_list(fname)
    if print_msg:
        print('\n'.join(slices))
    return slices
def print_slice_list(fname, disp_num=False, print_msg=False):
    """Return the slice names of a file; optionally print them, with index."""
    slices = readfile.get_slice_list(fname)
    if print_msg:
        for idx, name in enumerate(slices):
            line = '{}\t{}'.format(name, idx) if disp_num else name
            print(line)
    return slices
def cmd_line_parse(iargs=None):
    """Parse command line arguments; normalize the per-file vertical offsets.

    Parameters: iargs - list of str or None, arguments to parse (None -> sys.argv)
    Returns:    inps  - argparse.Namespace with extra attributes:
                        file / atr / coord / num_file / offset (np.ndarray) /
                        dset / start_yx / end_yx / save_fig
    Raises:     ValueError - if the number of --offset values is neither 1
                             nor the number of input files
    Side effects: sets the module-level `vprint` function and may switch the
                  matplotlib backend to 'Agg' when no figure is displayed.
    """
    inps = create_parser().parse_args(args=iargs)

    # saving the figure is implied when an output file is given or display is off
    if inps.outfile or not inps.disp_fig:
        inps.save_fig = True

    # input file info
    inps.file = ut.get_file_list(inps.file)
    inps.atr = readfile.read_attribute(inps.file[0])
    inps.coord = ut.coordinate(inps.atr)
    inps.num_file = len(inps.file)

    # input offsets (only relevant when plotting more than one file)
    if inps.num_file > 1:
        # default value
        if not inps.offset:
            inps.offset = [0.05]
        num_offset = len(inps.offset)
        # a) one input: it's interval between adjacent files
        if num_offset == 1:
            inps.offset = np.ones(inps.num_file, dtype=np.float32) * inps.offset
            inps.offset = np.cumsum(inps.offset)
        # b) multiple input: it's exact offset of all files
        elif num_offset == inps.num_file:
            inps.offset = np.array(inps.offset, dtype=np.float32)
        # c) do not support any other numbers of inputs
        else:
            msg = 'input number of offsets: {}.'.format(len(inps.offset))
            msg += '\nIt should be 1 or number of files: {}'.format(inps.num_file)
            raise ValueError(msg)

    # default dataset: first slice of the first file
    if not inps.dset:
        inps.dset = readfile.get_slice_list(inps.file[0])[0]

    # lola_file --> start/end_lalo
    if inps.lola_file:
        inps.start_lalo, inps.end_lalo = read_lonlat_file(inps.lola_file)

    # start/end_lalo --> start/end_yx (convert lat/lon endpoints to row/col)
    if inps.start_lalo and inps.end_lalo:
        [y0, y1] = inps.coord.lalo2yx([inps.start_lalo[0], inps.end_lalo[0]], coord_type='lat')
        [x0, x1] = inps.coord.lalo2yx([inps.start_lalo[1], inps.end_lalo[1]], coord_type='lon')
        inps.start_yx = [y0, x0]
        inps.end_yx = [y1, x1]

    # verbose print using --noverbose option
    global vprint
    vprint = print if inps.print_msg else lambda *args, **kwargs: None

    # non-interactive backend when the figure is not displayed
    if not inps.disp_fig:
        plt.switch_backend('Agg')
    return inps
def print_slice_list(fname, disp_num=False, print_msg=False):
    """Print slice info of file.

    Parameters: fname     - str, path of the file to query
                disp_num  - bool, append the slice index after a tab
                print_msg - bool, print the slice names to stdout
    Returns:    slice_list - list of str, slice names in the file
    """
    slice_list = readfile.get_slice_list(fname)
    if print_msg:
        # enumerate instead of range(len(...)) indexing
        for i, slice_name in enumerate(slice_list):
            print('{}\t{}'.format(slice_name, i) if disp_num else slice_name)
    return slice_list
def read_HDFEOS(inps):
    """Read displacement (or velocity), incidence and azimuth angle from an HDFEOS file.

    Parameters: inps - namespace with attributes:
                       input_HDFEOS - list of str, HDFEOS file path
                       date         - str/list or None, YYYYMMDD or YYYYMMDD_YYYYMMDD
                       velocity     - bool, return velocity instead of displacement
    Returns:    date1, date2 - str, start / end date in YYYYMMDD
                data         - 2D np.ndarray, displacement or velocity, NaN where masked
                atr          - dict, metadata of the input file
                incidence    - 2D np.ndarray, incidence angle, NaN where masked
                azimuth      - 2D np.ndarray, azimuth angle, NaN where masked
    """
    print('read displacement, incidence and azimuth information')
    # read metadata
    HDFEOS_file = inps.input_HDFEOS[0]
    atr = readfile.read_attribute(HDFEOS_file)

    # default dates: full time span from the file metadata
    # (fix: use `is None` instead of `== None` per PEP 8)
    if inps.date is None:
        date1 = atr['START_DATE']
        date2 = atr['END_DATE']
    else:
        # date1 and date2: either an explicit date pair, or a single end date
        if '_' in "".join(inps.date):
            date1, date2 = ptime.yyyymmdd("".join(inps.date).split('_'))
        else:
            date1 = atr['START_DATE']
            date2 = ptime.yyyymmdd("".join(inps.date))

    # read angle information
    azimuth = readfile.read(HDFEOS_file, datasetName='/HDFEOS/GRIDS/timeseries/geometry/azimuthAngle')[0]
    incidence = readfile.read(HDFEOS_file, datasetName='/HDFEOS/GRIDS/timeseries/geometry/incidenceAngle')[0]

    if inps.velocity:
        # estimate velocity with timeseries2velocity.py, then remove the temp file
        vel_file = 'velocity.h5'
        iargs = [HDFEOS_file, '--start-date', date1, '--end-date', date2, '-o', vel_file, '--update']
        print('\ntimeseries2velocity.py', ' '.join(iargs))
        mintpy.timeseries2velocity.main(iargs)
        data = readfile.read(vel_file, datasetName='velocity')[0]
        os.remove(vel_file)
    else:
        # read/prepare displacement between date1 and date2
        slice_list = readfile.get_slice_list(HDFEOS_file)
        dname = 'displacement'
        slice_name1 = view.search_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
        # NOTE(review): index [0][1] here vs [0][0] above — confirm this asymmetry is intended
        slice_name2 = view.search_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][1]
        data = readfile.read("".join(inps.input_HDFEOS), datasetName=slice_name2)[0]
        data -= readfile.read("".join(inps.input_HDFEOS), datasetName=slice_name1)[0]

    # mask out invalid pixels in data and both angle layers
    print("mask file")
    maskfile = readfile.read(HDFEOS_file, datasetName='/HDFEOS/GRIDS/timeseries/quality/mask')[0]
    data[maskfile == 0] = np.nan
    azimuth[maskfile == 0] = np.nan
    incidence[maskfile == 0] = np.nan
    return date1, date2, data, atr, incidence, azimuth
def read_data(inps):
    """Read one dataset from a MintPy file and convert it to a ROI_PAC-style product.

    Parameters: inps - namespace with at least: file, dset, ref_yx, outfile
    Returns:    data        - 2D np.ndarray, data in radian/other unit per file type
                atr         - dict, updated metadata (FILE_TYPE, UNIT, DATE*, ...)
                inps.outfile - str, output file name
    """
    # metadata
    atr = readfile.read_attribute(inps.file)
    # range-to-phase conversion factor; only defined when WAVELENGTH exists
    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types: branch on FILE_TYPE from the metadata
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']

    if k == 'velocity':
        # read/prepare data: velocity * range2phase == 1-year-baseline interferogram
        data = readfile.read(inps.file)[0] * range2phase
        print("converting velocity to an interferogram with one year temporal baseline")
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    elif k == 'timeseries':
        # date1 and date2: pair "d1_d2", or single date differenced against REF_DATE
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)
        # read/prepare data: displacement difference between the two epochs
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]
        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError("No '-' in input dataset! It is required for {}".format(dname))
        else:
            # non-displacement dataset: use the file's full date span
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            slice_name1 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][0]
            # NOTE(review): subtraction order is name1 - name2 here (earlier minus later);
            # other variants in this file use later minus earlier — confirm the intended sign
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')
        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            # re-reference the phase if a reference pixel is recorded
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data (generic file type)
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if inps.outfile:
            # take the output extension as the file type
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata: derive the extension from the input file type
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'
            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # get rid of starting . if output as hdf5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]
    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
def read_data(inps):
    """Read one dataset from a MintPy file, re-reference and mask it, and prepare
    ROI_PAC-style output metadata.

    Parameters: inps - namespace with at least: file, dset, ref_yx, ref_lalo,
                       mask_file, outfile
    Returns:    data        - 2D np.ndarray
                atr         - dict, updated metadata (FILE_TYPE, UNIT, DATE*, REF_*)
                inps.outfile - str, output file name
    """
    # metadata
    atr = readfile.read_attribute(inps.file)
    # range-to-phase conversion factor; only defined when WAVELENGTH exists
    if 'WAVELENGTH' in atr.keys():
        range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel: lat/lon input requires a geocoded file
    if inps.ref_lalo:
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_y, ref_x = coord.geo2radar(inps.ref_lalo[0], inps.ref_lalo[1])[0:2]
            inps.ref_yx = [ref_y, ref_x]
        else:
            raise ValueError("input file is not geocoded --> reference point in lat/lon is NOT support")
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        # also record REF_LAT/LON for geocoded files
        if 'Y_FIRST' in atr.keys():
            coord = ut.coordinate(atr)
            ref_lat, ref_lon = coord.radar2geo(inps.ref_yx[0], inps.ref_yx[1])[0:2]
            atr['REF_LAT'] = ref_lat
            atr['REF_LON'] = ref_lon
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types: branch on FILE_TYPE from the metadata
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']

    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0]
        # velocity to displacement: scale by the DATE12 time span in years
        print('convert velocity to displacement for {}'.format(atr['DATE12']))
        date1, date2 = atr['DATE12'].split('_')
        dt1, dt2 = ptime.date_list2vector([date1, date2])[0]
        data *= (dt2 - dt1).days / 365.25
        # displacement to phase
        print('convert displacement to phase in radian')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = os.path.join(os.path.dirname(inps.file), '{}.unw'.format(atr['DATE12']))

    elif k == 'timeseries':
        # date1 and date2: pair "d1_d2", or single date differenced against REF_DATE
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)
        # read/prepare data: displacement difference between the two epochs
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]
        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError("No '-' in input dataset! It is required for {}".format(dname))
        else:
            # non-displacement dataset: use the file's full date span
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            slice_name1 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][0]
            # NOTE(review): subtraction order is name1 - name2 (earlier minus later) — confirm sign
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')
        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            # re-reference the phase if a reference pixel is recorded
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        elif dname == 'connectComponent':
            atr['FILE_TYPE'] = '.conncomp'
            atr['UNIT'] = '1'
            atr['DATA_TYPE'] = 'byte'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # read data (generic file type)
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if inps.outfile:
            # take the output extension as the file type
            fext = os.path.splitext(inps.outfile)[1]
            atr['FILE_TYPE'] = fext
        else:
            # metadata: derive the extension from the input file type
            if 'coherence' in k.lower():
                atr['FILE_TYPE'] = '.cor'
            elif k in ['mask']:
                atr['FILE_TYPE'] = '.msk'
            elif k in ['geometry'] and inps.dset == 'height':
                if 'Y_FIRST' in atr.keys():
                    atr['FILE_TYPE'] = '.dem'
                else:
                    atr['FILE_TYPE'] = '.hgt'
                atr['UNIT'] = 'm'
            else:
                atr['FILE_TYPE'] = '.unw'
            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    # mask: zero pixels in any input mask file (and NaNs in data) become NaN
    if inps.mask_file:
        for m_file in inps.mask_file:
            print('mask data based on input file: {}'.format(m_file))
            mask = readfile.read(m_file)[0]
            mask *= ~np.isnan(data)
            data[mask == 0] = np.nan

    # get rid of starting . if output as hdf5 file
    if inps.outfile.endswith('.h5'):
        if atr['FILE_TYPE'].startswith('.'):
            atr['FILE_TYPE'] = atr['FILE_TYPE'][1:]
    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
def layout_hdf5(fname, ds_name_dict=None, metadata=None, ds_unit_dict=None,
                ref_file=None, compression=None, print_msg=True):
    """Create HDF5 file with defined metadata and (empty) dataset structure

    Parameters: fname         - str, HDF5 file path
                ds_name_dict  - dict, dataset structure definition
                                {dname : [dtype, dshape],
                                 dname : [dtype, dshape, None],
                                 dname : [dtype, dshape, 1/2/3/4D np.ndarray], #for aux data
                                 ...
                                }
                metadata      - dict, metadata
                ds_unit_dict  - dict, dataset unit definition
                                {dname : dunit,
                                 dname : dunit,
                                 ...
                                }
                ref_file      - str, reference file for the data structure
                compression   - str, HDF5 compression type
    Returns:    fname         - str, HDF5 file path
    Raises:     ValueError        - if neither metadata nor ref_file is given
                FileNotFoundError - if neither ds_name_dict nor a readable ref_file is given
    Example:    layout_hdf5('timeseries_ERA5.h5', ref_file='timeseries.h5')
                layout_hdf5('timeseries_ERA5.5h', ds_name_dict, metadata)

                # structure for ifgramStack
                ds_name_dict = {
                    "date"             : [np.dtype('S8'), (num_ifgram, 2)],
                    "dropIfgram"       : [np.bool_,       (num_ifgram,)],
                    "bperp"            : [np.float32,     (num_ifgram,)],
                    "unwrapPhase"      : [np.float32,     (num_ifgram, length, width)],
                    "coherence"        : [np.float32,     (num_ifgram, length, width)],
                    "connectComponent" : [np.int16,       (num_ifgram, length, width)],
                }

                # structure for geometry
                ds_name_dict = {
                    "height"             : [np.float32, (length, width), None],
                    "incidenceAngle"     : [np.float32, (length, width), None],
                    "slantRangeDistance" : [np.float32, (length, width), None],
                }

                # structure for timeseries
                dates = np.array(date_list, np.string_)
                ds_name_dict = {
                    "date"       : [np.dtype("S8"), (num_date,), dates],
                    "bperp"      : [np.float32,     (num_date,), pbase],
                    "timeseries" : [np.float32,     (num_date, length, width)],
                }
    """
    vprint = print if print_msg else lambda *args, **kwargs: None
    vprint('-'*50)

    # get meta from metadata and ref_file
    if metadata:
        meta = {key: value for key, value in metadata.items()}
    elif ref_file:
        with h5py.File(ref_file, 'r') as fr:
            meta = {key: value for key, value in fr.attrs.items()}
        vprint('grab metadata from ref_file: {}'.format(ref_file))
    else:
        raise ValueError('No metadata or ref_file found.')

    # check ds_name_dict: if absent, reconstruct the dataset structure from ref_file
    if ds_name_dict is None:
        if not ref_file or not os.path.isfile(ref_file):
            raise FileNotFoundError('No ds_name_dict or ref_file found!')
        else:
            vprint('grab dataset structure from ref_file: {}'.format(ref_file))

        ds_name_dict = {}
        fext = os.path.splitext(ref_file)[1]
        shape2d = (int(meta['LENGTH']), int(meta['WIDTH']))

        if fext in ['.h5', '.he5']:
            # copy dset structure from HDF5 file
            with h5py.File(ref_file, 'r') as fr:
                # in case output mat size is different from the input ref file mat size
                shape2d_orig = (int(fr.attrs['LENGTH']), int(fr.attrs['WIDTH']))
                for key in fr.keys():
                    ds = fr[key]
                    if isinstance(ds, h5py.Dataset):
                        # auxliary dataset: keep its original shape and copy its values
                        if ds.shape[-2:] != shape2d_orig:
                            ds_name_dict[key] = [ds.dtype, ds.shape, ds[:]]
                        # dataset: replace the spatial dims with the output size
                        else:
                            ds_shape = list(ds.shape)
                            ds_shape[-2:] = shape2d
                            ds_name_dict[key] = [ds.dtype, tuple(ds_shape), None]
        else:
            # construct dset structure from binary file
            ds_names = readfile.get_slice_list(ref_file)
            ds_dtype = meta['DATA_TYPE']
            for ds_name in ds_names:
                ds_name_dict[ds_name] = [ds_dtype, tuple(shape2d), None]

    # directory: make sure the output folder exists
    fdir = os.path.dirname(os.path.abspath(fname))
    if not os.path.isdir(fdir):
        os.makedirs(fdir)
        # fix: log message typo 'crerate' -> 'create'
        vprint('create directory: {}'.format(fdir))

    # create file
    with h5py.File(fname, "w") as f:
        vprint('create HDF5 file: {} with w mode'.format(fname))

        # initiate dataset
        max_digit = max(len(i) for i in ds_name_dict.keys())
        for key in ds_name_dict.keys():
            data_type = ds_name_dict[key][0]
            data_shape = ds_name_dict[key][1]

            # turn ON compression for conn comp
            ds_comp = compression
            if key in ['connectComponent']:
                ds_comp = 'lzf'

            # changable dataset shape: time dimension is unlimited for 3D datasets
            if len(data_shape) == 3:
                max_shape = (None, data_shape[1], data_shape[2])
            else:
                max_shape = data_shape

            # create empty dataset
            vprint(("create dataset : {d:<{w}} of {t:<25} in size of {s:<20} with "
                    "compression = {c}").format(d=key, w=max_digit, t=str(data_type),
                                                s=str(data_shape), c=ds_comp))
            ds = f.create_dataset(key,
                                  shape=data_shape,
                                  maxshape=max_shape,
                                  dtype=data_type,
                                  chunks=True,
                                  compression=ds_comp)

            # write auxliary data
            if len(ds_name_dict[key]) > 2 and ds_name_dict[key][2] is not None:
                ds[:] = np.array(ds_name_dict[key][2])

        # write attributes in root level
        for key, value in meta.items():
            f.attrs[key] = str(value)

        # write attributes in dataset level
        if ds_unit_dict is not None:
            for key, value in ds_unit_dict.items():
                if value is not None:
                    f[key].attrs['UNIT'] = value
                    vprint(f'add /{key:<{max_digit}} attribute: UNIT = {value}')

    vprint('close HDF5 file: {}'.format(fname))
    return fname
def extract_data(inps, dataset, outdir):
    """Extract one data layer from an HDFEOS file and write it to `outdir`.

    Parameters: inps    - namespace with: input_HDFEOS (list of str), attribute
                          (list of str), date, mask (bool)
                dataset - str, HDF5 group prefix for the datasets in the file
                outdir  - str, output directory
    Returns:    data      - np.ndarray, extracted data
                data_name - str, base name of the written product
                atr       - dict, metadata of the input file
    Side effects: writes one or more files under outdir; for the velocity branch
                  it also changes the working directory and shells out to
                  timeseries2velocity.py.
    """
    # read HDFEOS file
    # metadata
    atr = readfile.read_attribute("".join(inps.input_HDFEOS))
    attr = "".join(inps.attribute)

    # read 2d data
    if attr == 'displacement' or attr == 'velocity':
        # NOTE(review): `== None` should be `is None` per PEP 8 — left unchanged here
        if inps.date == None:
            date1 = atr['START_DATE']
            date2 = atr['END_DATE']
            #raise Exception("ERROR! Date for displacement must be given!")
        else:
            # date1 and date2: explicit date pair, or single end date vs START_DATE
            if '_' in "".join(inps.date):
                date1, date2 = ptime.yyyymmdd("".join(inps.date).split('_'))
            else:
                date1 = atr['START_DATE']
                date2 = ptime.yyyymmdd("".join(inps.date))
        #date12 = '{}_{}'.format(date1, date2)

        if attr == 'displacement':
            # read / prepare data: difference of the two displacement slices
            slice_list = readfile.get_slice_list("".join(inps.input_HDFEOS))
            dname = 'displacement'
            slice_name1 = view.search_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
            # NOTE(review): index [0][1] here vs [0][0] above — confirm this asymmetry is intended
            slice_name2 = view.search_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][1]
            data = readfile.read("".join(inps.input_HDFEOS), datasetName=slice_name2)[0]
            data -= readfile.read("".join(inps.input_HDFEOS), datasetName=slice_name1)[0]
            data_name = '{}_{}_{}'.format(attr, date1, date2)
            if inps.mask:
                print("mask file")
                maskfile = readfile.read(
                    "".join(inps.input_HDFEOS),
                    datasetName='/HDFEOS/GRIDS/timeseries/quality/mask')[0]
                data[maskfile == 0] = np.nan
            outfile = outdir + '/' + data_name + '.h5'
            atr['FILE_TYPE'] = '.unw'
            writefile.write(data, out_file=outfile, metadata=atr)
            #print('converting range to phase')
            #data *= range2phase
            #if inps.ref_yx:
            #    data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        elif attr == 'velocity':
            maskfile = readfile.read(
                "".join(inps.input_HDFEOS),
                datasetName='/HDFEOS/GRIDS/timeseries/quality/mask')[0]
            dname = 'displacement'
            data_timeseries = readfile.read("".join(inps.input_HDFEOS),
                                            datasetName=dataset + dname)[0]
            if inps.mask:
                print("mask file")
                data_timeseries[:, maskfile == 0] = np.nan
            # NOTE(review): this h5py.File handle is never closed — consider a `with` block
            bperp_date = h5py.File("".join(inps.input_HDFEOS), 'r')
            data_bperp = bperp_date[(dataset + 'bperp')]
            data_date = bperp_date[(dataset + 'date')]
            ts_data_name = 'timeseries_ERA_demErr.h5'
            outfile = outdir + '/' + ts_data_name

            # write to HDF5 file
            dsDict = dict()
            dsDict['bperp'] = data_bperp
            dsDict['date'] = data_date
            dsDict['timeseries'] = data_timeseries
            atr['FILE_TYPE'] = attr
            writefile.write(dsDict, out_file=outfile, metadata=atr)

            # run timeseries2velocity.py
            output_vel = 'velocity_' + date1 + '_' + date2 + '.h5'
            data_name = output_vel.split('.')[0]
            os.chdir(outdir)
            completion_status = os.system(
                multitrack_utilities.seperate_str_byspace([
                    'timeseries2velocity.py', outfile, '--start-date', date1,
                    '--end-date', date2, '--exclude', '../exclude_date.txt',
                    '--output', output_vel
                ]))
            if completion_status == 1:
                raise Exception('error when generate velocity!')
            data_vel, vel_atr = readfile.read(output_vel, datasetName='velocity')
            data = data_vel
            if inps.mask:
                print("mask file")
                # NOTE(review): `~maskfile` differs from the `maskfile == 0` test used
                # elsewhere in this function — confirm maskfile dtype makes them equivalent
                data[~maskfile] = np.nan
                writefile.write(data, out_file=output_vel, metadata=vel_atr)
            os.chdir('../')
    else:
        # any other attribute: read the named dataset directly
        data = readfile.read("".join(inps.input_HDFEOS), datasetName=dataset + attr)[0]
        outfile = outdir + '/' + attr + '.h5'
        data_name = attr
        atr['FILE_TYPE'] = attr
        if inps.mask:
            print("mask file")
            maskfile = readfile.read(
                "".join(inps.input_HDFEOS),
                datasetName='/HDFEOS/GRIDS/timeseries/quality/mask')[0]
            data[maskfile == 0] = np.nan
        writefile.write(data, out_file=outfile, metadata=atr)

    # mask data
    #if inps.mask:
    #    print("mask file")
    #    maskfile = readfile.read("".join(inps.input_HDFEOS),datasetName='/HDFEOS/GRIDS/timeseries/quality/mask')[0]
    #    data[maskfile == 0] = np.nan
    return data, data_name, atr
def read_data(inps):
    """Read one dataset from a MintPy file and prepare ROI_PAC-style output metadata.

    Parameters: inps - namespace with at least: file, dset, ref_yx, outfile
    Returns:    data        - 2D np.ndarray
                atr         - dict, updated metadata (FILE_TYPE, UNIT, DATE*, ...)
                inps.outfile - str, output file name
    """
    # metadata
    atr = readfile.read_attribute(inps.file)
    # range-to-phase conversion factor (requires WAVELENGTH in the metadata)
    range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types: branch on FILE_TYPE from the metadata
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']

    if k == 'velocity':
        # read/prepare data: velocity * range2phase == 1-year-baseline interferogram
        data = readfile.read(inps.file)[0] * range2phase
        print("converting velocity to an interferogram with one year temporal baseline")
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    elif k == 'timeseries':
        # date1 and date2: pair "d1_d2", or single date differenced against REF_DATE
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)
        # read/prepare data: displacement difference between the two epochs
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_'+inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]
        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError("No '-' in input dataset! It is required for {}".format(dname))
        else:
            # non-displacement dataset: use the file's full date span
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            slice_name1 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(slice_list, '{}-{}'.format(dname, date2))[0][0]
            # NOTE(review): subtraction order is name1 - name2 (earlier minus later) — confirm sign
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')
        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            # re-reference the phase if a reference pixel is recorded
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')
        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_'+inps.outfile

    else:
        # read data (generic file type)
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        # metadata: derive the extension from the input file type
        if 'coherence' in k.lower():
            atr['FILE_TYPE'] = '.cor'
        elif k in ['mask']:
            atr['FILE_TYPE'] = '.msk'
            atr['DATA_TYPE'] = 'byte'
        elif k in ['geometry'] and inps.dset == 'height':
            if 'Y_FIRST' in atr.keys():
                atr['FILE_TYPE'] = '.dem'
                atr['DATA_TYPE'] = 'int16'
            else:
                atr['FILE_TYPE'] = '.hgt'
            atr['UNIT'] = 'm'
        else:
            atr['FILE_TYPE'] = '.unw'
        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
def main(argv):
    """Estimate integer phase-ambiguity corrections for an interferogram stack
    from closure residuals and write the corrected stack to an HDF5 file.

    Parameters: argv - list of str, unused here; arguments come from cmdLineParse()
    Side effects: writes the corrected stack (default 'ifgramStackCor.h5') and
                  terminates the interpreter via sys.exit().
    """
    inps = cmdLineParse()
    ifgram = inps.ifgram_file
    # carry-over datasets to be copied into the output file unchanged
    Bperp = readfile.read(ifgram, datasetName='bperp')[0]
    Date = readfile.read(ifgram, datasetName='date')[0]
    DropIfgram = readfile.read(ifgram, datasetName='dropIfgram')[0]
    #invRes = inps.inversion_res
    mask_file = inps.mask_file
    mask = readfile.read(mask_file, datasetName='mask')[0]
    meta = readfile.read_attribute(ifgram, datasetName=None)
    REF_X = int(meta['REF_X'])
    REF_Y = int(meta['REF_Y'])

    # output file name
    if inps.out_file:
        OUT = inps.out_file
    else:
        OUT = 'ifgramStackCor.h5'

    # collect the unwrapped-phase slice names
    sliceList = readfile.get_slice_list(ifgram)
    N_list = len(sliceList)
    g_list = []
    for i in range(N_list):
        if 'unwrapPhase-' in sliceList[i]:
            g_list.append(sliceList[i])
    N_list = len(g_list)

    # per-interferogram mean of the re-referenced phase over the masked area
    Ifg = []
    print('Start to calculate the integer ambugity for each closure')
    for i in range(N_list):
        print_progress(i+1, N_list, prefix='Data: ', suffix=g_list[i])
        dset = g_list[i]
        ifgram1 = readfile.read(ifgram, datasetName=dset)[0]
        ifgram0 = ifgram1 - ifgram1[REF_Y, REF_X]
        Ifg.append(np.mean(ifgram0[mask == 1]))

    stack_obj = ifgramStack(ifgram)
    stack_obj.open()
    # NOTE(review): the two lines below are duplicated verbatim in the original
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    date12_list = stack_obj.get_date12_list(dropIfgram=True)
    num_ifgram = len(date12_list)
    C0 = ifgramStack.get_design_matrix4triplet(date12_list)
    # (an earlier per-pair estimation approach was removed here as dead code)

    # solve for the integer ambiguities: L1-regularized fit of the rounded
    # closure residuals against the triplet design matrix
    C = matrix(ifgramStack.get_design_matrix4triplet(date12_list).astype(float))
    ResC = np.dot(C, Ifg)
    L = matrix(np.round(ResC/(2*np.pi)))
    U = l1regls(-C, L, alpha=1e-2, show_progress=0)
    kk = np.round(U)

    # read the full unwrapped-phase stack (already re-referenced)
    num_row = stack_obj.length
    num_col = stack_obj.width
    box = [0, 0, stack_obj.width, stack_obj.length]
    ref_phase = stack_obj.get_reference_phase(unwDatasetName='unwrapPhase', skip_reference=None, dropIfgram=True)
    pha_data = read_unwrap_phase(stack_obj, box, ref_phase, unwDatasetName='unwrapPhase', dropIfgram=True)
    data = pha_data.reshape(N_list, num_row, num_col)
    # NOTE(review): data0 aliases data (no copy), so the loop mutates both
    data0 = data

    # apply the 2*pi integer corrections inside the mask
    print('Start to correct and write new unwrapIfg file ...')
    for i in range(N_list):
        ifgram1 = data[i, :, :]
        ifgram0 = ifgram1 - ifgram1[REF_Y, REF_X]
        #print(ifgram0.shape)
        #ifgram0 = ifgram0 + mask*(kk*(2*np.pi))
        ifgram0[mask == 1] = ifgram0[mask == 1] + kk[i]*(2*np.pi)
        data0[i, :, :] = ifgram0

    # assemble and write the output stack
    datasetDict = dict()
    datasetDict['unwrapPhase'] = data0
    datasetDict['IntAmbiguity'] = kk
    datasetDict['bperp'] = Bperp
    datasetDict['date'] = Date
    datasetDict['dropIfgram'] = DropIfgram
    datasetDict['C'] = C0
    write_variogram_h5(datasetDict, OUT, metadata=meta, ref_file=ifgram, compression=None)
    # NOTE(review): exit code 1 conventionally signals failure — confirm this
    # should not be sys.exit(0) on the success path
    sys.exit(1)
def read_ts_std(fname, box=None, datasetName=None, print_msg=True, xstep=1, ystep=1, data_type=None):
    """Read one dataset and its attributes from input file.

    For a 4D dataset, the diagonal slices ds[i, i, :, :] of the first two
    dimensions are read and the square root is taken — presumably a
    (date, date, y, x) covariance cube reduced to per-date standard
    deviation (TODO confirm against the file producer).

    Parameters: fname       : str, path of file to read
                datasetName : str or list of str, slice names
                box         : 4-tuple of int, area to read, defined in
                              (x0, y0, x1, y1) in pixel coordinate
                x/ystep     : int, number of pixels to pick/multilook for
                              each output pixel
                data_type   : numpy data type, e.g. np.float32, np.bool_,
                              etc. NOTE(review): currently unused
    Returns:    data        : 2/3D matrix in numpy.array format
                atr         : dictionary, attributes of data
    Raises:     ValueError if datasetName matches nothing in the file.
    """
    # metadata
    dsname4atr = None  # used to determine UNIT
    if isinstance(datasetName, list):
        dsname4atr = datasetName[0].split('-')[0]
    elif isinstance(datasetName, str):
        dsname4atr = datasetName.split('-')[0]
    atr = readfile.read_attribute(fname, datasetName=dsname4atr)

    # box: default to the full extent of the file
    length, width = int(atr['LENGTH']), int(atr['WIDTH'])
    if not box:
        box = (0, 0, width, length)

    # collect dataset families (slice-name prefixes), keeping first-seen order
    slice_list = readfile.get_slice_list(fname)
    ds_list = []
    for i in [i.split('-')[0] for i in slice_list]:
        if i not in ds_list:
            ds_list.append(i)
    ds_2d_list = [i for i in slice_list if '-' not in i]
    ds_3d_list = [i for i in ds_list if i not in ds_2d_list]

    # Input Argument: convert input datasetName into list of slice
    if not datasetName:
        datasetName = [ds_list[0]]
    elif isinstance(datasetName, str):
        datasetName = [datasetName]

    # if datasetName is all date info, add dsFamily as prefix
    # a) if all digit, e.g. YYYYMMDD
    # b) if in isoformat(), YYYY-MM-DDTHH:MM, etc.
    if all(x.isdigit() or x[:4].isdigit() for x in datasetName):
        datasetName = ['{}-{}'.format(ds_3d_list[0], x) for x in datasetName]

    # Input Argument: decompose slice list into dsFamily and inputDateList
    dsFamily = datasetName[0].split('-')[0]
    inputDateList = [x.replace(dsFamily, '') for x in datasetName]
    inputDateList = [x[1:] for x in inputDateList if x.startswith('-')]

    with h5py.File(fname, 'r') as f:
        # index of the reference date in the file's date list
        # NOTE(review): indx is computed but never used below — confirm intent
        indx = np.where(f['date'][:] == atr['REF_DATE'].encode('UTF-8'))[0]

        # get dataset object
        dsNames = [i for i in [datasetName[0], dsFamily] if i in f.keys()]
        # support for old mintpy-v0.x files
        dsNamesOld = [i for i in slice_list if '/{}'.format(datasetName[0]) in i]
        if len(dsNames) > 0:
            ds = f[dsNames[0]]
        elif len(dsNamesOld) > 0:
            ds = f[dsNamesOld[0]]
        else:
            raise ValueError('input dataset {} not found in file {}'.format(datasetName, fname))

        # output size for >=2D dataset if x/ystep > 1
        xsize = int((box[2] - box[0]) / xstep)
        ysize = int((box[3] - box[1]) / ystep)

        if ds.ndim == 4:
            # num2 (size of the 2nd dimension) is unused; only the diagonal
            # ds[i, i, ...] is read below
            num1, num2 = ds.shape[0], ds.shape[1]
            shape = (num1, ysize, xsize)
            if print_msg:
                ram_size = num1 * ysize * xsize * ds.dtype.itemsize / 1024**3
                print(f'initiate a 3D matrix in size of {shape} in {ds.dtype} in the memory ({ram_size:.1f} GB) ...')
            # zeros * nan fills the buffer with NaN (NOTE: for an integer
            # ds.dtype this silently promotes the result to float)
            data = np.zeros(shape, ds.dtype) * np.nan

            # loop over the 1st dimension [for more verbose print out msg]
            for i in range(num1):
                if print_msg:
                    sys.stdout.write('\r' + f'reading 3D cubes {i + 1}/{num1}...')
                    sys.stdout.flush()
                # diagonal element of the first two dimensions, cropped to box
                d3 = ds[i, i, box[1]:box[3], box[0]:box[2]]
                # sampling / nearest interpolation in y/xstep
                if xstep * ystep > 1:
                    d3 = d3[int(ystep / 2)::ystep, int(xstep / 2)::xstep]
                data[i, :, :] = d3[:ysize, :xsize]
            if print_msg:
                print('')
            # variance -> standard deviation (assumes the cube stores variances)
            data = np.sqrt(data)

        elif ds.ndim == 3:
            # define flag matrix for index in time domain
            slice_flag = np.zeros((ds.shape[0]), dtype=np.bool_)
            if not inputDateList or inputDateList == ['']:
                # no date selection: read all slices
                slice_flag[:] = True
            else:
                # mark only the requested dates
                date_list = [i.split('-', 1)[1] for i in [j for j in slice_list if j.startswith(dsFamily)]]
                for d in inputDateList:
                    slice_flag[date_list.index(d)] = True

            # read data
            if xstep * ystep == 1:
                # single HDF5 fancy-index read, no multilooking needed
                data = ds[slice_flag, box[1]:box[3], box[0]:box[2]]
            else:
                # sampling / nearest interplation in y/xstep
                # use for loop to save memory
                num_slice = np.sum(slice_flag)
                data = np.zeros((num_slice, ysize, xsize), ds.dtype)
                inds = np.where(slice_flag)[0]
                for i in range(num_slice):
                    # print out msg
                    if print_msg:
                        sys.stdout.write('\r' + f'reading 2D slices {i+1}/{num_slice}...')
                        sys.stdout.flush()
                    # read and index
                    d2 = ds[inds[i], box[1]:box[3], box[0]:box[2]]
                    d2 = d2[int(ystep / 2)::ystep, int(xstep / 2)::xstep]
                    data[i, :, :] = d2[:ysize, :xsize]
                if print_msg:
                    print('')

    # drop singleton dimensions (e.g. a single requested date)
    if any(i == 1 for i in data.shape):
        data = np.squeeze(data)
    return data, atr