def prepare_metadata(meta_file, geom_src_dir, box=None):
    """Prepare MintPy (ROIPAC-style) metadata dict from ISCE products.

    Parameters: meta_file    : str, path of the ISCE metadata file
                geom_src_dir : str, directory of the full-resolution geometry files
                box          : tuple of 4 int, subset range in (x0, y0, x1, y1),
                               or None for no subset
    Returns:    meta         : dict, attributes in MintPy (ROIPAC) format
    """
    print('-' * 50)

    # extract metadata from ISCE to MintPy (ROIPAC) format
    meta = isce_utils.extract_isce_metadata(meta_file, update_mode=False)[0]

    # geocoded products carry Y_FIRST and use *.geo.full geometry files;
    # radar-coded products use *.rdr.full
    # (idiom fix: membership test on the dict directly, not on .keys())
    if 'Y_FIRST' in meta:
        geom_ext = '.geo.full'
    else:
        geom_ext = '.rdr.full'

    # add LAT/LON_REF1/2/3/4, HEADING, A/RLOOKS
    meta = isce_utils.extract_geometry_metadata(
        geom_src_dir,
        metadata=meta,
        box=box,
        fext_list=[geom_ext],
    )

    # add LENGTH / WIDTH from the latitude geometry file
    atr = readfile.read_attribute(os.path.join(geom_src_dir, 'lat{}'.format(geom_ext)))
    meta['LENGTH'] = atr['LENGTH']
    meta['WIDTH'] = atr['WIDTH']

    # update metadata due to subset
    print('update metadata due to subset with bounding box')
    meta = ut.subset_attribute(meta, box)

    return meta
def write2hdf5(self, outputFile='ifgramStack.h5', access_mode='w', box=None,
               compression=None, extra_metadata=None):
    '''Save/write an ifgramStackDict object into an HDF5 file with the structure defined in:

    https://mintpy.readthedocs.io/en/latest/api/data_structure/#ifgramstack

    Parameters: outputFile : str, Name of the HDF5 file for the InSAR stack
                access_mode : str, access mode of output File, e.g. w, r+
                box : tuple, subset range in (x0, y0, x1, y1)
                compression : str or None, HDF5 compression filter for the 3D datasets
                extra_metadata : dict, extra metadata to be added into output file
    Returns: outputFile : str, path of the written HDF5 file
    '''
    self.outputFile = outputFile
    f = h5py.File(self.outputFile, access_mode)
    print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))

    # sort pairs chronologically; keep only recognized dataset families,
    # in the canonical order given by ifgramDatasetNames
    self.pairs = sorted([pair for pair in self.pairsDict.keys()])
    self.dsNames = list(self.pairsDict[self.pairs[0]].datasetDict.keys())
    self.dsNames = [i for i in ifgramDatasetNames if i in self.dsNames]
    # longest dataset name, used to align the printed dataset table
    maxDigit = max([len(i) for i in self.dsNames])
    # presumably sets self.numIfgram / self.length / self.width from box — TODO confirm
    self.get_size(box)

    self.bperp = np.zeros(self.numIfgram)

    ###############################
    # 3D datasets containing unwrapPhase, coherence, connectComponent, wrapPhase, etc.
    for dsName in self.dsNames:
        dsShape = (self.numIfgram, self.length, self.width)
        dsDataType = np.float32
        dsCompression = compression
        if dsName in ['connectComponent']:
            # integer component labels: smaller dtype, always lzf-compressed
            dsDataType = np.int16
            dsCompression = 'lzf'

        print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
               ' with compression = {c}').format(d=dsName,
                                                 w=maxDigit,
                                                 t=str(dsDataType),
                                                 s=dsShape,
                                                 c=dsCompression))
        # maxshape with None in the first axis allows appending pairs later
        ds = f.create_dataset(dsName,
                              shape=dsShape,
                              maxshape=(None, dsShape[1], dsShape[2]),
                              dtype=dsDataType,
                              chunks=True,
                              compression=dsCompression)

        # write one interferogram at a time to bound memory usage
        prog_bar = ptime.progressBar(maxValue=self.numIfgram)
        for i in range(self.numIfgram):
            ifgramObj = self.pairsDict[self.pairs[i]]
            data = ifgramObj.read(dsName, box=box)[0]
            ds[i, :, :] = data
            # perpendicular baseline per pair; self.dsName0 is presumably the
            # primary dataset family used as reference — TODO confirm
            self.bperp[i] = ifgramObj.get_perp_baseline(family=self.dsName0)
            prog_bar.update(i + 1, suffix='{}_{}'.format(self.pairs[i][0],
                                                         self.pairs[i][1]))
        prog_bar.close()
        ds.attrs['MODIFICATION_TIME'] = str(time.time())

    ###############################
    # 2D dataset containing reference and secondary dates of all pairs
    dsName = 'date'
    dsDataType = np.string_
    dsShape = (self.numIfgram, 2)
    print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
        d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
    data = np.array(self.pairs, dtype=dsDataType)
    f.create_dataset(dsName, data=data)

    ###############################
    # 1D dataset containing perpendicular baseline of all pairs
    dsName = 'bperp'
    dsDataType = np.float32
    dsShape = (self.numIfgram, )
    print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
        d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
    data = np.array(self.bperp, dtype=dsDataType)
    f.create_dataset(dsName, data=data)

    ###############################
    # 1D dataset containing bool value of dropping the interferograms or not
    dsName = 'dropIfgram'
    dsDataType = np.bool_
    dsShape = (self.numIfgram, )
    print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
        d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
    # default: keep (True) every interferogram
    data = np.ones(dsShape, dtype=dsDataType)
    f.create_dataset(dsName, data=data)

    ###############################
    # Attributes
    self.get_metadata()
    if extra_metadata:
        self.metadata.update(extra_metadata)
        print('add extra metadata: {}'.format(extra_metadata))
    # adjust LENGTH/WIDTH etc. for the subset box, then tag the file type
    self.metadata = ut.subset_attribute(self.metadata, box)
    self.metadata['FILE_TYPE'] = self.name
    for key, value in self.metadata.items():
        f.attrs[key] = value

    f.close()
    print('Finished writing to {}'.format(self.outputFile))
    return self.outputFile
def write2hdf5(self, outputFile='geometryRadar.h5', access_mode='w', box=None,
               compression='lzf', extra_metadata=None):
    ''' Save/write to HDF5 file with structure defined in:

    https://mintpy.readthedocs.io/en/latest/api/data_structure/#geometry

    Parameters: outputFile : str, name of the output HDF5 geometry file
                access_mode : str, access mode of output File, e.g. w, r+
                box : tuple, subset range in (x0, y0, x1, y1)
                compression : str or None, HDF5 compression filter
                extra_metadata : dict, extra metadata to be added into output file
    Returns: outputFile, or None if the object holds no dataset file paths
    '''
    # nothing to write — bail out early
    if len(self.datasetDict) == 0:
        print('No dataset file path in the object, skip HDF5 file writing.')
        return None

    self.outputFile = outputFile
    f = h5py.File(self.outputFile, access_mode)
    print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))

    #groupName = self.name
    #group = f.create_group(groupName)
    #print('create group /{}'.format(groupName))

    # longest known geometry dataset name, used to align the printed table
    maxDigit = max([len(i) for i in geometryDatasetNames])
    # subset size (written to file) vs full size (kept on self for
    # full-resolution baseline interpolation below)
    length, width = self.get_size(box=box)
    self.length, self.width = self.get_size()

    ###############################
    for dsName in self.dsNames:
        # 3D datasets containing bperp
        if dsName == 'bperp':
            self.dateList = list(self.datasetDict[dsName].keys())
            dsDataType = np.float32
            self.numDate = len(self.dateList)
            dsShape = (self.numDate, length, width)
            # maxshape with None in the first axis allows appending dates later
            ds = f.create_dataset(dsName,
                                  shape=dsShape,
                                  maxshape=(None, dsShape[1], dsShape[2]),
                                  dtype=dsDataType,
                                  chunks=True,
                                  compression=compression)
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))

            print('read coarse grid baseline files and linear interpolate into full resolution ...')
            prog_bar = ptime.progressBar(maxValue=self.numDate)
            for i in range(self.numDate):
                fname = self.datasetDict[dsName][self.dateList[i]]
                # interpolate to the FULL size, then subset via box
                data = read_isce_bperp_file(fname=fname,
                                            out_shape=(self.length, self.width),
                                            box=box)
                ds[i, :, :] = data
                prog_bar.update(i + 1, suffix=self.dateList[i])
            prog_bar.close()

            # Write 1D dataset date
            dsName = 'date'
            dsShape = (self.numDate, )
            dsDataType = np.string_
            print(('create dataset /{d:<{w}} of {t:<25}'
                   ' in size of {s}').format(d=dsName,
                                             w=maxDigit,
                                             t=str(dsDataType),
                                             s=dsShape))
            data = np.array(self.dateList, dtype=dsDataType)
            ds = f.create_dataset(dsName, data=data)

        # 2D datasets containing height, latitude, incidenceAngle, shadowMask, etc.
        else:
            dsDataType = np.float32
            if dsName.lower().endswith('mask'):
                dsDataType = np.bool_
            dsShape = (length, width)
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))

            # read
            data = np.array(self.read(family=dsName, box=box)[0], dtype=dsDataType)

            # water body: -1 for water and 0 for land
            # water mask: 0 for water and 1 for land
            fname = os.path.basename(self.datasetDict[dsName])
            if fname.startswith('waterBody') or fname.endswith('.wbd'):
                # threshold at -0.5 to convert -1/0 water-body to 0/1 water-mask
                data = data > -0.5
                print((' input file "{}" is water body (-1/0 for water/land), '
                       'convert to water mask (0/1 for water/land).'.format(fname)))

            # write
            ds = f.create_dataset(dsName,
                                  data=data,
                                  chunks=True,
                                  compression=compression)

    ###############################
    # Generate Dataset if not existed in binary file: incidenceAngle, slantRangeDistance
    for dsName in [i for i in ['incidenceAngle', 'slantRangeDistance']
                   if i not in self.dsNames]:
        # Calculate data
        data = None
        if dsName == 'incidenceAngle':
            data = self.get_incidence_angle(box=box)
        elif dsName == 'slantRangeDistance':
            data = self.get_slant_range_distance(box=box)

        # Write dataset
        if data is not None:
            dsShape = data.shape
            dsDataType = np.float32
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))
            ds = f.create_dataset(dsName,
                                  data=data,
                                  dtype=dsDataType,
                                  chunks=True,
                                  compression=compression)

    ###############################
    # Attributes
    self.get_metadata()
    if extra_metadata:
        self.metadata.update(extra_metadata)
        print('add extra metadata: {}'.format(extra_metadata))
    # adjust LENGTH/WIDTH etc. for the subset box, then tag the file type
    self.metadata = ut.subset_attribute(self.metadata, box)
    self.metadata['FILE_TYPE'] = self.name
    for key, value in self.metadata.items():
        f.attrs[key] = value

    f.close()
    print('Finished writing to {}'.format(self.outputFile))
    return self.outputFile
def write2hdf5(self, outputFile='geometryRadar.h5', access_mode='w', box=None,
               compression='lzf', extra_metadata=None):
    ''' Write the geometry stack to an HDF5 file with the layout below.

    /                        Root level
    Attributes               Dictionary for metadata. 'X/Y_FIRST/STEP' attribute for geocoded.
    /height                  2D array of float32 in size of (l, w ) in meter.
    /latitude (azimuthCoord) 2D array of float32 in size of (l, w ) in degree.
    /longitude (rangeCoord)  2D array of float32 in size of (l, w ) in degree.
    /incidenceAngle          2D array of float32 in size of (l, w ) in degree.
    /slantRangeDistance      2D array of float32 in size of (l, w ) in meter.
    /azimuthAngle            2D array of float32 in size of (l, w ) in degree. (optional)
    /shadowMask              2D array of bool    in size of (l, w ).           (optional)
    /waterMask               2D array of bool    in size of (l, w ).           (optional)
    /bperp                   3D array of float32 in size of (n, l, w) in meter (optional)
    /date                    1D array of string  in size of (n, ) in YYYYMMDD  (optional)
    ...
    '''
    # nothing to write — bail out early
    if len(self.datasetDict) == 0:
        print('No dataset file path in the object, skip HDF5 file writing.')
        return None

    self.outputFile = outputFile
    f = h5py.File(self.outputFile, access_mode)
    print('create HDF5 file {} with {} mode'.format(self.outputFile, access_mode))

    #groupName = self.name
    #group = f.create_group(groupName)
    #print('create group /{}'.format(groupName))

    # longest known geometry dataset name, used to align the printed table
    maxDigit = max([len(i) for i in geometryDatasetNames])
    # subset size (written to file) vs full size (kept on self for
    # full-resolution baseline interpolation below)
    length, width = self.get_size(box=box)
    self.length, self.width = self.get_size()

    ###############################
    for dsName in self.dsNames:
        # 3D datasets containing bperp
        if dsName == 'bperp':
            self.dateList = list(self.datasetDict[dsName].keys())
            # dataType is presumably a module-level default dtype (e.g. np.float32)
            # — TODO confirm against module constants
            dsDataType = dataType
            self.numDate = len(self.dateList)
            dsShape = (self.numDate, length, width)
            # maxshape with None in the first axis allows appending dates later
            ds = f.create_dataset(dsName,
                                  shape=dsShape,
                                  maxshape=(None, dsShape[1], dsShape[2]),
                                  dtype=dsDataType,
                                  chunks=True,
                                  compression=compression)
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))

            print('read coarse grid baseline files and linear interpolate into full resolution ...')
            prog_bar = ptime.progressBar(maxValue=self.numDate)
            for i in range(self.numDate):
                fname = self.datasetDict[dsName][self.dateList[i]]
                # interpolate to the FULL size, then subset via box
                data = read_isce_bperp_file(fname=fname,
                                            out_shape=(self.length, self.width),
                                            box=box)
                ds[i, :, :] = data
                prog_bar.update(i+1, suffix=self.dateList[i])
            prog_bar.close()

            # Write 1D dataset date
            dsName = 'date'
            dsShape = (self.numDate,)
            dsDataType = np.string_
            print(('create dataset /{d:<{w}} of {t:<25}'
                   ' in size of {s}').format(d=dsName,
                                             w=maxDigit,
                                             t=str(dsDataType),
                                             s=dsShape))
            data = np.array(self.dateList, dtype=dsDataType)
            ds = f.create_dataset(dsName, data=data)

        # 2D datasets containing height, latitude, incidenceAngle, shadowMask, etc.
        else:
            dsDataType = dataType
            if dsName.lower().endswith('mask'):
                dsDataType = np.bool_
            dsShape = (length, width)
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))
            data = np.array(self.read(family=dsName, box=box)[0], dtype=dsDataType)
            ds = f.create_dataset(dsName,
                                  data=data,
                                  chunks=True,
                                  compression=compression)

    ###############################
    # Generate Dataset if not existed in binary file: incidenceAngle, slantRangeDistance
    for dsName in [i for i in ['incidenceAngle', 'slantRangeDistance']
                   if i not in self.dsNames]:
        # Calculate data
        data = None
        if dsName == 'incidenceAngle':
            data = self.get_incidence_angle(box=box)
        elif dsName == 'slantRangeDistance':
            data = self.get_slant_range_distance(box=box)

        # Write dataset
        if data is not None:
            dsShape = data.shape
            dsDataType = dataType
            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=str(compression)))
            ds = f.create_dataset(dsName,
                                  data=data,
                                  dtype=dataType,
                                  chunks=True,
                                  compression=compression)

    ###############################
    # Attributes
    self.get_metadata()
    if extra_metadata:
        self.metadata.update(extra_metadata)
        print('add extra metadata: {}'.format(extra_metadata))
    # adjust LENGTH/WIDTH etc. for the subset box, then tag the file type
    self.metadata = ut.subset_attribute(self.metadata, box)
    self.metadata['FILE_TYPE'] = self.name
    for key, value in self.metadata.items():
        f.attrs[key] = value

    f.close()
    print('Finished writing to {}'.format(self.outputFile))
    return self.outputFile