Code Example #1
File: prep_fringe.py  Project: scottstanie/MintPy
def prepare_metadata(meta_file, geom_src_dir, box=None):
    print('-' * 50)

    # extract metadata from ISCE to MintPy (ROIPAC) format
    meta = isce_utils.extract_isce_metadata(meta_file, update_mode=False)[0]

    if 'Y_FIRST' in meta.keys():
        geom_ext = '.geo.full'
    else:
        geom_ext = '.rdr.full'

    # add LAT/LON_REF1/2/3/4, HEADING, A/RLOOKS
    meta = isce_utils.extract_geometry_metadata(geom_src_dir,
                                                meta=meta,
                                                box=box,
                                                fext_list=[geom_ext])

    # add LENGTH / WIDTH
    atr = readfile.read_attribute(
        os.path.join(geom_src_dir, 'lat{}'.format(geom_ext)))
    meta['LENGTH'] = atr['LENGTH']
    meta['WIDTH'] = atr['WIDTH']

    ## update metadata due to subset
    print('update metadata due to subset with bounding box')
    meta = attr.update_attribute4subset(meta, box)

    return meta
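
A minimal usage sketch (the metadata file and geometry directory paths below are hypothetical, and the box follows the (x0, y0, x1, y1) pixel convention used throughout these examples):

# Hypothetical paths and box, for illustration only.
box = (100, 200, 1100, 1200)
meta = prepare_metadata('reference/IW1.xml', 'geom_reference', box=box)
print(meta['LENGTH'], meta['WIDTH'])  # size after update_attribute4subset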
Code Example #2
def prepare_metadata(meta_file, geom_src_dir, box=None, nlks_x=1, nlks_y=1):
    print('-' * 50)

    # extract metadata from ISCE to MintPy (ROIPAC) format
    meta = isce_utils.extract_isce_metadata(meta_file, update_mode=False)[0]

    if 'Y_FIRST' in meta.keys():
        geom_ext = '.geo.full'
    else:
        geom_ext = '.rdr.full'

    # add LAT/LON_REF1/2/3/4, HEADING, A/RLOOKS
    meta = isce_utils.extract_geometry_metadata(geom_src_dir,
                                                meta=meta,
                                                box=box,
                                                fext_list=[geom_ext])

    # add LENGTH / WIDTH
    atr = readfile.read_attribute(
        os.path.join(geom_src_dir, 'lat{}'.format(geom_ext)))
    meta['LENGTH'] = atr['LENGTH']
    meta['WIDTH'] = atr['WIDTH']

    ## update metadata due to subset
    print('update metadata due to subset with bounding box')
    meta = attr.update_attribute4subset(meta, box)

    # apply optional user multilooking
    if nlks_x > 1:
        meta['RANGE_PIXEL_SIZE'] = str(
            float(meta['RANGE_PIXEL_SIZE']) * nlks_x)
        meta['RLOOKS'] = str(float(meta['RLOOKS']) * nlks_x)

    if nlks_y > 1:
        meta['AZIMUTH_PIXEL_SIZE'] = str(
            float(meta['AZIMUTH_PIXEL_SIZE']) * nlks_y)
        meta['ALOOKS'] = str(float(meta['ALOOKS']) * nlks_y)

    return meta
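
The multilook branch keeps the ground footprint consistent: pixel size and look count are scaled by the same factor. A self-contained illustration of that invariant, with made-up values:

# Made-up metadata values, for illustration only.
meta = {'RANGE_PIXEL_SIZE': '2.33', 'RLOOKS': '1'}
nlks_x = 4
meta['RANGE_PIXEL_SIZE'] = str(float(meta['RANGE_PIXEL_SIZE']) * nlks_x)
meta['RLOOKS'] = str(float(meta['RLOOKS']) * nlks_x)
assert float(meta['RANGE_PIXEL_SIZE']) == 2.33 * 4  # 9.32 m after 4 looks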
Code Example #3
def read_subset_box(template_file, meta):
    """Read subset info from template file

    Parameters: template_file - str, path of template file
                meta          - dict, metadata
    Returns:    pix_box       - tuple of 4 int in (x0, y0, x1, y1)
                meta          - dict, metadata
    """

    if template_file and os.path.isfile(template_file):

        # read subset info from template file
        pix_box, geo_box = read_subset_template2box(template_file)

        # geo_box --> pix_box
        if geo_box is not None:
            coord = ut.coordinate(meta)
            pix_box = coord.bbox_geo2radar(geo_box)
            pix_box = coord.check_box_within_data_coverage(pix_box)
            print('input bounding box in lalo: {}'.format(geo_box))

    else:
        pix_box = None

    if pix_box is not None:
        # update metadata against the new bounding box
        print('input bounding box in yx: {}'.format(pix_box))
        meta = attr.update_attribute4subset(meta, pix_box)
    else:
        # translate box of None to tuple of 4 int
        length, width = int(meta['LENGTH']), int(meta['WIDTH'])
        pix_box = (0, 0, width, length)

    # ensure all indices are plain int
    pix_box = tuple(int(i) for i in pix_box)

    return pix_box, meta
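
When no template file is given, the function falls back to the full data coverage; a minimal sketch with hypothetical metadata:

# Hypothetical metadata holding only the file size.
meta = {'LENGTH': '500', 'WIDTH': '300'}
pix_box, meta = read_subset_box(None, meta)
assert pix_box == (0, 0, 300, 500)  # full (x0, y0, x1, y1) coverage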
Code Example #4
def subset_file(fname, subset_dict_input, out_file=None):
    """Subset file with
    Inputs:
        fname        : str, path/name of file
        out_file     : str, path/name of output file
        subset_dict : dict, subsut parameter, including the following items:
                      subset_x   : list of 2 int,   subset in x direction,   default=None
                      subset_y   : list of 2 int,   subset in y direction,   default=None
                      subset_lat : list of 2 float, subset in lat direction, default=None
                      subset_lon : list of 2 float, subset in lon direction, default=None
                      fill_value : float, optional. filled value for area outside of data coverage. default=None
                                   None/not-existed to subset within data coverage only.
                      tight  : bool, tight subset or not, for lookup table file, i.e. geomap*.trans
    Outputs:
        out_file :  str, path/name of output file; 
                   out_file = 'subset_'+fname, if fname is in current directory;
                   out_file = fname, if fname is not in the current directory.
    """

    # Input File Info
    atr = readfile.read_attribute(fname)
    width = int(atr['WIDTH'])
    length = int(atr['LENGTH'])
    k = atr['FILE_TYPE']
    print('subset ' + k + ' file: ' + fname + ' ...')

    subset_dict = subset_dict_input.copy()
    # Read Subset Inputs into 4-tuple box in pixel and geo coord
    pix_box, geo_box = subset_input_dict2box(subset_dict, atr)

    coord = ut.coordinate(atr)
    # If fill_value is given and not None/zero, subset the data and fill the assigned
    # value for areas outside the data coverage. Otherwise, re-check the subset box
    # to make sure it is within the data coverage and initialize the matrix with np.nan.
    outfill = bool(subset_dict.get('fill_value'))
    if not outfill:
        pix_box = coord.check_box_within_data_coverage(pix_box)
        subset_dict['fill_value'] = np.nan

    geo_box = coord.box_pixel2geo(pix_box)
    data_box = (0, 0, width, length)
    print('data   range in (x0,y0,x1,y1): {}'.format(data_box))
    print('subset range in (x0,y0,x1,y1): {}'.format(pix_box))
    print('data   range in (W, N, E, S): {}'.format(
        coord.box_pixel2geo(data_box)))
    print('subset range in (W, N, E, S): {}'.format(geo_box))

    if pix_box == data_box:
        print('Subset range == data coverage, no need to subset. Skip.')
        return fname

    # Calculate Subset/Overlap Index
    pix_box4data, pix_box4subset = get_box_overlap_index(data_box, pix_box)

    ###########################  Data Read and Write  ######################
    # Output File Name
    if not out_file:
        if os.getcwd() == os.path.dirname(os.path.abspath(fname)):
            if 'tight' in subset_dict.keys() and subset_dict['tight']:
                out_file = '{}_tight{}'.format(
                    os.path.splitext(fname)[0],
                    os.path.splitext(fname)[1])
            else:
                out_file = 'sub_' + os.path.basename(fname)
        else:
            out_file = os.path.basename(fname)
    print('writing >>> ' + out_file)

    # update metadata
    atr = attr.update_attribute4subset(atr, pix_box)

    # subset datasets one by one
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])

    ext = os.path.splitext(out_file)[1]
    if ext in ['.h5', '.he5']:
        # initiate the output file
        writefile.layout_hdf5(out_file, metadata=atr, ref_file=fname)

        # subset dataset one-by-one
        for dsName in dsNames:
            with h5py.File(fname, 'r') as fi:
                ds = fi[dsName]
                ds_shape = ds.shape
                ds_ndim = ds.ndim
                print('cropping {d} in {b} from {f} ...'.format(
                    d=dsName, b=pix_box4data, f=os.path.basename(fname)))

                if ds_ndim == 2:
                    # read
                    data = ds[pix_box4data[1]:pix_box4data[3],
                              pix_box4data[0]:pix_box4data[2]]

                    # crop
                    data_out = np.ones(
                        (pix_box[3] - pix_box[1], pix_box[2] - pix_box[0]),
                        data.dtype) * subset_dict['fill_value']
                    data_out[pix_box4subset[1]:pix_box4subset[3],
                             pix_box4subset[0]:pix_box4subset[2]] = data
                    data_out = np.array(data_out, dtype=data.dtype)

                    # write
                    block = [0, int(atr['LENGTH']), 0, int(atr['WIDTH'])]
                    writefile.write_hdf5_block(out_file,
                                               data=data_out,
                                               datasetName=dsName,
                                               block=block,
                                               print_msg=True)

                if ds_ndim == 3:
                    prog_bar = ptime.progressBar(maxValue=ds_shape[0])
                    for i in range(ds_shape[0]):
                        # read
                        data = ds[i, pix_box4data[1]:pix_box4data[3],
                                  pix_box4data[0]:pix_box4data[2]]

                        # crop
                        data_out = np.ones(
                            (1, pix_box[3] - pix_box[1],
                             pix_box[2] - pix_box[0]),
                            data.dtype) * subset_dict['fill_value']
                        data_out[:, pix_box4subset[1]:pix_box4subset[3],
                                 pix_box4subset[0]:pix_box4subset[2]] = data

                        # write
                        block = [
                            i, i + 1, 0,
                            int(atr['LENGTH']), 0,
                            int(atr['WIDTH'])
                        ]
                        writefile.write_hdf5_block(out_file,
                                                   data=data_out,
                                                   datasetName=dsName,
                                                   block=block,
                                                   print_msg=False)

                        prog_bar.update(i + 1,
                                        suffix='{}/{}'.format(
                                            i + 1, ds_shape[0]))
                    prog_bar.close()
                    print('finished writing to file: {}'.format(out_file))

    else:
        # IO for binary files
        dsDict = dict()
        for dsName in dsNames:
            dsDict[dsName] = subset_dataset(
                fname,
                dsName,
                pix_box,
                pix_box4data,
                pix_box4subset,
                fill_value=subset_dict['fill_value'])
        writefile.write(dsDict,
                        out_file=out_file,
                        metadata=atr,
                        ref_file=fname)

        # write extra metadata files for ISCE data files
        if os.path.isfile(fname + '.xml') or os.path.isfile(fname + '.aux.xml'):
            # write ISCE XML file
            dtype_gdal = readfile.NUMPY2GDAL_DATATYPE[atr['DATA_TYPE']]
            dtype_isce = readfile.GDAL2ISCE_DATATYPE[dtype_gdal]
            writefile.write_isce_xml(out_file,
                                     width=int(atr['WIDTH']),
                                     length=int(atr['LENGTH']),
                                     bands=len(dsDict.keys()),
                                     data_type=dtype_isce,
                                     scheme=atr['scheme'],
                                     image_type=atr['FILE_TYPE'])
            print(f'write file: {out_file}.xml')

            # write GDAL VRT file
            if os.path.isfile(fname + '.vrt'):
                from isceobj.Util.ImageUtil import ImageLib as IML
                img = IML.loadImage(out_file)[0]
                img.renderVRT()
                print(f'write file: {out_file}.vrt')

    return out_file
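
A hedged usage sketch, with a hypothetical file name and the subset_dict keys documented in the docstring above:

# Hypothetical input file; crop it to a pixel window within the data coverage.
subset_dict = {'subset_x': [200, 800], 'subset_y': [100, 600]}
out_file = subset_file('velocity.h5', subset_dict)
# When run from the file's own directory, the result is written to sub_velocity.h5.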
Code Example #5
    def write2hdf5(self,
                   outputFile='ifgramStack.h5',
                   access_mode='w',
                   box=None,
                   xstep=1,
                   ystep=1,
                   compression=None,
                   extra_metadata=None):
        """Save/write an ifgramStackDict object into an HDF5 file with the structure defined in:

        https://mintpy.readthedocs.io/en/latest/api/data_structure/#ifgramstack

        Parameters: outputFile : str, name of the HDF5 file for the InSAR stack
                    access_mode : str, access mode of the output file, e.g. w, r+
                    box : tuple, subset range in (x0, y0, x1, y1)
                    xstep/ystep : int, number of looks in x/y direction for multilooking
                    compression : str or None, HDF5 dataset compression, e.g. lzf
                    extra_metadata : dict, extra metadata to be added into the output file
        Returns:    outputFile
        """

        self.pairs = sorted([pair for pair in self.pairsDict.keys()])
        self.dsNames = list(self.pairsDict[self.pairs[0]].datasetDict.keys())
        self.dsNames = [i for i in ifgramDatasetNames if i in self.dsNames]
        maxDigit = max([len(i) for i in self.dsNames])
        self.get_size(box=box, xstep=xstep, ystep=ystep)

        self.outputFile = outputFile
        with h5py.File(self.outputFile, access_mode) as f:
            print('create HDF5 file {} with {} mode'.format(
                self.outputFile, access_mode))

            ###############################
            # 3D datasets containing unwrapPhase, magnitude, coherence, connectComponent, wrapPhase, etc.
            for dsName in self.dsNames:
                dsShape = (self.numIfgram, self.length, self.width)
                dsDataType = np.float32
                dsCompression = compression
                if dsName in ['connectComponent']:
                    dsDataType = np.int16
                    dsCompression = 'lzf'

                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=dsCompression))
                ds = f.create_dataset(dsName,
                                      shape=dsShape,
                                      maxshape=(None, dsShape[1], dsShape[2]),
                                      dtype=dsDataType,
                                      chunks=True,
                                      compression=dsCompression)

                prog_bar = ptime.progressBar(maxValue=self.numIfgram)
                for i in range(self.numIfgram):
                    # read
                    ifgramObj = self.pairsDict[self.pairs[i]]
                    data = ifgramObj.read(dsName,
                                          box=box,
                                          xstep=xstep,
                                          ystep=ystep)[0]
                    # write
                    ds[i, :, :] = data
                    prog_bar.update(i + 1,
                                    suffix='{}_{}'.format(
                                        self.pairs[i][0], self.pairs[i][1]))
                prog_bar.close()
                ds.attrs['MODIFICATION_TIME'] = str(time.time())

            ###############################
            # 2D dataset containing reference and secondary dates of all pairs
            dsName = 'date'
            dsDataType = np.string_
            dsShape = (self.numIfgram, 2)
            print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
                d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
            data = np.array(self.pairs, dtype=dsDataType)
            f.create_dataset(dsName, data=data)

            ###############################
            # 1D dataset containing perpendicular baseline of all pairs
            dsName = 'bperp'
            dsDataType = np.float32
            dsShape = (self.numIfgram, )
            print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
                d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
            # get bperp
            data = np.zeros(self.numIfgram, dtype=dsDataType)
            for i in range(self.numIfgram):
                ifgramObj = self.pairsDict[self.pairs[i]]
                data[i] = ifgramObj.get_perp_baseline(family=self.dsName0)
            # write
            f.create_dataset(dsName, data=data)

            ###############################
            # 1D dataset of bool values marking whether each interferogram is kept or dropped
            dsName = 'dropIfgram'
            dsDataType = np.bool_
            dsShape = (self.numIfgram, )
            print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
                d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
            data = np.ones(dsShape, dtype=dsDataType)
            f.create_dataset(dsName, data=data)

            ###############################
            # Attributes
            self.get_metadata()
            if extra_metadata:
                self.metadata.update(extra_metadata)
                print('add extra metadata: {}'.format(extra_metadata))

            # update metadata due to subset
            self.metadata = attr.update_attribute4subset(self.metadata, box)
            # update metadata due to multilook
            if xstep * ystep > 1:
                self.metadata = attr.update_attribute4multilook(
                    self.metadata, ystep, xstep)

            self.metadata['FILE_TYPE'] = self.name
            for key, value in self.metadata.items():
                f.attrs[key] = value

        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
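
Once written, the stack can be inspected directly with h5py; a short sketch assuming the default output name:

import h5py

with h5py.File('ifgramStack.h5', 'r') as f:
    print(f['unwrapPhase'].shape)  # (numIfgram, length, width)
    print(f['date'][0])            # reference/secondary dates of the first pair
    print(f.attrs['FILE_TYPE'])    # set from self.name above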
Code Example #6
    def write2hdf5(self,
                   outputFile='geometryRadar.h5',
                   access_mode='w',
                   box=None,
                   xstep=1,
                   ystep=1,
                   compression='lzf',
                   extra_metadata=None):
        """Save/write to HDF5 file with structure defined in:
            https://mintpy.readthedocs.io/en/latest/api/data_structure/#geometry
        """
        if len(self.datasetDict) == 0:
            print(
                'No dataset file path in the object, skip HDF5 file writing.')
            return None

        maxDigit = max([len(i) for i in geometryDatasetNames])
        length, width = self.get_size(box=box, xstep=xstep, ystep=ystep)

        self.outputFile = outputFile
        with h5py.File(self.outputFile, access_mode) as f:
            print('create HDF5 file {} with {} mode'.format(
                self.outputFile, access_mode))

            ###############################
            for dsName in self.dsNames:
                # 3D datasets containing bperp
                if dsName == 'bperp':
                    self.dateList = list(self.datasetDict[dsName].keys())
                    dsDataType = np.float32
                    self.numDate = len(self.dateList)
                    dsShape = (self.numDate, length, width)
                    ds = f.create_dataset(dsName,
                                          shape=dsShape,
                                          maxshape=(None, dsShape[1],
                                                    dsShape[2]),
                                          dtype=dsDataType,
                                          chunks=True,
                                          compression=compression)
                    print(
                        ('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                         ' with compression = {c}').format(d=dsName,
                                                           w=maxDigit,
                                                           t=str(dsDataType),
                                                           s=dsShape,
                                                           c=str(compression)))

                    print(
                        'read coarse grid baseline files and linear interpolate into full resolution ...'
                    )
                    prog_bar = ptime.progressBar(maxValue=self.numDate)
                    for i in range(self.numDate):
                        fname = self.datasetDict[dsName][self.dateList[i]]
                        data = read_isce_bperp_file(fname=fname,
                                                    full_shape=self.get_size(),
                                                    box=box,
                                                    xstep=xstep,
                                                    ystep=ystep)
                        ds[i, :, :] = data
                        prog_bar.update(i + 1, suffix=self.dateList[i])
                    prog_bar.close()

                    # Write the 1D dataset date to accompany the 3D bperp
                    dsName = 'date'
                    dsShape = (self.numDate, )
                    dsDataType = np.string_
                    print(('create dataset /{d:<{w}} of {t:<25}'
                           ' in size of {s}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape))
                    data = np.array(self.dateList, dtype=dsDataType)
                    ds = f.create_dataset(dsName, data=data)

                # 2D datasets containing height, latitude/longitude, range/azimuthCoord, incidenceAngle, shadowMask, etc.
                else:
                    dsDataType = np.float32
                    if dsName.lower().endswith('mask'):
                        dsDataType = np.bool_
                    dsShape = (length, width)
                    print(
                        ('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                         ' with compression = {c}').format(d=dsName,
                                                           w=maxDigit,
                                                           t=str(dsDataType),
                                                           s=dsShape,
                                                           c=str(compression)))

                    # read
                    data = np.array(self.read(family=dsName,
                                              box=box,
                                              xstep=xstep,
                                              ystep=ystep)[0],
                                    dtype=dsDataType)

                    # water body: -1 for water and 0 for land
                    # water mask:  0 for water and 1 for land
                    fname = os.path.basename(self.datasetDict[dsName])
                    if fname.startswith('waterBody') or fname.endswith('.wbd'):
                        data = data > -0.5
                        print((
                            '    input file "{}" is water body (-1/0 for water/land), '
                            'convert to water mask (0/1 for water/land).'.
                            format(fname)))

                    elif dsName == 'height':
                        noDataValueDEM = -32768
                        if np.any(data == noDataValueDEM):
                            data[data == noDataValueDEM] = np.nan
                            print(
                                '    convert no-data value for DEM {} to NaN.'.
                                format(noDataValueDEM))

                    elif dsName == 'rangeCoord' and xstep != 1:
                        print(
                            '    scale value of {:<15} by 1/{} due to multilooking'
                            .format(dsName, xstep))
                        data /= xstep

                    elif dsName == 'azimuthCoord' and ystep != 1:
                        print(
                            '    scale value of {:<15} by 1/{} due to multilooking'
                            .format(dsName, ystep))
                        data /= ystep

                    # write
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          chunks=True,
                                          compression=compression)

            ###############################
            # Generate datasets that do not exist in the binary file: incidenceAngle, slantRangeDistance
            for dsName in [
                    i for i in ['incidenceAngle', 'slantRangeDistance']
                    if i not in self.dsNames
            ]:
                # Calculate data
                data = None
                if dsName == 'incidenceAngle':
                    data = self.get_incidence_angle(box=box,
                                                    xstep=xstep,
                                                    ystep=ystep)
                elif dsName == 'slantRangeDistance':
                    data = self.get_slant_range_distance(box=box,
                                                         xstep=xstep,
                                                         ystep=ystep)

                # Write dataset
                if data is not None:
                    dsShape = data.shape
                    dsDataType = np.float32
                    print(
                        ('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                         ' with compression = {c}').format(d=dsName,
                                                           w=maxDigit,
                                                           t=str(dsDataType),
                                                           s=dsShape,
                                                           c=str(compression)))
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          dtype=dsDataType,
                                          chunks=True,
                                          compression=compression)

            ###############################
            # Attributes
            self.get_metadata()
            if extra_metadata:
                self.metadata.update(extra_metadata)
                print('add extra metadata: {}'.format(extra_metadata))

            # update due to subset
            self.metadata = attr.update_attribute4subset(self.metadata, box)
            # update due to multilook
            if xstep * ystep > 1:
                self.metadata = attr.update_attribute4multilook(
                    self.metadata, ystep, xstep)

            self.metadata['FILE_TYPE'] = self.name
            for key, value in self.metadata.items():
                f.attrs[key] = value

        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
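
The water-body branch converts ISCE's -1/0 convention into a boolean land mask with a single threshold; a self-contained illustration:

import numpy as np

wbd = np.array([[-1, 0], [0, -1]], dtype=np.float32)  # -1: water, 0: land
mask = wbd > -0.5                                     # False: water, True: land
assert mask.tolist() == [[False, True], [True, False]]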
Code Example #7
File: prep_fringe.py  Project: scottstanie/MintPy
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # translate input options
    processor = isce_utils.get_processor(inps.metaFile)
    src_box, geom_src_dir = read_vrt_info(os.path.join(inps.geomDir,
                                                       'lat.vrt'))

    # metadata
    meta = prepare_metadata(inps.metaFile, geom_src_dir, box=src_box)

    # subset - read pix_box for fringe file
    pix_box = subset.subset_input_dict2box(vars(inps), meta)[0]
    pix_box = ut.coordinate(meta).check_box_within_data_coverage(pix_box)
    print('input subset in y/x: {}'.format(pix_box))

    # subset - update src_box for isce file and meta
    src_box = (pix_box[0] + src_box[0], pix_box[1] + src_box[1],
               pix_box[2] + src_box[0], pix_box[3] + src_box[1])
    meta = attr.update_attribute4subset(meta, pix_box)
    print(
        'input subset in y/x with respect to the VRT file: {}'.format(src_box))

    ## output directory
    for dname in [inps.outDir, os.path.join(inps.outDir, 'inputs')]:
        os.makedirs(dname, exist_ok=True)

    ## output filename
    ts_file = os.path.join(inps.outDir, 'timeseries.h5')
    tcoh_file = os.path.join(inps.outDir, 'temporalCoherence.h5')
    ps_mask_file = os.path.join(inps.outDir, 'maskPS.h5')
    if 'Y_FIRST' in meta.keys():
        geom_file = os.path.join(inps.outDir, 'inputs/geometryGeo.h5')
    else:
        geom_file = os.path.join(inps.outDir, 'inputs/geometryRadar.h5')

    ## 1 - time-series (from fringe)
    prepare_timeseries(outfile=ts_file,
                       unw_file=inps.unwFile,
                       metadata=meta,
                       processor=processor,
                       baseline_dir=inps.baselineDir,
                       box=pix_box)

    ## 2 - temporal coherence and mask for PS (from fringe)
    prepare_temporal_coherence(outfile=tcoh_file,
                               infile=inps.cohFile,
                               metadata=meta,
                               box=pix_box)

    prepare_ps_mask(outfile=ps_mask_file,
                    infile=inps.psMaskFile,
                    metadata=meta,
                    box=pix_box)

    ## 3 - geometry (from SLC stacks before fringe, e.g. ISCE2)
    prepare_geometry(outfile=geom_file,
                     geom_dir=geom_src_dir,
                     box=src_box,
                     metadata=meta)

    return ts_file, tcoh_file, ps_mask_file, geom_file
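
The src_box update composes two crops: pix_box is relative to the already-cropped VRT window, so its corners are shifted by the window origin. A numeric illustration with made-up boxes:

# Made-up boxes in (x0, y0, x1, y1) order.
src_box = (50, 80, 1050, 1080)  # VRT window within the full ISCE grid
pix_box = (10, 20, 510, 520)    # user subset within that window
new_box = (pix_box[0] + src_box[0], pix_box[1] + src_box[1],
           pix_box[2] + src_box[0], pix_box[3] + src_box[1])
assert new_box == (60, 100, 560, 600)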
Code Example #8
File: save_kite.py  Project: scottstanie/MintPy
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    print('\n-------------------READ INPUTS -------------------')
    print('Read metadata from file: {}'.format(inps.file))
    attr = readfile.read_attribute(inps.file)

    # Extract subset if defined
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), attr)

    # output filename
    if not inps.outfile:
        inps.outfile = attr['PROJECT_NAME']

    # date1/2
    if attr['FILE_TYPE'] in ['timeseries', 'HDFEOS']:
        date1, date2 = inps.dset.split('_')
        inps.dset = date2

    elif attr['FILE_TYPE'] == 'ifgramStack':
        date1, date2 = inps.dset.split('-')[1].split('_')

    else:
        # velocity and *.unw files
        date1, date2 = ptime.yyyymmdd(attr['DATE12'].replace('_', '-').split('-'))
        if inps.dset.startswith('step'):
            date1 = inps.dset.split('step')[-1]
            date2 = date1
    print('First  InSAR date: {}'.format(date1))
    print('Second InSAR date: {}'.format(date2))

    # read data
    print('Read {} from file: {}'.format(inps.dset, inps.file))
    dis, attr = readfile.read(inps.file,
                              datasetName=inps.dset,
                              box=inps.pix_box)

    if attr['FILE_TYPE'] == 'timeseries':
        print('Read {} from file: {}'.format(date1, inps.file))
        dis -= readfile.read(inps.file, datasetName=date1, box=inps.pix_box)[0]

    # mask data
    if inps.mask_file is not None:
        mask = readfile.read(inps.mask_file, box=inps.pix_box)[0]
        print('Set data to NaN for pixels with zero value in file: {}'.format(
            inps.mask_file))
        dis[mask == 0] = np.nan

    # read geometry incidence / azimuth angle
    print('\nread incidence / azimuth angle from file: {}'.format(
        inps.geom_file))
    inc_angle = readfile.read(inps.geom_file,
                              datasetName='incidenceAngle',
                              box=inps.pix_box)[0]
    az_angle = readfile.read(inps.geom_file,
                             datasetName='azimuthAngle',
                             box=inps.pix_box)[0]
    print('Mean satellite incidence angle: {0:.2f}°'.format(
        np.nanmean(inc_angle)))
    print('Mean satellite heading   angle: {0:.2f}°\n'.format(
        90 - np.nanmean(az_angle)))

    # Update attributes
    if inps.subset_lat is not None or inps.subset_x is not None:
        attr = attribute.update_attribute4subset(attr, inps.pix_box)

    # create kite container
    scene = mintpy2kite(dis,
                        attr,
                        date1,
                        date2,
                        inc_angle,
                        az_angle,
                        out_file=inps.outfile)

    return scene
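
The heading print above derives the heading as 90° minus the mean azimuth angle; a worked number with made-up values:

import numpy as np

az_angle = np.array([-102.1, -101.9])  # made-up azimuth angles, in degrees
heading = 90 - np.nanmean(az_angle)    # same expression as in the print above
print('{:.2f}'.format(heading))        # 192.00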
Code Example #9
File: geometryStack.py  Project: geodesymiami/MiNoPy
    def write2hdf5(self,
                   outputFile='geometryRadar.h5',
                   access_mode='w',
                   box=None,
                   xstep=1,
                   ystep=1,
                   compression='lzf',
                   extra_metadata=None):
        '''
        /                        Root level
        Attributes               Dictionary for metadata. 'X/Y_FIRST/STEP' attribute for geocoded.
        /height                  2D array of float32 in size of (l, w   ) in meter.
        /latitude (azimuthCoord) 2D array of float32 in size of (l, w   ) in degree.
        /longitude (rangeCoord)  2D array of float32 in size of (l, w   ) in degree.
        /incidenceAngle          2D array of float32 in size of (l, w   ) in degree.
        /slantRangeDistance      2D array of float32 in size of (l, w   ) in meter.
        /azimuthAngle            2D array of float32 in size of (l, w   ) in degree. (optional)
        /shadowMask              2D array of bool    in size of (l, w   ).           (optional)
        /waterMask               2D array of bool    in size of (l, w   ).           (optional)
        /bperp                   3D array of float32 in size of (n, l, w) in meter   (optional)
        /date                    1D array of string  in size of (n,     ) in YYYYMMDD (optional)
        ...
        '''
        if len(self.datasetDict) == 0:
            print(
                'No dataset file path in the object, skip HDF5 file writing.')
            return None

        self.outputFile = outputFile
        f = h5py.File(self.outputFile, access_mode)
        print('create HDF5 file {} with {} mode'.format(
            self.outputFile, access_mode))

        #groupName = self.name
        #group = f.create_group(groupName)
        #print('create group   /{}'.format(groupName))

        maxDigit = max([len(i) for i in geometryDatasetNames])
        length, width = self.get_size(box=box, xstep=xstep, ystep=ystep)
        #self.length, self.width = self.get_size()

        ###############################
        for dsName in self.dsNames:
            # 3D datasets containing bperp
            if dsName == 'bperp':
                self.dateList = list(self.datasetDict[dsName].keys())
                dsDataType = dataType
                self.numDate = len(self.dateList)
                dsShape = (self.numDate, length, width)
                ds = f.create_dataset(dsName,
                                      shape=dsShape,
                                      maxshape=(None, dsShape[1], dsShape[2]),
                                      dtype=dsDataType,
                                      chunks=True,
                                      compression=compression)
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))

                print(
                    'read coarse grid baseline files and linear interpolate into full resolution ...'
                )
                prog_bar = ptime.progressBar(maxValue=self.numDate)
                for i in range(self.numDate):
                    fname = self.datasetDict[dsName][self.dateList[i]]
                    data = read_isce_bperp_file(fname=fname,
                                                full_shape=self.get_size(),
                                                box=box,
                                                xstep=xstep,
                                                ystep=ystep)
                    ds[i, :, :] = data
                    prog_bar.update(i + 1, suffix=self.dateList[i])
                prog_bar.close()

                # Write 1D dataset date
                dsName = 'date'
                dsShape = (self.numDate, )
                dsDataType = np.string_
                print(('create dataset /{d:<{w}} of {t:<25}'
                       ' in size of {s}').format(d=dsName,
                                                 w=maxDigit,
                                                 t=str(dsDataType),
                                                 s=dsShape))
                data = np.array(self.dateList, dtype=dsDataType)
                if dsName not in f.keys():
                    ds = f.create_dataset(dsName, data=data)

            # 2D datasets containing height, latitude, incidenceAngle, shadowMask, etc.
            else:
                dsDataType = dataType
                if dsName.lower().endswith('mask'):
                    dsDataType = np.bool_
                dsShape = (length, width)
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))

                data = np.array(self.read(family=dsName, box=box)[0],
                                dtype=dsDataType)
                if dsName not in f.keys():
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          chunks=True,
                                          compression=compression)

        ###############################
        # Generate datasets that do not exist in the binary file: incidenceAngle, slantRangeDistance
        for dsName in [
                i for i in ['incidenceAngle', 'slantRangeDistance']
                if i not in self.dsNames
        ]:
            # Calculate data
            data = None
            if dsName == 'incidenceAngle':
                data = self.get_incidence_angle(box=box,
                                                xstep=xstep,
                                                ystep=ystep)
            elif dsName == 'slantRangeDistance':
                data = self.get_slant_range_distance(box=box,
                                                     xstep=xstep,
                                                     ystep=ystep)

            # Write dataset
            if data is not None:
                dsShape = data.shape
                dsDataType = dataType
                print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                       ' with compression = {c}').format(d=dsName,
                                                         w=maxDigit,
                                                         t=str(dsDataType),
                                                         s=dsShape,
                                                         c=str(compression)))
                if dsName not in f.keys():
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          dtype=dataType,
                                          chunks=True,
                                          compression=compression)

        ###############################
        # Attributes
        self.get_metadata()
        if extra_metadata:
            self.metadata.update(extra_metadata)
            #print('add extra metadata: {}'.format(extra_metadata))
        self.metadata = attr.update_attribute4subset(self.metadata, box)
        self.metadata['FILE_TYPE'] = self.name
        for key, value in self.metadata.items():
            f.attrs[key] = value

        f.close()
        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
Code Example #10
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # 1. Read metadata and data
    k = readfile.read_attribute(inps.file)['FILE_TYPE']
    if k == 'timeseries' and inps.dset and '_' in inps.dset:
        inps.ref_date, inps.dset = inps.dset.split('_')
    else:
        inps.ref_date = None
    atr = readfile.read_attribute(inps.file, datasetName=inps.dset)

    # pix_box
    inps.pix_box = subset.subset_input_dict2box(vars(inps), atr)[0]
    inps.pix_box = ut.coordinate(atr).check_box_within_data_coverage(
        inps.pix_box)
    data_box = (0, 0, int(atr['WIDTH']), int(atr['LENGTH']))
    print('data   coverage in y/x: {}'.format(data_box))
    print('subset coverage in y/x: {}'.format(inps.pix_box))
    atr = attr.update_attribute4subset(atr, inps.pix_box)

    # read data
    data = readfile.read(inps.file, datasetName=inps.dset, box=inps.pix_box)[0]
    if k == 'timeseries' and inps.ref_date:
        data -= readfile.read(inps.file,
                              datasetName=inps.ref_date,
                              box=inps.pix_box)[0]

    # mask
    mask = pp.read_mask(inps.file,
                        mask_file=inps.mask_file,
                        datasetName=inps.dset,
                        box=inps.pix_box)[0]
    if mask is not None:
        print('masking out pixels with zero value in file: {}'.format(
            inps.mask_file))
        data[mask == 0] = np.nan
    if inps.zero_mask:
        print('masking out pixels with zero value')
        data[data == 0] = np.nan
    del mask

    # Data Operation - Display Unit & Rewrapping
    (data, inps.disp_unit, inps.disp_scale,
     inps.wrap) = pp.scale_data4disp_unit_and_rewrap(
         data,
         metadata=atr,
         disp_unit=inps.disp_unit,
         wrap=inps.wrap,
         wrap_range=inps.wrap_range)
    if inps.wrap:
        inps.vlim = inps.wrap_range

    # 2. Generate Google Earth KMZ
    # 2.1 Common settings
    # disp min/max and colormap
    cmap_lut = 256
    if not inps.vlim:
        cmap_lut, inps.vlim = pp.auto_adjust_colormap_lut_and_disp_limit(data)
    inps.colormap = pp.auto_colormap_name(atr, inps.colormap)
    inps.colormap = pp.ColormapExt(inps.colormap, cmap_lut).colormap
    inps.norm = colors.Normalize(vmin=inps.vlim[0], vmax=inps.vlim[1])

    # Output filename
    inps.fig_title = pp.auto_figure_title(inps.file,
                                          datasetNames=inps.dset,
                                          inps_dict=vars(inps))
    if not inps.outfile:
        inps.outfile = '{}.kmz'.format(inps.fig_title)
    inps.outfile = os.path.abspath(inps.outfile)

    # 2.2 Write KMZ file
    if 'Y_FIRST' in atr.keys():
        # create ground overlay KML for file in geo-coord
        write_kmz_overlay(
            data,
            meta=atr,
            out_file=inps.outfile,
            inps=inps,
        )

    else:
        # create placemark KML for file in radar-coord
        write_kmz_placemark(
            data,
            meta=atr,
            out_file=inps.outfile,
            geom_file=inps.geom_file,
            inps=inps,
        )

    return inps.outfile
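
The rewrapping step folds values into a fixed interval the way interferometric fringes repeat; a minimal stand-alone version of the idea (not MintPy's exact implementation):

import numpy as np

def rewrap(data, wrap_range=(-np.pi, np.pi)):
    # Map values into [vmin, vmax) by modular arithmetic.
    vmin, vmax = wrap_range
    return vmin + np.mod(data - vmin, vmax - vmin)

print(rewrap(np.array([4.0, -4.0])))  # both fold back into [-pi, pi)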
Code Example #11
    def write2hdf5(self,
                   outputFile='slcStack.h5',
                   access_mode='a',
                   box=None,
                   xstep=1,
                   ystep=1,
                   compression=None,
                   extra_metadata=None):
        '''Save/write an slcStackDict object into an HDF5 file with the structure below:

        /                  Root level
        Attributes         Dictionary for metadata
        /date              1D array of string  in size of (m,     ) in YYYYMMDD format for the acquisition dates
        /bperp             1D array of float32 in size of (m,     ) in meter.

        Parameters: outputFile : str, Name of the HDF5 file for the SLC stack
                    access_mode : str, access mode of output File, e.g. w, r+
                    box : tuple, subset range in (x0, y0, x1, y1)
                    extra_metadata : dict, extra metadata to be added into output file
        Returns:    outputFile
        '''
        self.outputFile = outputFile
        f = h5py.File(self.outputFile, access_mode)
        print('create HDF5 file {} with {} mode'.format(
            self.outputFile, access_mode))

        # self.pairs = sorted([pair for pair in self.pairsDict.keys()])
        # self.dsNames = list(self.pairsDict[self.pairs[0]].datasetDict.keys())
        self.dates = sorted([date for date in self.pairsDict.keys()])
        self.dsNames = list(self.pairsDict[self.dates[0]].datasetDict.keys())
        self.dsNames = [i for i in slcDatasetNames if i in self.dsNames]
        maxDigit = max([len(i) for i in self.dsNames])
        self.get_size(box=box, xstep=xstep, ystep=ystep)

        self.bperp = np.zeros(self.numSlc)

        ###############################

        # 3D datasets containing slc.
        for dsName in self.dsNames:
            dsShape = (self.numSlc, self.length, self.width)
            dsDataType = dataType
            dsCompression = compression
            if dsName in ['connectComponent']:
                dsDataType = np.int16
                dsCompression = 'lzf'

            print(('create dataset /{d:<{w}} of {t:<25} in size of {s}'
                   ' with compression = {c}').format(d=dsName,
                                                     w=maxDigit,
                                                     t=str(dsDataType),
                                                     s=dsShape,
                                                     c=dsCompression))

            if dsName in f.keys():
                ds = f[dsName]
            else:
                ds = f.create_dataset(dsName,
                                      shape=dsShape,
                                      maxshape=(None, dsShape[1], dsShape[2]),
                                      dtype=dsDataType,
                                      chunks=True,
                                      compression=dsCompression)

                prog_bar = ptime.progressBar(maxValue=self.numSlc)

                for i in range(self.numSlc):
                    slcObj = self.pairsDict[self.dates[i]]
                    # fname, metadata = slcObj.read(dsName, box=box)
                    fname, metadata = slcObj.read(dsName)

                    if not box:
                        box = (0, 0, self.width, self.length)
                    dsSlc = gdal.Open(fname + '.vrt', gdal.GA_ReadOnly)
                    ds[i, :, :] = dsSlc.GetRasterBand(1).ReadAsArray(
                        int(box[0]), int(box[1]), self.width, self.length)

                    self.bperp[i] = slcObj.get_perp_baseline()
                    prog_bar.update(i + 1,
                                    suffix='{}'.format(self.dates[i][0]))

                prog_bar.close()
            ds.attrs['MODIFICATION_TIME'] = str(time.time())

        ###############################
        # 1D dataset containing dates of all images
        dsName = 'date'
        dsDataType = np.string_
        dsShape = (self.numSlc, )
        print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
            d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
        data = np.array(self.dates, dtype=dsDataType)
        if dsName not in f.keys():
            f.create_dataset(dsName, data=data)

        ###############################
        # 1D dataset containing perpendicular baseline of all pairs
        dsName = 'bperp'
        dsDataType = np.float32
        dsShape = (self.numSlc, )
        print('create dataset /{d:<{w}} of {t:<25} in size of {s}'.format(
            d=dsName, w=maxDigit, t=str(dsDataType), s=dsShape))
        data = np.array(self.bperp, dtype=dsDataType)
        if dsName not in f.keys():
            f.create_dataset(dsName, data=data)

        ###############################
        # Attributes
        self.get_metadata()
        if extra_metadata:
            self.metadata.update(extra_metadata)
            #print('add extra metadata: {}'.format(extra_metadata))
        self.metadata = attr.update_attribute4subset(self.metadata, box)

        # update metadata due to multilook
        if xstep * ystep > 1:
            self.metadata = attr.update_attribute4multilook(
                self.metadata, ystep, xstep)

        self.metadata['FILE_TYPE'] = 'timeseries'  #'slc'
        for key, value in self.metadata.items():
            f.attrs[key] = value

        f.close()
        print('Finished writing to {}'.format(self.outputFile))
        return self.outputFile
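
GDAL's windowed read takes (xoff, yoff, xsize, ysize), which is why the box corners are converted to offsets and sizes above; a hedged sketch of the same read outside the class (file name hypothetical):

from osgeo import gdal

box = (0, 0, 100, 200)  # (x0, y0, x1, y1), as elsewhere in these examples
ds = gdal.Open('20190101.slc.full.vrt', gdal.GA_ReadOnly)
data = ds.GetRasterBand(1).ReadAsArray(box[0], box[1],
                                       box[2] - box[0], box[3] - box[1])
print(data.shape)  # (200, 100): rows = ysize, cols = xsize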