Example #1
    def get_los_geometry(self, insar_obj, print_msg=False):
        lat, lon = self.get_stat_lat_lon(print_msg=print_msg)

        # get LOS geometry
        if isinstance(insar_obj, str):
            # geometry file
            atr = readfile.read_attribute(insar_obj)
            coord = ut.coordinate(atr, lookup_file=insar_obj)
            y, x = coord.geo2radar(lat, lon, print_msg=print_msg)[0:2]
            box = (x, y, x + 1, y + 1)
            inc_angle = readfile.read(insar_obj,
                                      datasetName='incidenceAngle',
                                      box=box,
                                      print_msg=print_msg)[0][0, 0]
            az_angle = readfile.read(insar_obj,
                                     datasetName='azimuthAngle',
                                     box=box,
                                     print_msg=print_msg)[0][0, 0]
            head_angle = ut.azimuth2heading_angle(az_angle)
        elif isinstance(insar_obj, dict):
            # use mean inc/head_angle from metadata
            inc_angle = ut.incidence_angle(insar_obj,
                                           dimension=0,
                                           print_msg=print_msg)
            head_angle = float(insar_obj['HEADING'])
            # for old files where los.rdr band 2 (azimuth angle) was read into HEADING directly
            if (head_angle + 180.) > 45.:
                head_angle = ut.azimuth2heading_angle(head_angle)
        else:
            raise ValueError(
                'input insar_obj is neither str nor dict: {}'.format(insar_obj))
        return inc_angle, head_angle
Example #2
def read_timeseries_yx(y, x, ts_file, lookup_file=None, ref_y=None, ref_x=None):
    """ Read time-series of one pixel with input y/x
    Parameters: y/x         : int, row/column number of interest
                ts_file     : string, filename of time-series HDF5 file
                lookup_file : string, filename of lookup table file
                ref_y/x     : int, row/column number of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects, e.g. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # read date
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    box = (x, y, x+1, y+1)
    dis = readfile.read(ts_file, box=box)[0]
    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x+1, ref_y+1)
        dis -= readfile.read(ts_file, box=ref_box)[0]
    # start at zero
    dis -= dis[0]
    return dates, dis
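
Note: the box tuple above selects a single pixel in (x0, y0, x1, y1) order, i.e. column first, row second. A minimal sketch of that convention with a plain numpy array standing in for the HDF5 dataset (array and indices are made up for illustration):

import numpy as np

data = np.arange(20).reshape(4, 5)   # 4 rows (y), 5 columns (x)
y, x = 2, 3                          # pixel of interest
box = (x, y, x + 1, y + 1)           # (x0, y0, x1, y1): a 1x1 window
subset = data[box[1]:box[3], box[0]:box[2]]
print(subset)                        # [[13]]: the value at row 2, column 3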
Example #3
def mask_file(fname, mask_file, out_file=None, inps=None):
    """Mask input fname with mask_file
    Inputs:
        fname/mask_file - string, path of input file and mask file
        out_file - string, path of output file (default: add '_masked' suffix)
        inps - Namespace including the following options:
               subset_x/y - list of 2 ints, subset in x/y direction
               threshold - float, threshold/minValue to generate mask
    Output:
        out_file - string, path of output file
    """
    if not inps:
        inps = cmd_line_parse()

    if not out_file:
        out_file = '{}_masked{}'.format(
            os.path.splitext(fname)[0],
            os.path.splitext(fname)[1])

    # read mask_file
    mask = readfile.read(mask_file)[0]
    mask = update_mask_with_inps(mask, inps)

    # masking input file
    dsNames = readfile.get_dataset_list(fname)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(fname, datasetName=dsName, print_msg=False)[0]
        if dsName not in ['coherence']:
            print('masking {d:<{w}} from {f} ...'.format(d=dsName,
                                                         w=maxDigit,
                                                         f=fname))
            data = mask_matrix(data, mask, fill_value=inps.fill_value)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, ref_file=fname)
    return out_file
Example #4
def read_timeseries_lalo(lat, lon, ts_file, lookup_file=None, ref_lat=None, ref_lon=None):
    """ Read time-series of one pixel with input lat/lon
    Parameters: lat/lon     : float, latitude/longitude
                ts_file     : string, filename of time-series HDF5 file
                lookup_file : string, filename of lookup table file
                ref_lat/lon : float, latitude/longitude of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects, e.g. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # read date
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    coord = coordinate(obj.metadata, lookup_file=lookup_file)
    y, x = coord.geo2radar(lat, lon)[0:2]
    box = (x, y, x+1, y+1)
    dis = readfile.read(ts_file, box=box)[0]
    # reference pixel
    if ref_lat is not None:
        ref_y, ref_x = coord.geo2radar(ref_lat, ref_lon)[0:2]
        ref_box = (ref_x, ref_y, ref_x+1, ref_y+1)
        dis -= readfile.read(ts_file, box=ref_box)[0]
    # start at zero
    dis -= dis[0]
    return dates, dis
Example #5
def set_initial_map():
    global d_v, h5, k, dateList, inps, data_lim

    # Initial Map
    print(str(dateList))
    d_v = readfile.read(inps.timeseries_file, datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(inps.timeseries_file, datasetName=inps.ref_date)[0]*inps.unit_fac
        d_v -= inps.ref_d_v

    if mask is not None:
        d_v = mask_matrix(d_v, mask)

    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]

    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim

    print('Initial data range: ' + str(data_lim))
    print('Display data range: ' + str(inps.ylim_mat))
Example #6
def add_file(fnames, out_file=None):
    """Generate sum of all input files
    Parameters: fnames : list of str, path/name of input files to be added
                out_file : str, optional, path/name of output file
    Returns:    out_file : str, path/name of output file
    Example:    'mask_all.h5' = add_file(['mask_1.h5','mask_2.h5','mask_3.h5'], 'mask_all.h5')
    """
    # Default output file name
    ext = os.path.splitext(fnames[0])[1]
    if not out_file:
        out_file = os.path.splitext(fnames[0])[0]
        for i in range(1, len(fnames)):
            out_file += '_plus_' + os.path.splitext(os.path.basename(
                fnames[i]))[0]
        out_file += ext

    atr = readfile.read_attribute(fnames[0])
    dsNames = readfile.get_dataset_list(fnames[0])
    dsDict = {}
    for dsName in dsNames:
        print('adding {} ...'.format(dsName))
        data = readfile.read(fnames[0], datasetName=dsName)[0]
        for i in range(1, len(fnames)):
            d = readfile.read(fnames[i], datasetName=dsName)[0]
            data = add_matrix(data, d)
        dsDict[dsName] = data
    writefile.write(dsDict,
                    out_file=out_file,
                    metadata=atr,
                    ref_file=fnames[0])
    return out_file
Example #7
    def prepare_regular_grid_interpolator(self):
        """Prepare aux data for RGI module"""
        # source points in regular grid
        src_length = int(self.src_metadata['LENGTH'])
        src_width = int(self.src_metadata['WIDTH'])
        self.src_pts = (np.arange(src_length), np.arange(src_width))

        # destination points
        dest_y = readfile.read(self.file, datasetName='azimuthCoord')[0]
        dest_x = readfile.read(self.file, datasetName='rangeCoord')[0]
        if 'SUBSET_XMIN' in self.src_metadata.keys():
            print('input data file was cropped before.')
            dest_y[dest_y != 0.] -= float(self.src_metadata['SUBSET_YMIN'])
            dest_x[dest_x != 0.] -= float(self.src_metadata['SUBSET_XMIN'])
        self.interp_mask = np.multiply(np.multiply(dest_y > 0, dest_y < src_length),
                                       np.multiply(dest_x > 0, dest_x < src_width))
        self.dest_pts = np.hstack((dest_y[self.interp_mask].reshape(-1, 1),
                                   dest_x[self.interp_mask].reshape(-1, 1)))

        # destination data size
        self.length = int(self.lut_metadata['LENGTH'])
        self.width = int(self.lut_metadata['WIDTH'])
        lat0 = float(self.lut_metadata['Y_FIRST'])
        lon0 = float(self.lut_metadata['X_FIRST'])
        lat_step = float(self.lut_metadata['Y_STEP'])
        lon_step = float(self.lut_metadata['X_STEP'])
        self.laloStep = (lat_step, lon_step)
        self.SNWE = (lat0 + lat_step * (self.length - 1),
                     lat0,
                     lon0,
                     lon0 + lon_step * (self.width - 1))
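
Note: the SNWE tuple above is built from the upper-left corner (Y_FIRST/X_FIRST) and the negative latitude step of a north-up grid. A small worked sketch with made-up metadata values:

length, width = 1000, 800
lat0, lon0 = 34.0, -118.0            # Y_FIRST / X_FIRST: upper-left corner
lat_step, lon_step = -0.001, 0.001   # Y_STEP is negative for north-up grids

SNWE = (lat0 + lat_step * (length - 1),  # South: latitude of the last row
        lat0,                            # North: latitude of the first row
        lon0,                            # West:  longitude of the first column
        lon0 + lon_step * (width - 1))   # East:  longitude of the last column
print(SNWE)                              # ~(33.001, 34.0, -118.0, -117.201)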
Example #8
def read_timeseries_yx(y, x, ts_file, ref_y=None, ref_x=None, win_size=1):
    """ Read time-series of one pixel with input y/x
    Parameters: y/x         : int, row/column number of interest
                ts_file     : string, filename of time-series HDF5 file
                ref_y/x     : int, row/column number of reference pixel
    Returns:    dates : 1D np.array of datetime.datetime objects, e.g. datetime.datetime(2010, 10, 20, 0, 0)
                dis   : 1D np.array of float in meter
    """
    # read date
    obj = timeseries(ts_file)
    obj.open(print_msg=False)
    dates = ptime.date_list2vector(obj.dateList)[0]
    dates = np.array(dates)

    # read displacement
    print('input y / x: {} / {}'.format(y, x))
    box = (x, y, x + 1, y + 1)
    dis = readfile.read(ts_file, box=box)[0]
    if win_size != 1:
        buf = int(win_size / 2)
        box_win = (x - buf, y - buf, x + buf + 1, y + buf + 1)
        dis_win = readfile.read(ts_file, box=box_win)[0]
        dis = np.nanmean(dis_win.reshape((obj.numDate, -1)), axis=1)

    # reference pixel
    if ref_y is not None:
        ref_box = (ref_x, ref_y, ref_x + 1, ref_y + 1)
        dis -= readfile.read(ts_file, box=ref_box)[0]

    # start at zero
    dis -= dis[0]
    return dates, dis
Example #9
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
    print('-'*50)
    print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
    print('-'*50)
    atr = readfile.read_attribute(fname)

    # Check Sensor Type
    platform = atr['PLATFORM']
    print('platform: '+platform)
    if not platform.lower() in ['env', 'envisat']:
        print('No need to correct LOD for '+platform)
        return

    # output file name
    if not out_file:
        out_file = '{}_LODcor{}'.format(os.path.splitext(fname)[0], os.path.splitext(fname)[1])

    # Get LOD ramp rate from empirical model
    if not rg_dist_file:
        print('calculate range distance from file metadata')
        rg_dist = get_relative_range_distance(atr)
    else:
        print('read range distance from file: %s' % (rg_dist_file))
        rg_dist = readfile.read(rg_dist_file, datasetName='slantRangeDistance', print_msg=False)[0]
        rg_dist -= rg_dist[int(atr['REF_Y']), int(atr['REF_X'])]
    ramp_rate = np.array(rg_dist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input fname
    range2phase = -4*np.pi / float(atr['WAVELENGTH'])
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        # read
        obj = timeseries(fname)
        obj.open()
        data = obj.read()

        # correct LOD
        diff_year = np.array(obj.yearList)
        diff_year -= diff_year[obj.refIndex]
        for i in range(data.shape[0]):
            data[i, :, :] -= ramp_rate * diff_year[i]

        # write
        obj_out = timeseries(out_file)
        obj_out.write2hdf5(data, refFile=fname)

    elif k in ['.unw']:
        data, atr = readfile.read(fname)

        dates = ptime.yyyymmdd2years(ptime.yyyymmdd(atr['DATE12'].split('-')))
        dt = dates[1] - dates[0]
        data -= ramp_rate * range2phase * dt

        writefile.write(data, out_file=out_file, metadata=atr)
    else:
        print('No need to correct LOD for %s file' % (k))
    return out_file
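
Note: a hedged numeric sketch of the empirical LOD model used above (Marinkovic and Larsen, 2013), where the drift rate is 3.87e-7 times the relative range distance per year. The range distance and time span are made-up values; 0.0562 m is the nominal Envisat C-band wavelength:

import numpy as np

rg_dist = 50e3                        # 50 km from the reference pixel [m]
ramp_rate = rg_dist * 3.87e-7         # drift rate [m/year]
dt = 2.5                              # time span [years]
range2phase = -4 * np.pi / 0.0562     # meter -> radian conversion
print(ramp_rate * dt)                 # drift in meters: ~0.048
print(ramp_rate * dt * range2phase)   # the same drift in radians: ~-10.8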
Example #10
def extract_geometry_metadata(geom_dir, metadata=dict()):
    """extract metadata from geometry files"""

    def get_nonzero_row_number(data, buffer=2):
        """Find the first and last row number of rows without zero value
        for multiple swaths data
        """
        if np.all(data):
            r0, r1 = 0 + buffer, -1 - buffer
        else:
            row_flag = np.sum(data != 0., axis=1) == data.shape[1]
            row_idx = np.where(row_flag)[0]
            r0, r1 = row_idx[0] + buffer, row_idx[-1] - buffer
        return r0, r1

    # grab existing files
    geom_files = [os.path.join(os.path.abspath(geom_dir), '{}.rdr'.format(i)) 
                  for i in ['hgt','lat','lon','los']]
    geom_files = [i for i in geom_files if os.path.isfile(i)]
    print('extract metadata from geometry files: {}'.format(
        [os.path.basename(i) for i in geom_files]))

    # get A/RLOOKS
    metadata = extract_multilook_number(geom_dir, metadata)

    # update pixel_size for multilooked data
    metadata['rangePixelSize'] *= metadata['RLOOKS']
    metadata['azimuthPixelSize'] *= metadata['ALOOKS']

    # get LAT/LON_REF1/2/3/4 and HEADING into metadata
    for geom_file in geom_files:
        if 'lat' in os.path.basename(geom_file):
            data = readfile.read(geom_file)[0]
            r0, r1 = get_nonzero_row_number(data)
            metadata['LAT_REF1'] = str(data[r0, 0])
            metadata['LAT_REF2'] = str(data[r0, -1])
            metadata['LAT_REF3'] = str(data[r1, 0])
            metadata['LAT_REF4'] = str(data[r1, -1])

        if 'lon' in os.path.basename(geom_file):
            data = readfile.read(geom_file)[0]
            r0, r1 = get_nonzero_row_number(data)
            metadata['LON_REF1'] = str(data[r0, 0])
            metadata['LON_REF2'] = str(data[r0, -1])
            metadata['LON_REF3'] = str(data[r1, 0])
            metadata['LON_REF4'] = str(data[r1, -1])

        if 'los' in os.path.basename(geom_file):
            data = readfile.read(geom_file, datasetName='az')[0]
            data[data == 0.] = np.nan
            az_angle = np.nanmean(data)
            # convert isce azimuth angle to roipac orbit heading angle
            head_angle = -1 * (270 + az_angle)
            head_angle -= np.round(head_angle / 360.) * 360.
            metadata['HEADING'] = str(head_angle)
    return metadata
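
Note: the heading computation at the end converts the isce azimuth angle to the roipac orbit heading angle and wraps the result into [-180, 180]. A minimal sketch with a made-up azimuth angle:

import numpy as np

az_angle = -102.0                                 # isce azimuth angle [deg]
head_angle = -1 * (270 + az_angle)                # roipac heading: -168.0
head_angle -= np.round(head_angle / 360.) * 360.  # wrap into [-180, 180]
print(head_angle)                                 # -168.0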
Example #11
def read_aux_subset2inps(inps):
    # Convert All Inputs into subset_y/x/lat/lon
    # Input Priority: subset_y/x/lat/lon > reference > template > tight
    if all(not i for i in
           [inps.subset_x, inps.subset_y, inps.subset_lat, inps.subset_lon]):
        # 1. Read subset info from Reference File
        if inps.reference:
            ref_atr = readfile.read_attribute(inps.reference)
            pix_box, geo_box = get_coverage_box(ref_atr)
            print('using subset info from ' + inps.reference)

        # 2. Read subset info from template options
        elif inps.template_file:
            pix_box, geo_box = read_subset_template2box(inps.template_file)
            print('using subset info from ' + inps.template_file)

        # 3. Use subset from tight info
        elif inps.tight:
            inps.lookup_file = ut.get_lookup_file(inps.lookup_file)
            if not inps.lookup_file:
                raise Exception(
                    'No lookup file found! Cannot use the --tight option without it.')

            atr_lut = readfile.read_attribute(inps.lookup_file)
            coord = ut.coordinate(atr_lut)
            if 'Y_FIRST' in atr_lut.keys():
                rg_lut = readfile.read(inps.lookup_file,
                                       datasetName='range')[0]
                rg_unique, rg_pos = np.unique(rg_lut, return_inverse=True)
                idx_row, idx_col = np.where(
                    rg_lut != rg_unique[np.bincount(rg_pos).argmax()])
                pix_box = (np.min(idx_col) - 10, np.min(idx_row) - 10,
                           np.max(idx_col) + 10, np.max(idx_row) + 10)
                geo_box = coord.box_pixel2geo(pix_box)
                del rg_lut
            else:
                lat = readfile.read(inps.lookup_file,
                                    datasetName='latitude')[0]
                lon = readfile.read(inps.lookup_file,
                                    datasetName='longitude')[0]
                geo_box = (np.nanmin(lon), np.nanmax(lat), np.nanmax(lon),
                           np.nanmin(lat))
                pix_box = None
                del lat, lon
        else:
            raise Exception('No subset inputs found!')

        # Update subset_y/x/lat/lon
        inps = subset_box2inps(inps, pix_box, geo_box)
    return inps
Example #12
def get_nonzero_phase_closure(ifgram_file,
                              out_file=None,
                              thres=0.1,
                              unwDatasetName='unwrapPhase'):
    """Calculate/Read number of non-zero phase closure
    Parameters: ifgram_file : string, path of ifgram stack file
                out_file    : string, path of num non-zero phase closure file
    Returns:    num_nonzero_closure : 2D np.array in size of (length, width)
    """
    if not out_file:
        out_file = 'numNonzeroPhaseClosure_{}.h5'.format(unwDatasetName)
    if os.path.isfile(out_file) and readfile.read_attribute(out_file):
        print('1. read number of nonzero phase closure from file: {}'.format(
            out_file))
        num_nonzero_closure = readfile.read(out_file)[0]
    else:
        obj = ifgramStack(ifgram_file)
        obj.open(print_msg=False)
        length, width = obj.length, obj.width

        ref_phase = obj.get_reference_phase(unwDatasetName=unwDatasetName,
                                            dropIfgram=False)
        C = obj.get_design_matrix4triplet(
            obj.get_date12_list(dropIfgram=False))

        # calculate phase closure line by line to save memory usage
        num_nonzero_closure = np.zeros((length, width), np.float32)
        print(
            '1. calculating phase closure of all pixels from dataset - {} ...'.
            format(unwDatasetName))
        line_step = 10
        num_loop = int(np.ceil(length / line_step))
        prog_bar = ptime.progressBar(maxValue=num_loop)
        for i in range(num_loop):
            # read phase
            i0, i1 = i * line_step, min(length, (i + 1) * line_step)
            box = (0, i0, width, i1)
            pha_data = ifginv.read_unwrap_phase(obj,
                                                box,
                                                ref_phase,
                                                unwDatasetName=unwDatasetName,
                                                dropIfgram=False,
                                                print_msg=False)
            # calculate phase closure
            pha_closure = np.dot(C, pha_data)
            pha_closure = np.abs(pha_closure - ut.wrap(pha_closure))
            # get number of non-zero phase closure
            num_nonzero = np.sum(pha_closure >= thres, axis=0)
            num_nonzero_closure[i0:i1, :] = num_nonzero.reshape(i1 - i0, width)
            prog_bar.update(i + 1,
                            every=1,
                            suffix='{}/{} lines'.format((i + 1) * line_step,
                                                        length))
        prog_bar.close()

        atr = dict(obj.metadata)
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = '1'
        writefile.write(num_nonzero_closure, out_file=out_file, metadata=atr)
    return num_nonzero_closure
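
Note: the closure calculation applies the triplet design matrix C to the unwrapped phases; for a consistent triplet, ph12 + ph23 - ph13 is a multiple of 2*pi, so |closure - wrap(closure)| stays near zero. A self-contained sketch with one triplet and made-up phases (the wrap helper mimics ut.wrap and may differ at the boundaries):

import numpy as np

def wrap(pha):
    return pha - np.round(pha / (2 * np.pi)) * 2 * np.pi

C = np.array([[1., 1., -1.]])          # one triplet: ph12 + ph23 - ph13
pha = np.array([[2.1], [1.3], [3.4]])  # unwrapped phase per interferogram
closure = np.dot(C, pha)               # 2.1 + 1.3 - 3.4 = 0.0
print(np.abs(closure - wrap(closure)) >= 0.1)  # [[False]]: consistent triplet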
Example #13
    def generate_temporal_coherence_mask(self):
        """Generate reliable pixel mask from temporal coherence"""
        geom_file = ut.check_loaded_dataset(self.workDir, print_msg=False)[2]
        tcoh_file = 'temporalCoherence.h5'
        mask_file = 'maskTempCoh.h5'
        tcoh_min = self.template['pysar.networkInversion.minTempCoh']

        scp_args = '{} -m {} -o {} --shadow {}'.format(tcoh_file, tcoh_min,
                                                       mask_file, geom_file)
        print('generate_mask.py', scp_args)

        # update mode: run only if:
        # 1) output file exists and newer than input file, AND
        # 2) all config keys are the same
        config_keys = ['pysar.networkInversion.minTempCoh']
        print('update mode: ON')
        flag = 'skip'
        if ut.run_or_skip(out_file=mask_file,
                          in_file=tcoh_file,
                          print_msg=False) == 'run':
            flag = 'run'
        else:
            print(
                '1) output file: {} already exists and newer than input file: {}'
                .format(mask_file, tcoh_file))
            atr = readfile.read_attribute(mask_file)
            if any(
                    str(self.template[i]) != atr.get(i, 'False')
                    for i in config_keys):
                flag = 'run'
                print(
                    '2) NOT all key configuration parameters are the same: {}'.
                    format(config_keys))
            else:
                print('2) all key configuration parameters are the same: {}'.
                      format(config_keys))
        print('run or skip: {}'.format(flag))

        if flag == 'run':
            pysar.generate_mask.main(scp_args.split())
            # update configKeys
            atr = {}
            for key in config_keys:
                atr[key] = self.template[key]
            ut.add_attribute(mask_file, atr)

        # check number of pixels selected in mask file for following analysis
        num_pixel = np.sum(readfile.read(mask_file)[0] != 0.)
        print('number of reliable pixels: {}'.format(num_pixel))

        min_num_pixel = float(
            self.template['pysar.networkInversion.minNumPixel'])
        if num_pixel < min_num_pixel:
            msg = "Not enough reliable pixels (minimum of {}). ".format(
                int(min_num_pixel))
            msg += "Try the following:\n"
            msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
            msg += "2) Check the network and make sure it's fully connected without subsets"
            raise RuntimeError(msg)
        return
Example #14
def plot_bridge(ax, mask_cc_file, bridges):
    """Plot mask of connected components with bridges info
    Parameters: mask_cc_file : string, path of mask cc file
                bridges : list of dict
    """
    mask_cc, metadata = readfile.read(mask_cc_file)
    num_bridge = len(bridges)

    # plot 1. mask_cc data
    im = ax.imshow(mask_cc)

    # plot 2. bridge data
    for i in range(num_bridge):
        bridge = bridges[i]
        ax.imshow(np.ma.masked_where(~bridge['mask0'],
                                     np.zeros(mask_cc.shape)),
                  cmap='gray',
                  alpha=0.3,
                  vmin=0,
                  vmax=1)
        ax.imshow(np.ma.masked_where(~bridge['mask1'],
                                     np.zeros(mask_cc.shape)),
                  cmap='gray',
                  alpha=0.3,
                  vmin=0,
                  vmax=1)
        ax.plot([bridge['x0'], bridge['x1']], [bridge['y0'], bridge['y1']],
                '-',
                ms=5,
                mfc='none')

    ax = pp.auto_flip_direction(metadata, ax=ax, print_msg=False)
    return ax, im
Example #15
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    plt.switch_backend('Agg')  # Backend setting

    # Read data
    data, atr = readfile.read(inps.file, datasetName=inps.dset)

    # Data Operation - Display Unit & Rewrapping
    (data, inps.disp_unit, inps.disp_scale,
     inps.wrap) = pp.scale_data4disp_unit_and_rewrap(
         data,
         metadata=atr,
         disp_unit=inps.disp_unit,
         wrap=inps.wrap,
         wrap_range=inps.wrap_range)
    if inps.wrap:
        inps.vlim = inps.wrap_range

    # Output filename
    inps.fig_title = pp.auto_figure_title(inps.file,
                                          datasetNames=inps.dset,
                                          inps_dict=vars(inps))
    if not inps.outfile:
        inps.outfile = '{}.kmz'.format(inps.fig_title)

    # 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data,
                              metadata=atr,
                              out_file=inps.outfile,
                              inps=inps)

    print('Done.')
    return
Example #16
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    #plt.switch_backend('Agg')

    # Read data
    data, atr = readfile.read(inps.file, datasetName=inps.dset)

    # Data Operation - Display Unit & Rewrapping
    data, inps.disp_unit, inps.disp_scale, inps.wrap = pp.scale_data4disp_unit_and_rewrap(
        data=data, metadata=atr, disp_unit=inps.disp_unit, wrap=inps.wrap)
    if inps.wrap:
        inps.ylim = [-np.pi, np.pi]

    # Output filename
    if not inps.outfile:
        inps.outfile = pp.auto_figure_title(inps.file,
                                            datasetNames=inps.dset,
                                            inps_dict=vars(inps))

    # 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data,
                              metadata=atr,
                              out_name_base=inps.outfile,
                              inps=inps)

    print('Done.')
    return
Example #17
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    plt.switch_backend('Agg')  # Backend setting
    #print("The Python version is %s.%s.%s" % sys.version_info[:3])

    # Read data
    data, atr = readfile.read(inps.file, datasetName=inps.dset)

    # Data Operation - Display Unit & Rewrapping
    (data, inps.disp_unit, inps.disp_scale,
     inps.wrap) = pp.scale_data4disp_unit_and_rewrap(data=data,
                                                     metadata=atr,
                                                     disp_unit=inps.disp_unit,
                                                     wrap=inps.wrap)
    if inps.wrap:
        inps.ylim = [-np.pi, np.pi]

    # Output filename
    if not inps.outfile:
        inps.outfile = '{}.kmz'.format(
            pp.auto_figure_title(inps.file,
                                 datasetNames=inps.dset,
                                 inps_dict=vars(inps)))

    # 2. Generate Google Earth KMZ
    kmz_file = write_kmz_file(data,
                              metadata=atr,
                              out_file=inps.outfile,
                              inps=inps)

    print('Done.')
    return
Example #18
def manual_select_start_end_point(File):
    """Manual Select Start/End Point in display figure."""
    print('reading ' + File + ' ...')
    data, atr = readfile.read(File)
    print('displaying ' + File + ' ...')
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.imshow(data)

    xc = []
    yc = []
    print('please click on start and end point of the desired profile')
    print('then close the figure to continue')

    def onclick(event):
        if event.button == 1:
            xcc, ycc = int(event.xdata), int(event.ydata)
            xc.append(xcc)
            yc.append(ycc)
            print('x = ' + str(xcc) + '\ny = ' + str(ycc))
            ax.plot(xcc, ycc, 'ro')

    cid = fig.canvas.mpl_connect('button_release_event', onclick)
    plt.show()

    start_yx = [yc[0], xc[0]]
    end_yx = [yc[1], xc[1]]
    return start_yx, end_yx
Example #19
def timeseries2ifgram(ts_file, ifgram_file, out_file='reconUnwrapIfgram.h5'):
    # read time-series
    atr = readfile.read_attribute(ts_file)
    range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
    print('reading timeseries data from file {} ...'.format(ts_file))
    ts_data = readfile.read(ts_file)[0] * range2phase
    num_date, length, width = ts_data.shape
    ts_data = ts_data.reshape(num_date, -1)

    # reconstruct unwrapPhase
    print('reconstructing the interferograms from timeseries')
    stack_obj = ifgramStack(ifgram_file)
    stack_obj.open(print_msg=False)
    A1 = stack_obj.get_design_matrix4timeseries_estimation(dropIfgram=False)[0]
    num_ifgram = A1.shape[0]
    A0 = -1. * np.ones((num_ifgram, 1))
    A = np.hstack((A0, A1))
    ifgram_est = np.dot(A, ts_data).reshape(num_ifgram, length, width)
    ifgram_est = np.array(ifgram_est, dtype=ts_data.dtype)
    del ts_data

    # write to ifgram file
    dsDict = {}
    dsDict['unwrapPhase'] = ifgram_est
    writefile.write(dsDict, out_file=out_file, ref_file=ifgram_file)
    return out_file
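
Note: the design matrix A built above encodes ph_ij = ts[j] - ts[i] (after scaling to phase), one row per interferometric pair. A small self-contained sketch of that relation with made-up values:

import numpy as np

ts = np.array([0.0, 0.01, 0.025])   # displacement at 3 dates
A = np.array([[-1.,  1.,  0.],      # pair (date0, date1)
              [ 0., -1.,  1.],      # pair (date1, date2)
              [-1.,  0.,  1.]])     # pair (date0, date2)
print(A.dot(ts))                    # [0.01  0.015 0.025]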
Example #20
def transect_list(fileList, inps):
    """Get transection along input line from file list
    Inputs:
        fileList : list of str, path of files to get transect
        inps     : Namespace including the following items:
                   start/end_lalo
                   start/end_yx
                   interpolation
    Outputs:
        transectList : list of N*2 matrix containing distance and its value
        atrList      : list of attribute dictionary, for each input file
    """

    transectList = []
    atrList = []
    for File in fileList:
        print('reading ' + File)
        data, atr = readfile.read(File)
        if inps.start_lalo and inps.end_lalo:
            transect = transect_lalo(data, atr, inps.start_lalo, inps.end_lalo,
                                     inps.interpolation)
        else:
            transect = transect_yx(data, atr, inps.start_yx, inps.end_yx,
                                   inps.interpolation)
        transectList.append(transect)
        atrList.append(atr)
    return transectList, atrList
Example #21
def multilook_file(infile, lks_y, lks_x, outfile=None):
    lks_y = int(lks_y)
    lks_x = int(lks_x)

    # input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print('multilooking {} {} file: {}'.format(atr['PROCESSOR'], k, infile))
    print('number of looks in y / azimuth direction: %d' % lks_y)
    print('number of looks in x / range   direction: %d' % lks_x)

    # output file name
    if not outfile:
        if os.getcwd() == os.path.dirname(os.path.abspath(infile)):
            ext = os.path.splitext(infile)[1]
            outfile = os.path.splitext(infile)[0] + '_' + str(
                lks_y) + 'alks_' + str(lks_x) + 'rlks' + ext
        else:
            outfile = os.path.basename(infile)
    #print('writing >>> '+outfile)

    # read source data and multilooking
    dsNames = readfile.get_dataset_list(infile)
    maxDigit = max([len(i) for i in dsNames])
    dsDict = dict()
    for dsName in dsNames:
        print('multilooking {d:<{w}} from {f} ...'.format(
            d=dsName, w=maxDigit, f=os.path.basename(infile)))
        data = readfile.read(infile, datasetName=dsName, print_msg=False)[0]
        data = multilook_data(data, lks_y, lks_x)
        dsDict[dsName] = data
    atr = multilook_attribute(atr, lks_y, lks_x)
    writefile.write(dsDict, out_file=outfile, metadata=atr, ref_file=infile)
    return outfile
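
Note: multilook_data itself is not shown here; a hedged sketch of what spatial multilooking amounts to, block-averaging by the number of looks in each direction (the real implementation may treat edges and NaNs differently):

import numpy as np

def multilook_sketch(data, lks_y, lks_x):
    # crop to a multiple of the look numbers, then block-average
    length = data.shape[0] // lks_y * lks_y
    width = data.shape[1] // lks_x * lks_x
    d = data[:length, :width].reshape(length // lks_y, lks_y,
                                      width // lks_x, lks_x)
    return d.mean(axis=(1, 3))

print(multilook_sketch(np.arange(16.).reshape(4, 4), 2, 2))
# [[ 2.5  4.5]
#  [10.5 12.5]]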
Example #22
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.file)[0]+'.h5'

    if inps.data_type:
        if inps.data_type in ['float', 'float32', 'np.float32']:
            inps.data_type = np.float32
        elif inps.data_type in ['float64', 'np.float64']:
            inps.data_type = np.float64
        elif inps.data_type in ['int', 'int16', 'np.int16']:
            inps.data_type = np.int16
        elif inps.data_type in ['bool', 'np.bool_']:
            inps.data_type = np.bool_
        elif inps.data_type in ['complex', 'np.complex64']:
            inps.data_type = np.complex64
        elif inps.data_type in ['complex128', 'np.complex128']:
            inps.data_type = np.complex128
        else:
            raise ValueError('un-recognized input data type: {}'.format(inps.data_type))

    atr = readfile.read_attribute(inps.file)
    dsNames = readfile.get_dataset_list(inps.file)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(inps.file, datasetName=dsName)[0]
        if inps.data_type:
            data = np.array(data, inps.data_type)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=inps.outfile, metadata=atr)
    return inps.outfile
Example #23
    def plot_coherence_matrix4pixel(self, yx):
        """Plot coherence matrix for one pixel
        Parameters: yx : list of 2 int
        """
        # read coherence
        box = (yx[1], yx[0], yx[1]+1, yx[0]+1)
        coh = readfile.read(self.ifgram_file, datasetName='coherence', box=box)[0]
        # prep metadata
        plotDict = {}
        plotDict['fig_title'] = 'Y = {}, X = {}'.format(yx[0], yx[1])
        plotDict['colormap'] = self.colormap
        plotDict['disp_legend'] = False
        # plot
        coh_mat = pp.plot_coherence_matrix(self.ax_mat,
                                           date12List=self.date12_list,
                                           cohList=coh.tolist(),
                                           date12List_drop=self.ex_date12_list,
                                           plot_dict=plotDict)[1]
        self.fig.canvas.draw()

        # status bar
        def format_coord(x, y):
            row, col = int(y+0.5), int(x+0.5)
            date12 = sorted([self.date_list[row], self.date_list[col]])
            date12 = ['{}-{}-{}'.format(i[0:4], i[4:6], i[6:8]) for i in date12]
            return 'x={}, y={}, v={:.3f}'.format(date12[0], date12[1], coh_mat[row, col])
        self.ax_mat.format_coord = format_coord
        # info
        vprint('-'*30)
        vprint('pixel: yx = {}'.format(yx))
        vprint('min/max coherence: {:.2f} / {:.2f}'.format(np.min(coh), np.max(coh)))
        return
Example #24
def file_operation(fname, operator, operand, out_file=None):
    """Mathmathic operation of file"""

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is ' + k + ' file: ' + fname)
    print('operation: file %s %f' % (operator, operand))

    # default output filename
    if not out_file:
        if operator in ['+', 'plus', 'add', 'addition']:
            suffix = 'plus'
        elif operator in ['-', 'minus', 'substract', 'substraction']:
            suffix = 'minus'
        elif operator in ['*', 'times', 'multiply', 'multiplication']:
            suffix = 'multiply'
        elif operator in ['/', 'obelus', 'divide', 'division']:
            suffix = 'divide'
        elif operator in ['^', 'pow', 'power']:
            suffix = 'pow'
        else:
            raise ValueError('un-recognized operator: {}'.format(operator))
        out_file = '{}_{}{}{}'.format(
            os.path.splitext(fname)[0], suffix, str(operand),
            os.path.splitext(fname)[1])

    dsNames = readfile.get_dataset_list(fname)
    dsDict = {}
    for dsName in dsNames:
        data = readfile.read(fname, datasetName=dsName)[0]
        data = data_operation(data, operator, operand)
        dsDict[dsName] = data
    writefile.write(dsDict, out_file=out_file, metadata=atr, ref_file=fname)
    return out_file
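
Note: data_operation is not shown here; a hedged sketch of the element-wise arithmetic it presumably performs (operator aliases and NaN handling in the real function may differ):

import numpy as np

def data_operation_sketch(data, operator, operand):
    ops = {'+': np.add, '-': np.subtract, '*': np.multiply,
           '/': np.true_divide, '^': np.power}
    return ops[operator](data, operand)

print(data_operation_sketch(np.array([1., 2., 3.]), '*', 2.0))  # [2. 4. 6.]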
Example #25
    def read_lookup_table(self, print_msg=True):
        if 'Y_FIRST' in self.lut_metadata.keys():
            self.lut_y = readfile.read(self.lookup_file[0],
                                       datasetName='azimuthCoord',
                                       print_msg=print_msg)[0]
            self.lut_x = readfile.read(self.lookup_file[1],
                                       datasetName='rangeCoord',
                                       print_msg=print_msg)[0]
        else:
            self.lut_y = readfile.read(self.lookup_file[0],
                                       datasetName='latitude',
                                       print_msg=print_msg)[0]
            self.lut_x = readfile.read(self.lookup_file[1],
                                       datasetName='longitude',
                                       print_msg=print_msg)[0]
        return self.lut_y, self.lut_x
Example #26
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # Calculate look angle
    atr = readfile.read_attribute(inps.file)
    dem = None
    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
    angle = ut.incidence_angle(atr, dem=dem, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print('Input file is geocoded, only the center incidence angle is calculated:')
        print(angle)
        length = int(atr['LENGTH'])
        width = int(atr['WIDTH'])
        angle_mat = np.zeros((length, width), np.float32)
        angle_mat[:] = angle
        angle = angle_mat

    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'degree'
    if 'REF_DATE' in atr.keys():
        atr.pop('REF_DATE')

    if not inps.outfile:
        inps.outfile = 'incidenceAngle.h5'
    writefile.write(angle, out_file=inps.outfile, metadata=atr)
    return inps.outfile
Example #27
def run_geocode(inps):
    """geocode all input files"""
    start_time = time.time()

    # Prepare geometry for geocoding
    res_obj = resample(lookupFile=inps.lookupFile,
                       dataFile=inps.file[0],
                       SNWE=inps.SNWE,
                       laloStep=inps.laloStep,
                       processor=inps.processor)
    res_obj.open()

    if not inps.nprocs:
        inps.nprocs = multiprocessing.cpu_count()

    # resample input files one by one
    for infile in inps.file:
        print('-' * 50+'\nresampling file: {}'.format(infile))
        atr = readfile.read_attribute(infile, datasetName=inps.dset)
        outfile = auto_output_filename(infile, inps)
        if inps.updateMode and ut.run_or_skip(outfile, in_file=[infile, inps.lookupFile]) == 'skip':
            print('update mode is ON, skip geocoding.')
            continue

        # read source data and resample
        dsNames = readfile.get_dataset_list(infile, datasetName=inps.dset)
        maxDigit = max([len(i) for i in dsNames])
        dsResDict = dict()
        for dsName in dsNames:
            print('reading {d:<{w}} from {f} ...'.format(d=dsName,
                                                         w=maxDigit,
                                                         f=os.path.basename(infile)))
            data = readfile.read(infile,
                                 datasetName=dsName,
                                 print_msg=False)[0]

            if atr['FILE_TYPE'] == 'timeseries' and len(data.shape) == 2:
                data = np.reshape(data, (1, data.shape[0], data.shape[1]))
            res_data = res_obj.run_resample(src_data=data,
                                            interp_method=inps.interpMethod,
                                            fill_value=inps.fillValue,
                                            nprocs=inps.nprocs,
                                            print_msg=True)
            dsResDict[dsName] = res_data

        # update metadata
        if inps.radar2geo:
            atr = metadata_radar2geo(atr, res_obj)
        else:
            atr = metadata_geo2radar(atr, res_obj)
        #if len(dsNames) == 1 and dsName not in ['timeseries']:
        #    atr['FILE_TYPE'] = dsNames[0]
        #    infile = None

        writefile.write(dsResDict, out_file=outfile, metadata=atr, ref_file=infile)

    m, s = divmod(time.time()-start_time, 60)
    print('time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s))
    return outfile
Example #28
def get_temporal_coherence_mask(inps, template):
    """Generate mask from temporal coherence"""
    configKeys = ['pysar.networkInversion.minTempCoh']
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {} --shadow {}'.format(
        inps.tempCohFile, inps.minTempCoh, inps.maskFile, inps.geomFile)
    print(maskCmd)

    # update mode checking
    # run if 1) output file exists; 2) newer than input file and 3) all config keys are the same
    run = False
    if ut.run_or_skip(out_file=inps.maskFile,
                      in_file=inps.tempCohFile,
                      print_msg=False) == 'run':
        run = True
    else:
        print(
            '  1) output file: {} already exists and newer than input file: {}'
            .format(inps.maskFile, inps.tempCohFile))
        meta_dict = readfile.read_attribute(inps.maskFile)
        if any(
                str(template[i]) != meta_dict.get(i, 'False')
                for i in configKeys):
            run = True
            print(
                '  2) NOT all key configuration parameters are the same --> run.\n\t{}'
                .format(configKeys))
        else:
            print('  2) all key configuration parameters are the same:\n\t{}'.
                  format(configKeys))
    # result
    print('run this step:', run)
    if run:
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while generating mask file from temporal coherence.')

        # update configKeys
        meta_dict = {}
        for key in configKeys:
            meta_dict[key] = template[key]
        ut.add_attribute(inps.maskFile, meta_dict)

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enought coherent pixels selected (minimum of {}). ".format(
            int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk
    return
Example #29
def read_topographic_data(geom_file, metadata):
    print('read DEM from file: ' + geom_file)
    dem = readfile.read(geom_file, datasetName='height', print_msg=False)[0]

    print('considering the incidence angle of each pixel ...')
    inc_angle = readfile.read(geom_file,
                              datasetName='incidenceAngle',
                              print_msg=False)[0]
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ref_y = int(metadata['REF_Y'])
    ref_x = int(metadata['REF_X'])
    dem -= dem[ref_y, ref_x]

    # Design matrix for elevation v.s. phase
    # dem = dem.flatten()
    return dem
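
Note: dividing the height by cos(incidence angle) projects vertical topography into its along-line-of-sight equivalent. A one-line numeric check with made-up values:

import numpy as np

dem, inc_angle = 1500.0, 34.0                   # height [m], incidence [deg]
print(dem / np.cos(inc_angle * np.pi / 180.0))  # ~1809.3 m along the LOS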
Example #30
def set_dem_file():
    global ax_v, inps, img

    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
        ax_v = pp.plot_dem_yx(ax_v, dem)

    img = ax_v.imshow(d_v, cmap=inps.colormap, clim=inps.ylim_mat, interpolation='nearest')