Code example #1
    def get_los_geometry(self, insar_obj, print_msg=False):
        lat, lon = self.get_stat_lat_lon(print_msg=print_msg)

        # get LOS geometry
        if isinstance(insar_obj, str):
            # geometry file
            atr = readfile.read_attribute(insar_obj)
            coord = ut.coordinate(atr, lookup_file=insar_obj)
            y, x = coord.geo2radar(lat, lon, print_msg=print_msg)[0:2]
            box = (x, y, x+1, y+1)
            inc_angle = readfile.read(insar_obj, datasetName='incidenceAngle', box=box, print_msg=print_msg)[0][0,0]
            az_angle  = readfile.read(insar_obj, datasetName='azimuthAngle', box=box, print_msg=print_msg)[0][0,0]
            head_angle = ut.azimuth2heading_angle(az_angle)
        elif isinstance(insar_obj, dict):
            # use mean inc/head_angle from metadata
            inc_angle = ut.incidence_angle(insar_obj, dimension=0, print_msg=print_msg)
            head_angle = float(insar_obj['HEADING_DEG'])
            # handle the old convention where los.rdr band 2 was read into headingAngle directly
            if (head_angle + 180.) > 45.:
                head_angle = ut.azimuth2heading_angle(head_angle)
        else:
            raise ValueError('input insar_obj is neither str nor dict: {}'.format(insar_obj))
        return inc_angle, head_angle
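
A minimal usage sketch of the two accepted insar_obj types; stat_obj here is a hypothetical instance of the (unshown) class that defines this method, and the file names are placeholders:

# 1) pass a geometry file path (str): angles are read at the station pixel
inc_angle, head_angle = stat_obj.get_los_geometry('geometryRadar.h5')

# 2) pass a metadata dict: mean incidence/heading taken from the attributes
atr = readfile.read_attribute('velocity.h5')
inc_angle, head_angle = stat_obj.get_los_geometry(atr)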
Code example #2
File: tsview_gui.py, Project: whigg/PySAR
def read_file_data(epoch=None):
    global atr, attributes, ref_dates_list

    atr = readfile.read_attribute(h5_file.get())

    file_type = atr['FILE_TYPE']

    ref_dates_list = ["All"]

    h5file = h5py.File(h5_file.get(), 'r')
    if file_type in ['HDFEOS']:
        ref_dates_list += h5file.attrs['DATE_TIMESERIES'].split()
    else:
        ref_dates_list = timeseries(h5_file.get()).get_date_list()

    if epoch and epoch != "All":
        data, attributes = readfile.read(h5_file.get(),
                                         datasetName=ref_dates_list[epoch])
    else:
        data, attributes = readfile.read(
            h5_file.get(), datasetName=ref_dates_list[len(ref_dates_list) - 1])

    return data
Code example #3
File: remove_ramp.py, Project: xuubing/PySAR
def run_or_skip(inps):
    print('-'*50)
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('1) output file {} NOT found.'.format(inps.outfile))
    else:
        print('1) output file {} already exists.'.format(inps.outfile))
        infiles = [inps.file]
        if inps.mask_file:
            infiles.append(inps.mask_file)
        ti = max(os.path.getmtime(i) for i in infiles)
        to = os.path.getmtime(inps.outfile)
        if ti > to:
            flag = 'run'
            print('2) output file is NOT newer than input file: {}.'.format(infiles))
        else:
            print('2) output file is newer than input file: {}.'.format(infiles))

    # check configuration
    if flag == 'skip':
        iDict = {}
        iDict['pysar.deramp'] = inps.surface_type
        iDict['pysar.deramp.maskFile'] = inps.mask_file
        atr = readfile.read_attribute(inps.outfile)
        if any(str(iDict[key]) != atr.get(key, 'None') for key in configKeys):
            flag = 'run'
            print('3) NOT all key configuration parameters are the same: {}'.format(configKeys))
        else:
            print('3) all key configuration parameters are the same: {}'.format(configKeys))

    # result
    print('run or skip: {}.'.format(flag))
    return flag
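
The update-mode pattern above (repeated in code examples #4 and #9) boils down to three checks: the output exists, the output is newer than every input, and the key configuration is unchanged. A standalone sketch of the timestamp check, with placeholder file names:

import os

def is_output_stale(out_file, in_files):
    """Return True if out_file is missing or older than any input file."""
    if not os.path.isfile(out_file):
        return True
    t_in = max(os.path.getmtime(i) for i in in_files)   # newest input
    t_out = os.path.getmtime(out_file)
    return t_in > t_out

# e.g. is_output_stale('timeseries_ramp.h5', ['timeseries.h5', 'maskTempCoh.h5'])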
Code example #4
File: timeseries2velocity.py, Project: whigg/PySAR
def run_or_skip(inps):
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('  1) output file {} not found, --> run'.format(inps.outfile))
    else:
        print('  1) output file {} already exists.'.format(inps.outfile))
        ti = os.path.getmtime(inps.timeseries_file)
        to = os.path.getmtime(inps.outfile)
        if ti > to:
            flag = 'run'
            print('  2) output file is NOT newer than input file: {} --> run.'.
                  format(inps.timeseries_file))
        else:
            print('  2) output file is newer than input file: {}.'.format(
                inps.timeseries_file))

    # check configuration
    if flag == 'skip':
        atr = readfile.read_attribute(inps.outfile)
        if any(
                str(vars(inps)[key]) != atr.get(key_prefix + key, 'None')
                for key in configKeys):
            flag = 'run'
            print(
                '  3) NOT all key configuration parameters are the same --> run.\n\t{}'
                .format(configKeys))
        else:
            print('  3) all key configuration parameters are the same:\n\t{}'.
                  format(configKeys))

    # result
    print('check result:', flag)
    return flag
Code example #5
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'rangeDistance.h5'

    # Calculate range distance
    range_dis = ut.range_distance(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print(
            'Input file is geocoded, only center range distance is calculated: '
        )
        print(range_dis)
        length = int(atr['LENGTH'])
        width = int(atr['WIDTH'])
        range_dis_mat = np.zeros((length, width), np.float32)
        range_dis_mat[:] = range_dis
        range_dis = range_dis_mat

    print('writing >>> ' + outFile)
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'm'
    try:
        atr.pop('REF_DATE')
    except:
        pass
    writefile.write(range_dis, out_file=outFile, metadata=atr)
    return outFile
Code example #6
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    # check 1 input file type
    k = readfile.read_attribute(inps.ifgram_file)['FILE_TYPE']
    if k not in ['ifgramStack']:
        raise ValueError('input file is not ifgramStack: {}'.format(k))

    # check 2 cc_mask_file
    if not os.path.isfile(inps.cc_mask_file):
        raise FileNotFoundError(inps.cc_mask_file)

    if not inps.datasetNameOut:
        inps.datasetNameOut = '{}_phaseClosure'.format(inps.datasetNameIn)

    # discard water mask file if it is not found
    if inps.waterMaskFile and not os.path.isfile(inps.waterMaskFile):
        inps.waterMaskFile = None

    return inps
Code example #7
File: pysarApp.py, Project: xuubing/PySAR
    def run_save2google_earth(self, step_name):
        """Save velocity file in geo coordinates into Google Earth raster image."""
        if self.template['pysar.save.kmz'] is True:
            print(
                'creating Google Earth KMZ file for geocoded velocity file: ...'
            )
            # input
            vel_file = 'velocity.h5'
            atr = readfile.read_attribute(vel_file)
            if 'Y_FIRST' not in atr.keys():
                vel_file = os.path.join(self.workDir,
                                        'GEOCODE/geo_velocity.h5')

            # output
            kmz_file = '{}.kmz'.format(os.path.splitext(vel_file)[0])
            scp_args = '{} -o {}'.format(vel_file, kmz_file)
            print('save_kmz.py', scp_args)

            # update mode
            try:
                fbase = os.path.basename(kmz_file)
                kmz_file = [
                    i for i in [
                        fbase, './GEOCODE/{}'.format(fbase), './PIC/{}'.format(
                            fbase)
                    ] if os.path.isfile(i)
                ][0]
            except:
                kmz_file = None
            if ut.run_or_skip(out_file=kmz_file,
                              in_file=vel_file,
                              check_readable=False) == 'run':
                pysar.save_kmz.main(scp_args.split())
        else:
            print('save velocity to Google Earth format is OFF.')
        return
Code example #8
def print_date_list(fname, disp_num=False, print_msg=False):
    """Print time/date info of file"""
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        obj = timeseries(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k == 'HDFEOS':
        obj = HDFEOS(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k == 'giantTimeseries':
        obj = giantTimeseries(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateList = obj.date12List
    elif k in ['giantIfgramStack']:
        obj = giantIfgramStack(fname)
        obj.open(print_msg=False)
        dateList = obj.date12List
    else:
        print('--date option cannot be applied to {} file, ignore it.'.format(k))

    if print_msg and dateList is not None:
        for i in range(len(dateList)):
            if disp_num:
                print('{}\t{}'.format(dateList[i], i))
            else:
                print(dateList[i])
    return dateList
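
The FILE_TYPE ladder above could also be written as a dispatch table; a sketch assuming the five classes share the open()/dateList interface used in the snippet:

# hypothetical alternative: map FILE_TYPE to (class, date-list attribute)
type2reader = {
    'timeseries':       (timeseries,       'dateList'),
    'HDFEOS':           (HDFEOS,           'dateList'),
    'giantTimeseries':  (giantTimeseries,  'dateList'),
    'ifgramStack':      (ifgramStack,      'date12List'),
    'giantIfgramStack': (giantIfgramStack, 'date12List'),
}

def get_date_list_generic(fname):
    k = readfile.read_attribute(fname)['FILE_TYPE']
    if k not in type2reader:
        return None
    cls, attr = type2reader[k]
    obj = cls(fname)
    obj.open(print_msg=False)
    return getattr(obj, attr)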
Code example #9
def run_or_skip(inps):
    print('-'*50)
    print('update mode: ON')
    flag = 'skip'

    # check output file
    if not os.path.isfile(inps.outfile):
        flag = 'run'
        print('  1) output file {} not found, --> run'.format(inps.outfile))
    else:
        print('  1) output file {} already exists.'.format(inps.outfile))
        infiles = [inps.timeseries_file]
        if inps.geom_file:
            infiles.append(inps.geom_file)
        ti = max(os.path.getmtime(i) for i in infiles)
        to = os.path.getmtime(inps.outfile)
        if ti > to:
            flag = 'run'
            print('  2) output file is NOT newer than input file: {} --> run.'.format(infiles))
        else:
            print('  2) output file is newer than input file: {}.'.format(infiles))

    # check configuration
    if flag == 'skip':
        date_list_all = timeseries(inps.timeseries_file).get_date_list()
        inps.excludeDate = read_exclude_date(inps.excludeDate, date_list_all, print_msg=False)[1]
        atr = readfile.read_attribute(inps.outfile)
        if any(str(vars(inps)[key]) != atr.get(key_prefix+key, 'None') for key in configKeys):
            flag = 'run'
            print('  3) NOT all key configuration parameters are the same --> run.\n\t{}'.format(configKeys))
        else:
            print('  3) all key configuration parameters are the same:\n\t{}'.format(configKeys))

    # result
    print('check result:', flag)
    return flag
Code example #10
def get_date_list(fname, print_msg=False):
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    dateList = None
    if k in ['timeseries']:
        obj = timeseries(fname)
        obj.open(print_msg=False)
        dateList = obj.dateList
    elif k in ['ifgramStack']:
        obj = ifgramStack(fname)
        obj.open(print_msg=False)
        dateList = obj.date12List
    else:
        print('--date option cannot be applied to {} file, ignore it.'.format(k))
    try:
        obj.close(print_msg=False)
    except:
        pass

    if print_msg and dateList is not None:
        for i in dateList:
            print(i)
    return dateList
Code example #11
def main(argv):
    if len(sys.argv) < 3:
        usage()
        sys.exit(1)

    y = int(argv[0])
    x = int(argv[1])
    print('input radar coord: y/azimuth=%d, x/range=%d' % (y, x))

    try:
        trans_file = argv[2]
    except:
        trans_file = ut.get_lookup_file()

    try:
        radar_file = argv[3]
    except:
        radar_file = 'INPUTS/ifgramStack.h5'
    atr_rdr = readfile.read_attribute(radar_file)

    coord = ut.coordinate(atr_rdr, lookup_file=trans_file)
    lat, lon = coord.radar2geo(np.array(y), np.array(x))[0:2]
    print('corresponding geo coord: lat=%.4f, lon=%.4f' % (lat, lon))
    return
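
The opposite conversion (geo to radar) appears in code example #1; a round-trip sketch under the same atr_rdr/trans_file assumptions as above:

coord = ut.coordinate(atr_rdr, lookup_file=trans_file)
lat, lon = coord.radar2geo(np.array(y), np.array(x))[0:2]
y2, x2 = coord.geo2radar(lat, lon)[0:2]   # should land back near (y, x)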
Code example #12
def read_subset_box(inpsDict):
    # Read subset info from template
    inpsDict['box'] = None
    inpsDict['box4geo_lut'] = None
    pix_box, geo_box = subset.read_subset_template2box(
        inpsDict['template_file'][0])

    # Grab required info to read input geo_box into pix_box
    try:
        lookupFile = [
            glob.glob(str(inpsDict['pysar.load.lookupYFile']))[0],
            glob.glob(str(inpsDict['pysar.load.lookupXFile']))[0]
        ]
    except:
        lookupFile = None

    try:
        pathKey = [
            i for i in datasetName2templateKey.values()
            if i in inpsDict.keys()
        ][0]
        file = glob.glob(str(inpsDict[pathKey]))[0]
        atr = readfile.read_attribute(file)
    except:
        atr = dict()

    geocoded = None
    if 'Y_FIRST' in atr.keys():
        geocoded = True
    else:
        geocoded = False

    # Check conflict
    if geo_box and not geocoded and lookupFile is None:
        geo_box = None
        print(('WARNING: pysar.subset.lalo is not supported'
               ' if 1) no lookup file AND'
               '    2) radar/unknown coded dataset'))
        print('\tignore it and continue.')
    if not geo_box and not pix_box:
        return inpsDict

    # geo_box --> pix_box
    coord = ut.coordinate(atr, lookup_file=lookupFile)
    if geo_box is not None:
        pix_box = coord.bbox_geo2radar(geo_box)
        pix_box = coord.check_box_within_data_coverage(pix_box)
        print('input bounding box of interest in lalo: {}'.format(geo_box))
    print('box to read for datasets in y/x: {}'.format(pix_box))

    # Get box for geocoded lookup table (for gamma/roipac)
    box4geo_lut = None
    if lookupFile is not None:
        atrLut = readfile.read_attribute(lookupFile[0])
        if not geocoded and 'Y_FIRST' in atrLut.keys():
            geo_box = coord.bbox_radar2geo(pix_box)
            box4geo_lut = ut.coordinate(atrLut).bbox_geo2radar(geo_box)
            print('box to read for geocoded lookup file in y/x: {}'.format(
                box4geo_lut))

    inpsDict['box'] = pix_box
    inpsDict['box4geo_lut'] = box4geo_lut
    return inpsDict
Code example #13
def read_init_info(inps):
    # Time Series Info
    ts_file0 = inps.timeseries_file[0]
    atr = readfile.read_attribute(ts_file0)
    inps.key = atr['FILE_TYPE']
    if inps.key == 'timeseries':
        obj = timeseries(ts_file0)
    elif inps.key == 'giantTimeseries':
        obj = giantTimeseries(ts_file0)
    elif inps.key == 'HDFEOS':
        obj = HDFEOS(ts_file0)
    else:
        raise ValueError('input file is {}, not timeseries.'.format(inps.key))
    obj.open()

    if not inps.file_label:
        inps.file_label = [
            str(i) for i in list(range(len(inps.timeseries_file)))
        ]

    # default mask file
    if not inps.mask_file and 'masked' not in ts_file0:
        dir_name = os.path.dirname(ts_file0)
        if 'Y_FIRST' in atr.keys():
            inps.mask_file = os.path.join(dir_name, 'geo_maskTempCoh.h5')
        else:
            inps.mask_file = os.path.join(dir_name, 'maskTempCoh.h5')
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None

    # date info
    inps.date_list = obj.dateList
    if inps.start_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) >= int(inps.start_date)
        ]
    if inps.end_date:
        inps.date_list = [
            i for i in inps.date_list if int(i) <= int(inps.end_date)
        ]
    inps.num_date = len(inps.date_list)
    inps.dates, inps.yearList = ptime.date_list2vector(inps.date_list)
    (inps.ex_date_list, inps.ex_dates,
     inps.ex_flag) = read_exclude_date(inps.ex_date_list, inps.date_list)

    # initial display index
    if obj.metadata['REF_DATE'] in inps.date_list:
        inps.ref_idx = inps.date_list.index(obj.metadata['REF_DATE'])
    else:
        inps.ref_idx = 0
    if inps.ref_date:
        inps.ref_idx = inps.date_list.index(inps.ref_date)
    if not inps.init_idx:
        if inps.ref_idx < inps.num_date / 2.:
            inps.init_idx = -3
        else:
            inps.init_idx = 3

    # Display Unit
    (inps.disp_unit,
     inps.unit_fac) = pp.scale_data2disp_unit(metadata=atr,
                                              disp_unit=inps.disp_unit)[1:3]

    # Read Error List
    inps.error_ts = None
    inps.ex_error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file,
                                       dtype=bytes).astype(str)
        inps.error_ts = error_fileContent[:, 1].astype(float) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = e_ts[inps.ex_flag == 0]
            inps.error_ts = e_ts[inps.ex_flag == 1]

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        inps.zero_idx = min(0, np.min(np.where(inps.ex_flag)[0]))

    # default lookup table file
    if not inps.lookup_file:
        inps.lookup_file = ut.get_lookup_file('./INPUTS/geometryRadar.h5')
    inps.coord = ut.coordinate(atr, inps.lookup_file)

    # size and lalo info
    inps.pix_box, inps.geo_box = subset.subset_input_dict2box(vars(inps), atr)
    inps.pix_box = inps.coord.check_box_within_data_coverage(inps.pix_box)
    inps.geo_box = inps.coord.box_pixel2geo(inps.pix_box)
    # Out message
    data_box = (0, 0, obj.width, obj.length)
    print('data   coverage in y/x: ' + str(data_box))
    print('subset coverage in y/x: ' + str(inps.pix_box))
    print('data   coverage in lat/lon: ' +
          str(inps.coord.box_pixel2geo(data_box)))
    print('subset coverage in lat/lon: ' + str(inps.geo_box))
    print(
        '------------------------------------------------------------------------'
    )

    # reference pixel
    if not inps.ref_lalo and 'REF_LAT' in atr.keys():
        inps.ref_lalo = (float(atr['REF_LAT']), float(atr['REF_LON']))
    if inps.ref_lalo:
        if inps.ref_lalo[1] > 180.:
            inps.ref_lalo[1] -= 360.
        inps.ref_yx = inps.coord.geo2radar(inps.ref_lalo[0],
                                           inps.ref_lalo[1],
                                           print_msg=False)[0:2]
    if not inps.ref_yx:
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]

    # Initial Pixel Coord
    if inps.lalo:
        inps.yx = inps.coord.geo2radar(inps.lalo[0],
                                       inps.lalo[1],
                                       print_msg=False)[0:2]
    try:
        inps.lalo = inps.coord.radar2geo(inps.yx[0],
                                         inps.yx[1],
                                         print_msg=False)[0:2]
    except:
        inps.lalo = None

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)

    # display unit and wrap
    # if wrap_step == 2*np.pi (default value), set disp_unit_v = radian;
    # otherwise set disp_unit_v = disp_unit
    inps.disp_unit_v = inps.disp_unit
    if inps.wrap:
        inps.range2phase = -4. * np.pi / float(atr['WAVELENGTH'])
        if 'cm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 100.
        elif 'mm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1000.
        elif 'm' == inps.disp_unit.split('/')[0]: inps.range2phase /= 1.
        else:
            raise ValueError('un-recognized display unit: {}'.format(
                inps.disp_unit))

        if (inps.wrap_range[1] - inps.wrap_range[0]) == 2 * np.pi:
            inps.disp_unit_v = 'radian'
        inps.vlim = inps.wrap_range
    inps.cbar_label = 'Displacement [{}]'.format(inps.disp_unit_v)

    return inps, atr
Code example #14
File: prep_isce.py, Project: shineusn/PySAR
def prepare_geometry(geometryDir, metadata=dict()):
    """Prepare and extract metadata from geometry files"""
    def get_nonzero_row_number(data, buffer=2):
        """Find the first and last row number of rows without zero value, for multiple swaths data"""
        if np.all(data):
            r0, r1 = 0 + buffer, -1 - buffer
        else:
            row_flag = np.sum(data != 0., axis=1) == data.shape[1]
            row_idx = np.where(row_flag)[0]
            r0, r1 = row_idx[0] + buffer, row_idx[-1] - buffer
        return r0, r1

    print('prepare .rsc file for geometry files')
    isceFiles = [
        os.path.join(os.path.abspath(geometryDir), '{}.rdr'.format(i))
        for i in ['hgt', 'lat', 'lon', 'los', 'shadowMask']
    ]
    isceFiles = [i for i in isceFiles if os.path.isfile(i)]

    # get A/RLOOKS
    metadata = extract_multilook_number(metadata, geometryDir)

    # get LAT/LON_REF1/2/3/4 and HEADING into metadata
    for isceFile in isceFiles:
        if 'lat' in os.path.basename(isceFile):
            data = readfile.read(isceFile)[0]
            r0, r1 = get_nonzero_row_number(data)
            metadata['LAT_REF1'] = str(data[r0, 0])
            metadata['LAT_REF2'] = str(data[r0, -1])
            metadata['LAT_REF3'] = str(data[r1, 0])
            metadata['LAT_REF4'] = str(data[r1, -1])

        if 'lon' in os.path.basename(isceFile):
            data = readfile.read(isceFile)[0]
            r0, r1 = get_nonzero_row_number(data)
            metadata['LON_REF1'] = str(data[r0, 0])
            metadata['LON_REF2'] = str(data[r0, -1])
            metadata['LON_REF3'] = str(data[r1, 0])
            metadata['LON_REF4'] = str(data[r1, -1])

        if 'los' in os.path.basename(isceFile):
            data = readfile.read(isceFile, datasetName='inc')[0]
            data[data == 0.] = np.nan
            az_angle = np.nanmean(data)
            # convert isce azimuth angle to roipac orbit heading angle
            head_angle = -1 * (270 + az_angle)
            head_angle -= np.round(head_angle / 360.) * 360.
            metadata['HEADING'] = str(head_angle)

    # write rsc file for each geometry file
    for isceFile in isceFiles:
        # prepare metadata for current file
        geom_metadata = readfile.read_attribute(isceFile, meta_ext='.xml')
        geom_metadata.update(metadata)
        geom_metadata = readfile.standardize_metadata_isce(geom_metadata)

        # write .rsc file
        rscFile = isceFile + '.rsc'
        rscDict = dict()
        if os.path.isfile(rscFile):
            rscDict = readfile.read_roipac_rsc(rscFile)
        # update .rsc file only if there are new metadata key/value
        if not set(geom_metadata.items()).issubset(set(rscDict.items())):
            rscDictOut = {**rscDict, **geom_metadata}
            print('writing ', rscFile)
            writefile.write_roipac_rsc(rscDictOut, rscFile)
    return metadata
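
The ISCE-azimuth to ROI_PAC-heading conversion in the los branch is self-contained; a sketch of the same two lines factored out, with the wrap step keeping the result in (-180, 180]:

import numpy as np

def isce_az2heading(az_angle):
    """Convert an ISCE azimuth angle to a ROI_PAC orbit heading angle (same math as above)."""
    head_angle = -1 * (270 + az_angle)
    head_angle -= np.round(head_angle / 360.) * 360.
    return head_angle

# e.g. isce_az2heading(-100.0) -> -170.0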
Code example #15
def read_reference_input(inps):
    atr = readfile.read_attribute(inps.file)
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])
    inps.go_reference = True

    if inps.reset:
        remove_reference_pixel(inps.file)
        inps.outfile = inps.file
        inps.go_reference = False
        return inps

    print('-' * 50)
    # Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print('reading reference info from template: ' + inps.template_file)
        inps = read_template_file2inps(inps.template_file, inps)
    if inps.reference_file:
        print('reading reference info from reference: ' + inps.reference_file)
        inps = read_reference_file2inps(inps.reference_file, inps)

    # Convert ref_lat/lon to ref_y/x
    coord = ut.coordinate(atr, lookup_file=inps.lookup_file)
    if inps.ref_lat and inps.ref_lon:
        (inps.ref_y, inps.ref_x) = coord.geo2radar(np.array(inps.ref_lat),
                                                   np.array(inps.ref_lon))[0:2]
        print('input reference point in lat/lon: {}'.format(
            (inps.ref_lat, inps.ref_lon)))
    if inps.ref_y and inps.ref_x:
        print('input reference point in y/x: {}'.format(
            (inps.ref_y, inps.ref_x)))

    # Do not use ref_y/x outside of data coverage
    if (inps.ref_y and inps.ref_x
            and not (0 <= inps.ref_y <= length and 0 <= inps.ref_x <= width)):
        inps.ref_y = None
        inps.ref_x = None
        raise ValueError('input reference point is OUT of data coverage!')

    # Do not use ref_y/x in masked out area
    if inps.ref_y and inps.ref_x and inps.maskFile:
        print('mask: ' + inps.maskFile)
        mask = readfile.read(inps.maskFile, datasetName='mask')[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            msg = 'input reference point is in masked OUT area defined by {}!'.format(
                inps.maskFile)
            raise ValueError(msg)

    # Select method
    if not inps.ref_y or not inps.ref_x:
        print('no input reference y/x.')
        if not inps.method:
            # Use existing REF_Y/X if no ref_y/x input and no method input
            if not inps.force and 'REF_X' in atr.keys():
                print('REF_Y/X exists in input file, skip updating.')
                print('REF_Y: ' + atr['REF_Y'])
                print('REF_X: ' + atr['REF_X'])
                inps.outfile = inps.file
                inps.go_reference = False

            # method to select reference point
            elif inps.coherenceFile and os.path.isfile(inps.coherenceFile):
                inps.method = 'maxCoherence'
            else:
                inps.method = 'random'
        print('reference point selection method: ' + str(inps.method))
    print('-' * 50)
    return inps
Code example #16
def reference_file(inps):
    """Seed input file with option from input namespace
    Return output file name if it succeeds; otherwise, return None
    """
    if not inps:
        inps = cmd_line_parse([''])
    atr = readfile.read_attribute(inps.file)
    if (inps.ref_y and inps.ref_x and 'REF_Y' in atr.keys()
            and inps.ref_y == int(atr['REF_Y'])
            and inps.ref_x == int(atr['REF_X']) and not inps.force):
        print(
            'Same reference pixel is already selected/saved in file, skip updating.'
        )
        return inps.file

    # Get stack and mask
    stack = ut.temporal_average(inps.file,
                                datasetName='unwrapPhase',
                                updateMode=True,
                                outFile=False)[0]
    mask = np.multiply(~np.isnan(stack), stack != 0.)
    if np.nansum(mask) == 0.0:
        raise ValueError(
            'no pixel found with valid phase value in all datasets.')

    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x] == 0.:
        raise ValueError(
            'reference y/x have nan value in some dataset. Please re-select.')

    # Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.method == 'maxCoherence':
            inps.ref_y, inps.ref_x = select_max_coherence_yx(
                coh_file=inps.coherenceFile,
                mask=mask,
                min_coh=inps.minCoherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps, mask)
    if not inps.ref_y or not inps.ref_x:
        raise ValueError('ERROR: no reference y/x found.')

    # Seeding file with reference y/x
    atrNew = reference_point_attribute(atr, y=inps.ref_y, x=inps.ref_x)
    if not inps.write_data:
        print('Add/update ref_x/y attribute to file: ' + inps.file)
        print(atrNew)
        inps.outfile = ut.add_attribute(inps.file, atrNew)

    else:
        if not inps.outfile:
            inps.outfile = '{}_seeded{}'.format(
                os.path.splitext(inps.file)[0],
                os.path.splitext(inps.file)[1])
        k = atr['FILE_TYPE']

        # For ifgramStack file, update data value directly, do not write to new file
        if k == 'ifgramStack':
            f = h5py.File(inps.file, 'r+')
            ds = f[k].get('unwrapPhase')
            for i in range(ds.shape[0]):
                ds[i, :, :] -= ds[i, inps.ref_y, inps.ref_x]
            f[k].attrs.update(atrNew)
            f.close()
            inps.outfile = inps.file

        elif k == 'timeseries':
            data = timeseries(inps.file).read()
            for i in range(data.shape[0]):
                data[i, :, :] -= data[i, inps.ref_y, inps.ref_x]
            obj = timeseries(inps.outfile)
            atr.update(atrNew)
            obj.write2hdf5(data=data, metadata=atr, refFile=inps.file)
            obj.close()
        else:
            print('writing >>> ' + inps.outfile)
            data = readfile.read(inps.file)[0]
            data -= data[inps.ref_y, inps.ref_x]
            atr.update(atrNew)
            writefile.write(data, out_file=inps.outfile, metadata=atr)
    ut.touch([inps.coherenceFile, inps.maskFile])
    return inps.outfile
Code example #17
def correct_local_oscilator_drift(fname, rg_dist_file=None, out_file=None):
    print('-' * 50)
    print('correct Local Oscillator Drift for Envisat using an empirical model (Marinkovic and Larsen, 2013)')
    print('-' * 50)
    atr = readfile.read_attribute(fname)

    # Check Sensor Type
    platform = atr['PLATFORM']
    print('platform: ' + platform)
    if not platform.lower() in ['env', 'envisat']:
        print('No need to correct LOD for ' + platform)
        sys.exit(1)

    # output file name
    if not out_file:
        out_file = '{}_LODcor{}'.format(
            os.path.splitext(fname)[0],
            os.path.splitext(fname)[1])

    # Get LOD ramp rate from empirical model
    if not rg_dist_file:
        print('calculate range distance from file metadata')
        rg_dist = get_relative_range_distance(atr)
    else:
        print('read range distance from file: %s' % (rg_dist_file))
        rg_dist = readfile.read(rg_dist_file,
                                datasetName='slantRangeDistance',
                                print_msg=False)[0]
        rg_dist -= rg_dist[int(atr['REF_Y']), int(atr['REF_X'])]
    ramp_rate = np.array(rg_dist * 3.87e-7, np.float32)

    # Correct LOD Ramp for Input fname
    range2phase = -4 * np.pi / float(atr['WAVELENGTH'])
    k = atr['FILE_TYPE']
    if k == 'timeseries':
        # read
        obj = timeseries(fname)
        obj.open()
        data = obj.read()

        # correct LOD
        diff_year = np.array(obj.yearList)
        diff_year -= diff_year[obj.refIndex]
        for i in range(data.shape[0]):
            data[i, :, :] -= ramp_rate * diff_year[i]

        # write
        obj_out = timeseries(out_file)
        obj_out.write2hdf5(data, refFile=fname)

    elif k in ['.unw']:
        data, atr = readfile.read(fname)

        dates = ptime.yyyymmdd2years(ptime.yyyymmdd(atr['DATE12'].split('-')))
        dt = dates[1] - dates[0]
        data -= ramp_rate * range2phase * dt

        writefile.write(data, out_file=out_file, metadata=atr)
    else:
        print('No need to correct for LOD for %s file' % (k))
    return out_file
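
The empirical LOD model above scales linearly with relative range distance and elapsed time; a small numeric illustration of the ramp applied in the timeseries branch (the distances and time span are made up):

import numpy as np

rg_dist = np.array([0., 10e3, 20e3])    # relative range distance [m], hypothetical
ramp_rate = rg_dist * 3.87e-7           # ramp rate per year (Marinkovic and Larsen, 2013)
diff_year = 2.5                         # years since the reference acquisition
print(ramp_rate * diff_year)            # -> approximately [0, 0.009675, 0.01935] m of range drift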
Code example #18
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    # 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1, atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print('Input 1st file is ' + k1)

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width = int(round((east - west) / lon_step))
    length = int(round((south - north) / lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incidence angle
    u_los = np.zeros((2, width * length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print('---------------------')
        print('reading ' + fname)
        atr = readfile.read_attribute(fname)

        [x0, x1] = ut.coord_geo2radar([west, east], atr, 'lon')
        [y0, y1] = ut.coord_geo2radar([north, south], atr, 'lat')
        V = readfile.read(fname, box=(x0, y0, x1, y1))[0]
        u_los[i, :] = V.flatten()

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print('heading angle: ' + str(heading_angle))
        heading_angle *= np.pi / 180.
        heading.append(heading_angle)

        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        inc_angle *= np.pi / 180.
        incidence.append(inc_angle)

    # 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could be easily modified to support multiple view geometry
    # (e.g. two adjacent tracks from asc & desc) to resolve 3D

    # Design matrix
    A = np.zeros((2, 2))
    for i in range(len(inps.file)):
        A[i, 0] = np.cos(incidence[i])
        A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0, :], (length, width))
    u_h = np.reshape(u_vh[1, :], (length, width))

    # 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print('---------------------')
    outname = inps.outfile[0]
    print('writing   vertical component to file: ' + outname)
    writefile.write(u_v, out_file=outname, metadata=atr)

    outname = inps.outfile[1]
    print('writing horizontal component to file: ' + outname)
    writefile.write(u_h, out_file=outname, metadata=atr)

    print('Done.')
    return
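
The 2D projection above reduces to a 2x2 system per pixel; a synthetic sketch of the same design-matrix and pseudo-inverse step, with made-up ascending/descending geometry and LOS values:

import numpy as np

incidence = np.deg2rad([34., 39.])      # hypothetical incidence angles [rad]
heading = np.deg2rad([350., 190.])      # hypothetical heading angles [rad]
az = 0.                                 # azimuth of the horizontal component

# same construction as in main(): A[i] = [cos(theta_i), sin(theta_i)*sin(alpha_i - az)]
A = np.zeros((2, 2))
for i in range(2):
    A[i, 0] = np.cos(incidence[i])
    A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - az)

u_los = np.array([[-0.01], [0.02]])     # made-up LOS displacements [m]
u_v, u_h = np.dot(np.linalg.pinv(A), u_los).flatten()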
Code example #19
def diff_file(file1, file2, outFile=None, force=False):
    """Subtraction/difference of two input files"""
    if not outFile:
        fbase, fext = os.path.splitext(file1)
        if len(file2) > 1:
            raise ValueError('Output file name is needed for more than 2 files input.')
        outFile = '{}_diff_{}{}'.format(fbase, os.path.splitext(os.path.basename(file2[0]))[0], fext)
    print('{} - {} --> {}'.format(file1, file2, outFile))

    # Read basic info
    atr1 = readfile.read_attribute(file1)
    k1 = atr1['FILE_TYPE']
    atr2 = readfile.read_attribute(file2[0])
    k2 = atr2['FILE_TYPE']
    print('input files are: {} and {}'.format(k1, k2))

    if k1 == 'timeseries':
        if k2 not in ['timeseries', 'giantTimeseries']:
            raise Exception('Input multiple dataset files are not the same file type!')
        if len(file2) > 1:
            raise Exception(('Only 2 files subtraction is supported for time-series file,'
                             ' {} input.'.format(len(file2)+1)))

        obj1 = timeseries(file1)
        obj1.open()
        if k2 == 'timeseries':
            obj2 = timeseries(file2[0])
            unit_fac = 1.
        elif k2 == 'giantTimeseries':
            obj2 = giantTimeseries(file2[0])
            unit_fac = 0.001
        obj2.open()
        ref_date, ref_y, ref_x = _check_reference(obj1.metadata, obj2.metadata)

        # check dates shared by two timeseries files
        dateListShared = [i for i in obj1.dateList if i in obj2.dateList]
        dateShared = np.ones((obj1.numDate), dtype=np.bool_)
        if dateListShared != obj1.dateList:
            print('WARNING: {} does not contain all dates in {}'.format(file2, file1))
            if force:
                dateExcluded = list(set(obj1.dateList) - set(dateListShared))
                print('Continue and enforce the differencing for their shared dates only.')
                print('\twith following dates are ignored for differencing:\n{}'.format(dateExcluded))
                dateShared[np.array([obj1.dateList.index(i) for i in dateExcluded])] = 0
            else:
                raise Exception('To enforce the differencing anyway, use --force option.')

        # consider different reference_date/pixel
        data2 = readfile.read(file2[0], datasetName=dateListShared)[0] * unit_fac
        if ref_date:
            data2 -= np.tile(data2[obj2.dateList.index(ref_date), :, :],
                             (data2.shape[0], 1, 1))
        if ref_y and ref_x:
            data2 -= np.tile(data2[:, ref_y, ref_x].reshape(-1, 1, 1),
                             (1, data2.shape[1], data2.shape[2]))

        data = obj1.read()
        mask = data == 0.
        data[dateShared] -= data2
        data[mask] = 0.               # Do not change zero phase value
        del data2
        writefile.write(data, out_file=outFile, ref_file=file1)

    elif all(i == 'ifgramStack' for i in [k1, k2]):
        obj1 = ifgramStack(file1)
        obj1.open()
        obj2 = ifgramStack(file2[0])
        obj2.open()
        dsNames = list(set(obj1.datasetNames) & set(obj2.datasetNames))
        if len(dsNames) == 0:
            raise ValueError('no common dataset between two files!')
        dsName = [i for i in ifgramDatasetNames if i in dsNames][0]

        # read data
        print('reading {} from file {} ...'.format(dsName, file1))
        data1 = readfile.read(file1, datasetName=dsName)[0]
        print('reading {} from file {} ...'.format(dsName, file2[0]))
        data2 = readfile.read(file2[0], datasetName=dsName)[0]

        # consider reference pixel
        if 'unwrapphase' in dsName.lower():
            print('referencing to pixel ({},{}) ...'.format(obj1.refY, obj1.refX))
            ref1 = data1[:, obj1.refY, obj1.refX]
            ref2 = data2[:, obj2.refY, obj2.refX]
            for i in range(data1.shape[0]):
                data1[i,:][data1[i, :] != 0.] -= ref1[i]
                data2[i,:][data2[i, :] != 0.] -= ref2[i]

        # operation and ignore zero values
        data1[data1 == 0] = np.nan
        data2[data2 == 0] = np.nan
        data = data1 - data2
        del data1, data2
        data[np.isnan(data)] = 0.

        # write to file
        dsDict = {}
        dsDict[dsName] = data
        writefile.write(dsDict, out_file=outFile, ref_file=file1)

    # Single dataset file
    else:
        data1 = readfile.read(file1)[0]
        data = np.array(data1, data1.dtype)
        for fname in file2:
            data2 = readfile.read(fname)[0]
            data = np.array(data, dtype=np.float32) - np.array(data2, dtype=np.float32)
            data = np.array(data, data1.dtype)
        print('writing >>> '+outFile)
        writefile.write(data, out_file=outFile, metadata=atr1)

    return outFile
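
A usage sketch of the call signature above; note that file2 is a list, and the file names here are placeholders:

# difference two time-series files; the default output name becomes
# timeseries_diff_timeseries_ECMWF.h5
out = diff_file('timeseries.h5', ['timeseries_ECMWF.h5'])

# enforce differencing on shared dates only, when the 2nd file misses some dates
out = diff_file('timeseries.h5', ['timeseries_ECMWF.h5'], force=True)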
Code example #20
def read_data(inps):
    # metadata
    atr = readfile.read_attribute(inps.file)
    range2phase = -4 * np.pi / float(atr['WAVELENGTH'])

    # change reference pixel
    if inps.ref_yx:
        atr['REF_Y'] = inps.ref_yx[0]
        atr['REF_X'] = inps.ref_yx[1]
        print('change reference point to y/x: {}'.format(inps.ref_yx))

    # various file types
    print('read {} from file {}'.format(inps.dset, inps.file))
    k = atr['FILE_TYPE']
    if k == 'velocity':
        # read/prepare data
        data = readfile.read(inps.file)[0] * range2phase
        print(
            "converting velocity to an interferogram with one year temporal baseline"
        )
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    elif k == 'timeseries':
        # date1 and date2
        if '_' in inps.dset:
            date1, date2 = ptime.yyyymmdd(inps.dset.split('_'))
        else:
            date1 = atr['REF_DATE']
            date2 = ptime.yyyymmdd(inps.dset)

        # read/prepare data
        data = readfile.read(inps.file, datasetName=date2)[0]
        data -= readfile.read(inps.file, datasetName=date1)[0]
        print('converting range to phase')
        data *= range2phase
        if inps.ref_yx:
            data -= data[inps.ref_yx[0], inps.ref_yx[1]]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        atr['FILE_TYPE'] = '.unw'
        atr['UNIT'] = 'radian'

        # output filename
        if not inps.outfile:
            inps.outfile = '{}_{}.unw'.format(date1, date2)
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    elif k == 'HDFEOS':
        dname = inps.dset.split('-')[0]

        # date1 and date2
        if dname == 'displacement':
            if '-' in inps.dset:
                suffix = inps.dset.split('-')[1]
                if '_' in suffix:
                    date1, date2 = ptime.yyyymmdd(suffix.split('_'))
                else:
                    date1 = atr['REF_DATE']
                    date2 = ptime.yyyymmdd(suffix)
            else:
                raise ValueError(
                    "No '-' in input dataset! It is required for {}".format(
                        dname))
        else:
            date_list = HDFEOS(inps.file).get_date_list()
            date1 = date_list[0]
            date2 = date_list[-1]
        date12 = '{}_{}'.format(date1, date2)

        # read / prepare data
        slice_list = readfile.get_slice_list(inps.file)
        if 'displacement' in inps.dset:
            # read/prepare data
            slice_name1 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date1))[0][0]
            slice_name2 = view.check_dataset_input(
                slice_list, '{}-{}'.format(dname, date2))[0][0]
            data = readfile.read(inps.file, datasetName=slice_name1)[0]
            data -= readfile.read(inps.file, datasetName=slice_name2)[0]
            print('converting range to phase')
            data *= range2phase
            if inps.ref_yx:
                data -= data[inps.ref_yx[0], inps.ref_yx[1]]
        else:
            slice_name = view.check_dataset_input(slice_list, inps.dset)[0][0]
            data = readfile.read(inps.file, datasetName=slice_name)[0]

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname == 'displacement':
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif 'coherence' in dname.lower():
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'height':
            atr['FILE_TYPE'] = '.dem'
            atr['DATA_TYPE'] = 'int16'
        else:
            raise ValueError('unrecognized input dataset type: {}'.format(
                inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])

    elif k == 'ifgramStack':
        dname, date12 = inps.dset.split('-')
        date1, date2 = date12.split('_')

        # read / prepare data
        data = readfile.read(inps.file, datasetName=inps.dset)[0]
        if dname.startswith('unwrapPhase'):
            if 'REF_X' in atr.keys():
                data -= data[int(atr['REF_Y']), int(atr['REF_X'])]
                print('consider reference pixel in y/x: ({}, {})'.format(
                    atr['REF_Y'], atr['REF_X']))
            else:
                print('No REF_Y/X found.')

        # metadata
        atr['DATE'] = date1[2:8]
        atr['DATE12'] = '{}-{}'.format(date1[2:8], date2[2:8])
        if dname.startswith('unwrapPhase'):
            atr['FILE_TYPE'] = '.unw'
            atr['UNIT'] = 'radian'
        elif dname == 'coherence':
            atr['FILE_TYPE'] = '.cor'
            atr['UNIT'] = '1'
        elif dname == 'wrapPhase':
            atr['FILE_TYPE'] = '.int'
            atr['UNIT'] = 'radian'
        else:
            raise ValueError('unrecognized dataset type: {}'.format(inps.dset))

        # output filename
        if not inps.outfile:
            inps.outfile = '{}{}'.format(date12, atr['FILE_TYPE'])
            if inps.file.startswith('geo_'):
                inps.outfile = 'geo_' + inps.outfile

    else:
        # temporal coherence
        if 'coherence' in k.lower():
            atr['FILE_TYPE'] = '.cor'

        elif k in ['mask']:
            atr['FILE_TYPE'] = '.msk'
            atr['DATA_TYPE'] = 'byte'

        elif k in ['geometry'] and inps.dset == 'height':
            if 'Y_FIRST' in atr.keys():
                atr['FILE_TYPE'] = '.dem'
                atr['DATA_TYPE'] = 'int16'
            else:
                atr['FILE_TYPE'] = '.hgt'
            atr['UNIT'] = 'm'
        else:
            atr['FILE_TYPE'] = '.unw'

        if not inps.outfile:
            inps.outfile = '{}{}'.format(
                os.path.splitext(inps.file)[0], atr['FILE_TYPE'])

    atr['PROCESSOR'] = 'roipac'
    return data, atr, inps.outfile
Code example #21
def main(argv):
    try:
        fname = argv[0]
    except:
        usage()
        sys.exit(1)

    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print('input is ' + k + ' file: ' + fname)

    try:
        mat_file = argv[1]
    except:
        mat_file = os.path.splitext(fname)[0] + '.mat'
    print('writing >>> ' + mat_file)

    #####
    if k not in ['timeseries', 'ifgramStack']:
        data = readfile.read(fname)[0]

        V = {}
        V['time_range'] = ''
        try:
            V['x_first'] = float(atr['X_FIRST'])
            V['y_first'] = float(atr['Y_FIRST'])
            V['x_step'] = float(atr['X_STEP'])
            V['y_step'] = float(atr['Y_STEP'])
            V['x_unit'] = atr['X_UNIT']
            V['y_unit'] = atr['Y_UNIT']
        except:
            V['x_first'] = 1
            V['y_first'] = 1
            V['x_step'] = 1
            V['y_step'] = 1
            V['x_unit'] = ''
            V['y_unit'] = ''

        try:
            V['wavelength'] = float(atr['WAVELENGTH'])
        except:
            print('WAVELENGTH was not found')
        try:
            V['sat_height'] = float(atr['HEIGHT'])
        except:
            print('HEIGHT was not found')

        try:
            V['near_range'] = float(atr['STARTING_RANGE'])
        except:
            print('STARTING_RANGE was not found')

        V['far_range'] = ''

        try:
            V['near_LookAng'] = float(atr['LOOK_REF1'])
        except:
            print('LOOK_REF1 was not found')
        try:
            V['far_LookAng'] = float(atr['LOOK_REF2'])
        except:
            print('LOOK_REF2 was not found')

        V['earth_radius'] = ''
        V['Unit'] = 'm/yr'
        V['bperptop'] = ''
        V['bperpbot'] = ''
        V['sat'] = ''
        try:
            V['width'] = int(atr['WIDTH'])
        except:
            print('WIDTH was not found')

        try:
            V['file_length'] = int(atr['LENGTH'])
        except:
            print('LENGTH was not found')
        V['t'] = ''
        V['date'] = ''
        V['date_years'] = ''
        try:
            V['sat'] = atr['satellite']
        except:
            V['sat'] = ''

        ########################################################
        V['data'] = data
        sio.savemat(mat_file, {k: V})

    elif k == 'timeseries':
        f = h5py.File(fname, 'r')
        epochList = sorted(f['timeseries'].keys())
        data_dict = {}
        for epoch in epochList:
            print(epoch)
            d = f['timeseries'].get(epoch)
            ts = {}
            ts['data'] = d[0:d.shape[0], 0:d.shape[1]]
            try:
                ts['x_first'] = float(atr['X_FIRST'])
                ts['y_first'] = float(atr['Y_FIRST'])
                ts['x_step'] = float(atr['X_STEP'])
                ts['y_step'] = float(atr['Y_STEP'])
                ts['x_unit'] = atr['X_UNIT']
                ts['y_unit'] = atr['Y_UNIT']
            except:
                ts['x_first'] = 1
                ts['y_first'] = 1
                ts['x_step'] = 1
                ts['y_step'] = 1
                ts['x_unit'] = ''
                ts['y_unit'] = ''

            ts['wavelength'] = float(atr['WAVELENGTH'])
            ts['sat_height'] = float(atr['HEIGHT'])
            ts['near_range'] = float(atr['STARTING_RANGE'])
            ts['far_range'] = float(atr['STARTING_RANGE1'])
            ts['near_LookAng'] = float(atr['LOOK_REF1'])
            ts['far_LookAng'] = float(atr['LOOK_REF2'])
            ts['earth_radius'] = float(atr['EARTH_RADIUS'])
            ts['Unit'] = 'm'
            ts['bperptop'] = float(atr['P_BASELINE_TOP_HDR'])
            ts['bperpbot'] = float(atr['P_BASELINE_BOTTOM_HDR'])
            ts['sat'] = atr['PLATFORM']
            ts['width'] = int(atr['WIDTH'])
            ts['length'] = int(atr['LENGTH'])
            ts['t'] = np.round(
                (yyyymmdd2years(epoch) - yyyymmdd2years(epochList[0])) * 365)
            ts['date'] = epoch
            ts['date_years'] = yyyymmdd2years(epoch)

            data_dict['t' + str(epoch)] = ts

        data_dict['Number_of_epochs'] = len(epochList)
        data_dict['epoch_dates'] = epochList
        sio.savemat(mat_file, {k: data_dict})
        f.close()
    return mat_file
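
For reference, scipy.io maps each nested Python dict to a MATLAB struct, which is the pattern used above; a minimal sketch with hypothetical names:

import numpy as np
import scipy.io as sio

V = {'data': np.zeros((3, 4), np.float32), 'Unit': 'm/yr'}
sio.savemat('velocity.mat', {'velocity': V})   # loads in MATLAB as struct 'velocity'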
Code example #22
File: tsview_legacy.py, Project: fossabot/PySAR
def main(iargs=None):
    # Actual code.
    inps = cmd_line_parse(iargs)

    # Time Series Info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is ' + k + ': ' + inps.timeseries_file)
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    obj = timeseries(inps.timeseries_file)
    obj.open()
    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['GIANT_TS']:
        dateList = [
            dt.fromordinal(int(i)).strftime('%Y%m%d')
            for i in h5['dates'][:].tolist()
        ]
    else:
        dateList = obj.dateList
    date_num = len(dateList)
    inps.dates, inps.yearList = ptime.date_list2vector(dateList)

    # Read exclude dates
    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []
        if input_ex_date:
            for ex_date in input_ex_date:
                if os.path.isfile(ex_date):
                    ex_date = ptime.read_date_list(ex_date)
                else:
                    ex_date = [ptime.yyyymmdd(ex_date)]
                inps.ex_date_list += list(
                    set(ex_date) - set(inps.ex_date_list))
            # delete dates not existed in input file
            inps.ex_date_list = sorted(
                list(set(inps.ex_date_list).intersection(dateList)))
            inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
            inps.ex_idx_list = sorted(
                [dateList.index(i) for i in inps.ex_date_list])
            print('exclude date:' + str(inps.ex_date_list))

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        if inps.ex_date_list:
            inps.zero_idx = min(
                list(set(range(date_num)) - set(inps.ex_idx_list)))
        else:
            inps.zero_idx = 0

    # File Size
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])
    print('data size in [y0,y1,x0,x1]: [%d, %d, %d, %d]' %
          (0, length, 0, width))
    try:
        ullon = float(atr['X_FIRST'])
        ullat = float(atr['Y_FIRST'])
        lon_step = float(atr['X_STEP'])
        lat_step = float(atr['Y_STEP'])
        lrlon = ullon + width * lon_step
        lrlat = ullat + length * lat_step
        print('data size in [lat0,lat1,lon0,lon1]: [%.4f, %.4f, %.4f, %.4f]' %
              (lrlat, ullat, ullon, lrlon))
    except:
        pass

    # Initial Pixel Coord
    if inps.lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.lalo[1] - ullon) / lon_step + 0.5)
        inps.yx = [y, x]

    if inps.ref_lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.ref_lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.ref_lalo[1] - ullon) / lon_step + 0.5)
        inps.ref_yx = [y, x]

    # Display Unit
    if inps.disp_unit == 'cm':
        inps.unit_fac = 100.0
    elif inps.disp_unit == 'm':
        inps.unit_fac = 1.0
    elif inps.disp_unit == 'dm':
        inps.unit_fac = 10.0
    elif inps.disp_unit == 'mm':
        inps.unit_fac = 1000.0
    elif inps.disp_unit == 'km':
        inps.unit_fac = 0.001
    else:
        raise ValueError('Un-recognized unit: ' + inps.disp_unit)
    if k in ['GIANT_TS']:
        print('data    unit: mm')
        inps.unit_fac *= 0.001
    else:
        print('data    unit: m')
    print('display unit: ' + inps.disp_unit)

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        inps.flip_lr = False

    # Mask file
    if not inps.mask_file:
        if os.path.basename(inps.timeseries_file).startswith('geo_'):
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']
        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except:
            inps.mask_file = None
    try:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except:
        mask = None
        print('No mask used.')

    # Initial Map
    d_v = readfile.read(
        inps.timeseries_file,
        datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(
            inps.timeseries_file, datasetName=inps.ref_date)[0] * inps.unit_fac
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim
    print('Initial data range: ' + str(data_lim))
    print('Display data range: ' + str(inps.ylim_mat))

    # Fig 1 - Cumulative Displacement Map
    if not inps.disp_fig:
        plt.switch_backend('Agg')

    fig_v = plt.figure('Cumulative Displacement')

    # Axes 1
    #ax_v = fig_v.add_subplot(111)
    # ax_v.set_position([0.125,0.25,0.75,0.65])
    # This works on OSX. Original worked on Linux.
    # rect[left, bottom, width, height]
    ax_v = fig_v.add_axes([0.125, 0.25, 0.75, 0.65])
    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
        ax_v = pp.plot_dem_yx(ax_v, dem)
    img = ax_v.imshow(d_v,
                      cmap=inps.colormap,
                      clim=inps.ylim_mat,
                      interpolation='nearest')

    # Reference Pixel
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        ax_v.plot(inps.ref_yx[1], inps.ref_yx[0], 'ks', ms=6)
    else:
        try:
            ax_v.plot(int(atr['REF_X']), int(atr['REF_Y']), 'ks', ms=6)
        except:
            pass

    # Initial Pixel
    if inps.yx:
        ax_v.plot(inps.yx[1], inps.yx[0], 'ro', markeredgecolor='black')

    ax_v.set_xlim(0, np.shape(d_v)[1])
    ax_v.set_ylim(np.shape(d_v)[0], 0)

    # Status Bar
    def format_coord(x, y):
        col = int(x + 0.5)
        row = int(y + 0.5)
        if 0 <= col < width and 0 <= row < length:
            z = d_v[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return 'x=%.0f, y=%.0f, value=%.4f, lon=%.4f, lat=%.4f' % (
                    x, y, z, lon, lat)
            except:
                return 'x=%.0f, y=%.0f, value=%.4f' % (x, y, z)
        return 'x=%.0f, y=%.0f' % (x, y)

    ax_v.format_coord = format_coord

    # Title and Axis Label
    ax_v.set_title(
        'N = %d, Time = %s' %
        (inps.epoch_num, inps.dates[inps.epoch_num].strftime('%Y-%m-%d')))
    if 'Y_FIRST' not in atr.keys():
        ax_v.set_xlabel('Range')
        ax_v.set_ylabel('Azimuth')

    # Flip axis
    if inps.flip_lr:
        ax_v.invert_xaxis()
        print('flip map left and right')
    if inps.flip_ud:
        ax_v.invert_yaxis()
        print('flip map up and down')

    # Colorbar
    cbar = fig_v.colorbar(img, orientation='vertical')
    cbar.set_label('Displacement [%s]' % inps.disp_unit)

    # Axes 2 - Time Slider
    ax_time = fig_v.add_axes([0.125, 0.1, 0.6, 0.07],
                             facecolor='lightgoldenrodyellow',
                             yticks=[])
    tslider = Slider(ax_time,
                     'Years',
                     inps.yearList[0],
                     inps.yearList[-1],
                     valinit=inps.yearList[inps.epoch_num])
    tslider.ax.bar(inps.yearList,
                   np.ones(len(inps.yearList)),
                   facecolor='black',
                   width=0.01,
                   ecolor=None)
    tslider.ax.set_xticks(
        np.round(
            np.linspace(inps.yearList[0], inps.yearList[-1], num=5) * 100) /
        100)

    def time_slider_update(val):
        """Update Displacement Map using Slider"""
        timein = tslider.val
        idx_nearest = np.argmin(np.abs(np.array(inps.yearList) - timein))
        ax_v.set_title(
            'N = %d, Time = %s' %
            (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
        d_v = h5[dateList[idx_nearest]][:] * inps.unit_fac
        if inps.ref_date:
            d_v -= inps.ref_d_v
        if mask is not None:
            d_v = mask_matrix(d_v, mask)
        if inps.ref_yx:
            d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        img.set_data(d_v)
        fig_v.canvas.draw()

    tslider.on_changed(time_slider_update)

    # Fig 2 - Time Series Displacement - Point
    fig_ts = plt.figure('Time series - point', figsize=inps.fig_size)
    ax_ts = fig_ts.add_subplot(111)

    # Read Error List
    inps.error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file,
                                       dtype=bytes).astype(str)
        inps.error_ts = error_fileContent[:, 1].astype(
            np.float32) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = np.array([e_ts[i] for i in inps.ex_idx_list])
            inps.error_ts = np.array([
                e_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])

    def plot_timeseries_errorbar(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            (_, caps, _) = ax.errorbar(inps.ex_dates,
                                       ex_d_ts,
                                       yerr=inps.ex_error_ts,
                                       fmt='-o',
                                       color='gray',
                                       ms=inps.marker_size,
                                       lw=0,
                                       alpha=1,
                                       mfc='gray',
                                       elinewidth=inps.edge_width,
                                       ecolor='black',
                                       capsize=inps.marker_size * 0.5)
            for cap in caps:
                cap.set_markeredgewidth(inps.edge_width)
        # Plot kept dates
        (_, caps, _) = ax.errorbar(dates,
                                   d_ts,
                                   yerr=inps.error_ts,
                                   fmt='-o',
                                   ms=inps.marker_size,
                                   lw=0,
                                   alpha=1,
                                   elinewidth=inps.edge_width,
                                   ecolor='black',
                                   capsize=inps.marker_size * 0.5)
        for cap in caps:
            cap.set_markeredgewidth(inps.edge_width)
        return ax

    def plot_timeseries_scatter(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            ax.scatter(inps.ex_dates,
                       ex_d_ts,
                       s=inps.marker_size**2,
                       color='gray')  # color='crimson'
        # Plot kept dates
        ax.scatter(dates, d_ts, s=inps.marker_size**2)
        return ax

    def update_timeseries(ax_ts, y, x):
        """Plot point time series displacement at pixel [y, x]"""
        d_ts = read_timeseries_yx(
            inps.timeseries_file, y, x, ref_yx=inps.ref_yx) * inps.unit_fac
        # for date in dateList:
        #    d = h5[k].get(date)[y,x]
        #    if inps.ref_yx:
        #        d -= h5[k].get(date)[inps.ref_yx[0], inps.ref_yx[1]]
        #    d_ts.append(d*inps.unit_fac)

        if inps.zero_first:
            d_ts -= d_ts[inps.zero_idx]

        ax_ts.cla()
        if inps.error_file:
            ax_ts = plot_timeseries_errorbar(ax_ts, d_ts, inps)
        else:
            ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)
        if inps.ylim:
            ax_ts.set_ylim(inps.ylim)
        for tick in ax_ts.yaxis.get_major_ticks():
            tick.label.set_fontsize(inps.font_size)

        # Title
        title_ts = 'Y = %d, X = %d' % (y, x)
        try:
            lat = ullat + y * lat_step
            lon = ullon + x * lon_step
            title_ts += ', lat = %.4f, lon = %.4f' % (lat, lon)
        except:
            pass
        if inps.disp_title:
            ax_ts.set_title(title_ts)

        ax_ts = pp.auto_adjust_xaxis_date(ax_ts,
                                          inps.yearList,
                                          fontSize=inps.font_size)[0]
        ax_ts.set_xlabel('Time', fontsize=inps.font_size)
        ax_ts.set_ylabel('Displacement [%s]' % inps.disp_unit,
                         fontsize=inps.font_size)

        fig_ts.canvas.draw()

        # Print to terminal
        print('\n---------------------------------------')
        print(title_ts)
        print(d_ts)

        # Slope estimation
        if inps.ex_date_list:
            inps.yearList_kept = [
                inps.yearList[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ]
            d_ts_kept = [
                d_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ]
            d_slope = stats.linregress(np.array(inps.yearList_kept),
                                       np.array(d_ts_kept))
        else:
            d_slope = stats.linregress(np.array(inps.yearList), np.array(d_ts))
        print('linear velocity: %.2f +/- %.2f [%s/yr]' %
              (d_slope[0], d_slope[4], inps.disp_unit))

        return d_ts

    # Initial point time series plot
    if inps.yx:
        d_ts = update_timeseries(ax_ts, inps.yx[0], inps.yx[1])
    else:
        d_ts = np.zeros(len(inps.yearList))
        ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)

    def plot_timeseries_event(event):
        """Event function to get y/x from button press"""
        if event.inaxes != ax_v:
            return

        ii = int(event.ydata + 0.5)
        jj = int(event.xdata + 0.5)
        update_timeseries(ax_ts, ii, jj)

    # Output
    if inps.save_fig and inps.yx:
        print('save info for pixel ' + str(inps.yx))
        if not inps.fig_base:
            inps.fig_base = 'y%d_x%d' % (inps.yx[0], inps.yx[1])

        # TXT - point time series
        outName = inps.fig_base + '_ts.txt'
        header_info = 'timeseries_file=' + inps.timeseries_file
        header_info += '\ny=%d, x=%d' % (inps.yx[0], inps.yx[1])
        try:
            lat = ullat + inps.yx[0] * lat_step
            lon = ullon + inps.yx[1] * lon_step
            header_info += '\nlat=%.6f, lon=%.6f' % (lat, lon)
        except:
            pass
        if inps.ref_yx:
            header_info += '\nreference pixel: y=%d, x=%d' % (inps.ref_yx[0],
                                                              inps.ref_yx[1])
        else:
            header_info += '\nreference pixel: y=%s, x=%s' % (atr['REF_Y'],
                                                              atr['REF_X'])
        header_info += '\nunit=m'
        np.savetxt(outName,
                   list(zip(np.array(dateList),
                            np.array(d_ts) / inps.unit_fac)),
                   fmt='%s',
                   delimiter='    ',
                   header=header_info)
        print('save time series displacement in meter to ' + outName)

        # Figure - point time series
        outName = inps.fig_base + '_ts.pdf'
        fig_ts.savefig(outName,
                       bbox_inches='tight',
                       transparent=True,
                       dpi=inps.fig_dpi)
        print('save time series plot to ' + outName)

        # Figure - map
        outName = inps.fig_base + '_' + dateList[inps.epoch_num] + '.png'
        fig_v.savefig(outName,
                      bbox_inches='tight',
                      transparent=True,
                      dpi=inps.fig_dpi)
        print('save map plot to ' + outName)

    # Final linking of the canvas to the plots.
    cid = fig_v.canvas.mpl_connect('button_press_event', plot_timeseries_event)
    if inps.disp_fig:
        plt.show()
    fig_v.canvas.mpl_disconnect(cid)
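The interactive pattern above boils down to two matplotlib hooks: Slider.on_changed() to redraw the map when the time slider moves, and canvas.mpl_connect('button_press_event', ...) to redraw the point plot on a mouse click. Below is a minimal, self-contained sketch of that pattern using synthetic data; none of these names are part of PySAR.

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider

# synthetic time-series stack: 20 epochs of a 50x50 displacement map
years = np.linspace(2015.0, 2019.0, 20)
stack = np.cumsum(np.random.randn(20, 50, 50), axis=0)

fig, ax = plt.subplots()
img = ax.imshow(stack[0], interpolation='nearest')

# slider axes: rect = [left, bottom, width, height] in figure fraction
ax_time = fig.add_axes([0.125, 0.02, 0.6, 0.04])
tslider = Slider(ax_time, 'Years', years[0], years[-1], valinit=years[0])

def on_slide(val):
    # snap to the nearest acquisition and redraw the map
    idx = int(np.argmin(np.abs(years - val)))
    img.set_data(stack[idx])
    ax.set_title('N = %d, Time = %.2f' % (idx, years[idx]))
    fig.canvas.draw_idle()

def on_click(event):
    # print the time series of the clicked pixel
    if event.inaxes != ax:
        return
    row, col = int(event.ydata + 0.5), int(event.xdata + 0.5)
    print('y=%d, x=%d:' % (row, col), stack[:, row, col])

tslider.on_changed(on_slide)
fig.canvas.mpl_connect('button_press_event', on_click)
plt.show()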
Code example #23
0
def main(iargs=None):
    inps = cmd_line_parse(iargs)

    atr = readfile.read_attribute(inps.velocity_file)
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])

    # Check subset input
    if inps.subset_y:
        inps.subset_y = sorted(inps.subset_y)
        print('subset in y/azimuth direction: ' + str(inps.subset_y))
    else:
        inps.subset_y = [0, length]

    if inps.subset_x:
        inps.subset_x = sorted(inps.subset_x)
        print('subset in x/range direction: ' + str(inps.subset_x))
    else:
        inps.subset_x = [0, width]
    y0, y1 = inps.subset_y
    x0, x1 = inps.subset_x

    # Read velocity/rate
    velocity = readfile.read(inps.velocity_file)[0]
    print('read velocity file: ' + inps.velocity_file)

    k = 'interferograms'
    h5 = h5py.File(inps.ifgram_file, 'r')
    ifgram_list = sorted(h5[k].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    print('number of interferograms: ' + str(ifgram_num))

    # Select interferograms with unwrapping error
    if inps.percentage > 0.0:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        print('read mask for pixels with unwrapping error from file: ' +
              inps.mask_file)

        unw_err_ifgram_num = int(np.rint(inps.percentage * ifgram_num))
        unw_err_ifgram_idx = random.sample(list(range(ifgram_num)),
                                           unw_err_ifgram_num)
        unw_err_ifgram_list = [ifgram_list[i] for i in unw_err_ifgram_idx]
        unw_err_date12_list = [date12_list[i] for i in unw_err_ifgram_idx]
        print(
            'randomly choose the following %d interferograms with unwrapping error'
            % unw_err_ifgram_num)
        print(unw_err_date12_list)

        unit_unw_err = 2.0 * np.pi * mask
    else:
        unw_err_ifgram_list = []

    # Generate simulated interferograms
    m_dates = ptime.yyyymmdd([i.split('_')[0] for i in date12_list])
    s_dates = ptime.yyyymmdd([i.split('_')[1] for i in date12_list])
    range2phase = -4.0 * np.pi / float(atr['WAVELENGTH'])

    print('writing simulated interferograms file: ' + inps.outfile)
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group('interferograms')
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        # Get temporal baseline in years
        t1 = datetime.datetime(*time.strptime(m_dates[i], "%Y%m%d")[0:5])
        t2 = datetime.datetime(*time.strptime(s_dates[i], "%Y%m%d")[0:5])
        dt = (t2 - t1)
        dt = float(dt.days) / 365.25

        # Simulated interferograms with unwrapping error
        unw = velocity * dt * range2phase
        if ifgram in unw_err_ifgram_list:
            rand_int = random.sample(list(range(1, 10)), 1)[0]
            unw += rand_int * unit_unw_err
            print(ifgram + '  - add unwrapping error of %d*2*pi' % rand_int)
        else:
            print(ifgram)

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=unw[y0:y1, x0:x1])

        for key, value in h5[k][ifgram].attrs.items():
            gg.attrs[key] = value
        if ifgram in unw_err_ifgram_list:
            gg.attrs['unwrap_error'] = 'yes'
        else:
            gg.attrs['unwrap_error'] = 'no'
        gg.attrs['LENGTH'] = y1 - y0
        gg.attrs['WIDTH'] = x1 - x0
    h5.close()
    h5out.close()
    print('Done.')
    return inps.outfile
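The core of the simulation above is the range-to-phase conversion: for radar wavelength lambda, a line-of-sight range change dr maps to interferometric phase phi = -4*pi*dr/lambda, so a pixel moving at velocity v over a temporal baseline dt accumulates v*dt*(-4*pi/lambda) radians, plus an optional k*2*pi jump where an unwrapping error is injected. A hedged numeric check (all values illustrative):

import numpy as np

wavelength = 0.0562   # C-band wavelength in meters (illustrative)
velocity = 0.01       # 1 cm/yr line-of-sight velocity
dt = 0.5              # temporal baseline in years

range2phase = -4.0 * np.pi / wavelength
phase = velocity * dt * range2phase
print('simulated phase: %.2f rad' % phase)   # about -1.12 rad

# injecting an unwrapping error of k full cycles on masked pixels
k = 3
phase_with_error = phase + k * 2.0 * np.pi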
Code example #24
0
File: writefile.py Project: xuubing/PySAR
def write(datasetDict,
          out_file,
          metadata=None,
          ref_file=None,
          compression=None):
    """ Write one file.
    Parameters: datasetDict : dict of dataset, with key = datasetName and value = 2D/3D array, e.g.:
                    {'height'        : np.ones((   200,300), dtype=np.int16),
                     'incidenceAngle': np.ones((   200,300), dtype=np.float32),
                     'bperp'         : np.ones((80,200,300), dtype=np.float32),
                     ...}
                out_file : str, output file name
                metadata : dict of attributes
                ref_file : str, reference file to get auxiliary info
                compression : str, compression while writing to HDF5 file, None, "lzf", "gzip"
    Returns:    out_file : str
    Examples:   dsDict = dict()
                dsDict['velocity'] = np.ones((200,300), dtype=np.float32)
                write(datasetDict=dsDict, out_file='velocity.h5', metadata=atr)
    """
    # copy metadata to meta
    if metadata:
        meta = {key: value for key, value in metadata.items()}
    elif ref_file:
        meta = readfile.read_attribute(ref_file)
    else:
        raise ValueError('No metadata or reference file input.')

    # convert ndarray input into dict type
    if isinstance(datasetDict, np.ndarray):
        data = np.array(datasetDict, datasetDict.dtype)
        datasetDict = dict()
        datasetDict[meta['FILE_TYPE']] = data

    ext = os.path.splitext(out_file)[1].lower()
    # HDF5 File
    if ext in ['.h5', '.he5']:
        k = meta['FILE_TYPE']
        if k == 'timeseries':
            if ref_file is None:
                raise Exception(
                    'Can not write {} file without reference file!'.format(k))
            obj = timeseries(out_file)
            obj.write2hdf5(datasetDict[k],
                           metadata=meta,
                           refFile=ref_file,
                           compression=compression)

        else:
            if os.path.isfile(out_file):
                print('delete existing file: {}'.format(out_file))
                os.remove(out_file)

            print('create HDF5 file: {} with w mode'.format(out_file))
            with h5py.File(out_file, 'w') as f:
                # 1. Write input datasets
                maxDigit = max([len(i) for i in list(datasetDict.keys())])
                for dsName in datasetDict.keys():
                    data = datasetDict[dsName]
                    print((
                        'create dataset /{d:<{w}} of {t:<10} in size of {s:<20} '
                        'with compression={c}').format(d=dsName,
                                                       w=maxDigit,
                                                       t=str(data.dtype),
                                                       s=str(data.shape),
                                                       c=compression))
                    ds = f.create_dataset(dsName,
                                          data=data,
                                          chunks=True,
                                          compression=compression)

                # 2. Write extra/auxiliary datasets from ref_file
                if ref_file and os.path.splitext(ref_file)[1] in [
                        '.h5', '.he5'
                ]:
                    atr_ref = readfile.read_attribute(ref_file)
                    shape_ref = (int(atr_ref['LENGTH']), int(atr_ref['WIDTH']))
                    with h5py.File(ref_file, 'r') as fr:
                        dsNames = [
                            i for i in fr.keys()
                            if (i not in list(datasetDict.keys())
                                and isinstance(fr[i], h5py.Dataset)
                                and fr[i].shape[-2:] != shape_ref)
                        ]
                        maxDigit = max([len(i) for i in dsNames] + [maxDigit])
                        for dsName in dsNames:
                            ds = fr[dsName]
                            print((
                                'create dataset /{d:<{w}} of {t:<10} in size of {s:<10} '
                                'with compression={c}').format(d=dsName,
                                                               w=maxDigit,
                                                               t=str(ds.dtype),
                                                               s=str(ds.shape),
                                                               c=compression))
                            f.create_dataset(dsName,
                                             data=ds[:],
                                             chunks=True,
                                             compression=compression)

                # 3. metadata
                for key, value in meta.items():
                    f.attrs[key] = str(value)
                print('finished writing to {}'.format(out_file))

    # ISCE / ROI_PAC GAMMA / Image product
    else:
        key_list = list(datasetDict.keys())
        data_list = []
        for key in key_list:
            data_list.append(datasetDict[key])

        # Write Data File
        print('write {}'.format(out_file))
        if ext in ['.unw', '.cor', '.hgt']:
            write_float32(data_list[0], out_file)
            meta['DATA_TYPE'] = 'float32'
        elif ext == '.dem':
            write_real_int16(data_list[0], out_file)
            meta['DATA_TYPE'] = 'int16'
        elif ext in ['.trans']:
            write_float32(data_list[0], data_list[1], out_file)
        elif ext in ['.utm_to_rdc', '.UTM_TO_RDC']:
            data = np.zeros(datasetDict['rangeCoord'].shape,
                            dtype=np.complex64)
            data.real = datasetDict['rangeCoord']
            data.imag = datasetDict['azimuthCoord']
            data.astype('>c8').tofile(out_file)
        elif ext in ['.mli', '.flt']:
            write_real_float32(data_list[0], out_file)
        elif ext == '.slc':
            write_complex_int16(data_list[0], out_file)
        elif ext == '.int':
            write_complex64(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['float32', 'float']:
            write_real_float32(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['int16', 'short']:
            write_real_int16(data_list[0], out_file)
        elif meta['DATA_TYPE'].lower() in ['byte', 'bool']:
            write_byte(data_list[0], out_file)
        else:
            print('Un-supported file type: ' + ext)
            return 0

        # write metadata file
        write_roipac_rsc(meta, out_file + '.rsc', print_msg=True)
    return out_file
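A hedged usage sketch for the writer above, assuming the PySAR import layout (from pysar.utils import readfile, writefile) and an existing velocity.h5 product to borrow metadata from; the dataset values are synthetic:

import numpy as np
from pysar.utils import readfile, writefile

# reuse metadata from an existing product, then write two 2D datasets
atr = readfile.read_attribute('velocity.h5')
length, width = int(atr['LENGTH']), int(atr['WIDTH'])

dsDict = {'velocity':    np.zeros((length, width), dtype=np.float32),
          'velocityStd': np.ones((length, width), dtype=np.float32)}
writefile.write(dsDict, out_file='velocity_sim.h5',
                metadata=atr, compression='lzf')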
Code example #25
0
def read_inps_dict2ifgram_stack_dict_object(inpsDict):
    """Read input arguments into dict of ifgramStackDict object"""
    # inpsDict --> dsPathDict
    print('-' * 50)
    print('searching interferometric pairs info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    dsNumDict = {}
    for dsName in [
            i for i in ifgramDatasetNames
            if i in datasetName2templateKey.keys()
    ]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key])))
            if len(files) > 0:
                dsPathDict[dsName] = files
                dsNumDict[dsName] = len(files)
                print('{:<{width}}: {path}'.format(dsName,
                                                   width=maxDigit,
                                                   path=inpsDict[key]))

    # Check required dataset
    dsName0 = 'unwrapPhase'
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))
        return None

    # Check number of files for all dataset types
    dsNumList = list(dsNumDict.values())
    if any(i != dsNumList[0] for i in dsNumList):
        print(
            'WARNING: not all dataset types have the same number of files:')
        for key, value in dsNumDict.items():
            print('number of {:<{width}}: {num}'.format(key,
                                                        width=maxDigit,
                                                        num=value))
    print('number of files per type: {}'.format(dsNumList[0]))

    # TODO: check data dimension for all files

    # dsPathDict --> pairsDict --> stackObj
    dsNameList = list(dsPathDict.keys())
    pairsDict = {}
    for dsPath in dsPathDict[dsName0]:
        dates = ptime.yyyymmdd(
            readfile.read_attribute(dsPath)['DATE12'].split('-'))

        #####################################
        # A dictionary of data files for a given pair.
        # One pair may have several types of dataset,
        # e.g. ifgramPathDict = {'unwrapPhase': /pathToFile/filt.unw, 'iono': /PathToFile/iono.bil}
        # Every data file path must contain the master and slave dates,
        # either in the file name or in the folder name.

        ifgramPathDict = {}
        for i in range(len(dsNameList)):
            dsName = dsNameList[i]
            dsPath1 = dsPathDict[dsName][0]
            if all(d[2:8] in dsPath1 for d in dates):
                ifgramPathDict[dsName] = dsPath1
            else:
                dsPath2 = [
                    i for i in dsPathDict[dsName]
                    if all(d[2:8] in i for d in dates)
                ]
                if len(dsPath2) > 0:
                    ifgramPathDict[dsName] = dsPath2[0]
                else:
                    print('WARNING: {} file missing for pair {}'.format(
                        dsName, dates))
        ifgramObj = ifgramDict(dates=tuple(dates), datasetDict=ifgramPathDict)
        pairsDict[tuple(dates)] = ifgramObj

    if len(pairsDict) > 0:
        stackObj = ifgramStackDict(pairsDict=pairsDict)
    else:
        stackObj = None
    return stackObj
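The pairing logic above matches each dataset file to an interferogram by requiring both acquisition dates, in yymmdd form (hence d[2:8]), to appear somewhere in the file path. A standalone sketch of that test with hypothetical paths:

dates = ('20150103', '20150408')   # yyyymmdd master/slave dates

paths = ['/data/ifgrams/150103_150408/filt_fine.unw',   # hypothetical
         '/data/ifgrams/150103_150526/filt_fine.unw']

# keep only paths that contain both dates as yymmdd substrings
matched = [p for p in paths if all(d[2:8] in p for d in dates)]
print(matched)   # ['/data/ifgrams/150103_150408/filt_fine.unw']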
Code example #26
0
def check_inputs(inps):
    parser = create_parser()

    # output directories/files
    atr = dict()
    pysar_dir = None
    if inps.timeseries_file:
        atr = readfile.read_attribute(inps.timeseries_file)
        pysar_dir = os.path.dirname(inps.timeseries_file)
        if not inps.outfile:
            fbase = os.path.splitext(inps.timeseries_file)[0]
            inps.outfile = '{}_{}.h5'.format(fbase, inps.trop_model)
    elif inps.geom_file:
        atr = readfile.read_attribute(inps.geom_file)
        pysar_dir = os.path.join(os.path.dirname(inps.geom_file), '..')
    else:
        pysar_dir = os.path.abspath(os.getcwd())

    # trop_file
    inps.trop_file = os.path.join(pysar_dir,
                                  'INPUTS/{}.h5'.format(inps.trop_model))
    print('output tropospheric delay file: {}'.format(inps.trop_file))

    # hour
    if not inps.hour:
        if 'CENTER_LINE_UTC' in atr.keys():
            inps.hour = ptime.closest_weather_product_time(
                atr['CENTER_LINE_UTC'], inps.trop_model)
        else:
            parser.print_usage()
            raise Exception('no input for hour')
    print('time of closest available product: {}:00 UTC'.format(inps.hour))

    # date list
    if inps.timeseries_file:
        print('read date list from timeseries file: {}'.format(
            inps.timeseries_file))
        ts_obj = timeseries(inps.timeseries_file)
        ts_obj.open(print_msg=False)
        inps.date_list = ts_obj.dateList
    elif len(inps.date_list) == 1:
        if os.path.isfile(inps.date_list[0]):
            print('read date list from text file: {}'.format(
                inps.date_list[0]))
            inps.date_list = ptime.yyyymmdd(
                np.loadtxt(inps.date_list[0], dtype=bytes,
                           usecols=(0, )).astype(str).tolist())
        else:
            parser.print_usage()
            raise Exception('ERROR: input date list has fewer than 2 dates')

    # Grib data directory
    inps.grib_dir = os.path.join(inps.weather_dir, inps.trop_model)
    if not os.path.isdir(inps.grib_dir):
        os.makedirs(inps.grib_dir)
        print('making directory: ' + inps.grib_dir)

    # Date list to grib file list
    inps.grib_file_list = date_list2grib_file(inps.date_list, inps.hour,
                                              inps.trop_model, inps.grib_dir)

    if 'REF_Y' in atr.keys():
        inps.ref_yx = [int(atr['REF_Y']), int(atr['REF_X'])]
        print('reference pixel: {}'.format(inps.ref_yx))

    # Coordinate system: geocoded or not
    inps.geocoded = False
    if 'Y_FIRST' in atr.keys():
        inps.geocoded = True
    print('geocoded: {}'.format(inps.geocoded))

    # Prepare DEM, inc_angle, lat/lon file for PyAPS to read
    if inps.geom_file:
        geom_atr = readfile.read_attribute(inps.geom_file)
        print('converting DEM/incAngle for PyAPS to read')
        # DEM
        data = readfile.read(inps.geom_file,
                             datasetName='height',
                             print_msg=False)[0]
        inps.dem_file = 'pyapsDem.hgt'
        writefile.write(data, inps.dem_file, metadata=geom_atr)

        # inc_angle
        inps.inc_angle = readfile.read(inps.geom_file,
                                       datasetName='incidenceAngle',
                                       print_msg=False)[0]
        inps.inc_angle_file = 'pyapsIncAngle.flt'
        writefile.write(inps.inc_angle, inps.inc_angle_file, metadata=geom_atr)

        # latitude
        try:
            data = readfile.read(inps.geom_file,
                                 datasetName='latitude',
                                 print_msg=False)[0]
            print('converting lat for PyAPS to read')
            inps.lat_file = 'pyapsLat.flt'
            writefile.write(data, inps.lat_file, metadata=geom_atr)
        except:
            inps.lat_file = None

        # longitude
        try:
            data = readfile.read(inps.geom_file,
                                 datasetName='longitude',
                                 print_msg=False)[0]
            print('converting lon for PyAPS to read')
            inps.lon_file = 'pyapsLon.flt'
            writefile.write(data, inps.lon_file, metadata=geom_atr)
        except:
            inps.lon_file = None
    return inps, atr
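The hour selection above snaps the SAR acquisition time (CENTER_LINE_UTC, in seconds of the day) to the nearest hour at which the weather model publishes a product, typically every 6 hours for reanalysis models. The sketch below illustrates that rounding; it is not PyAPS's or PySAR's actual implementation:

def closest_product_hour(center_line_utc, interval_hours=6):
    """Round seconds-of-day to the nearest product hour,
    e.g. 00/06/12/18 UTC for 6-hourly weather models."""
    hour = float(center_line_utc) / 3600.0
    return int(round(hour / interval_hours)) * interval_hours % 24

print(closest_product_hour('41820'))   # 11.6 h -> 12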
Code example #27
0
def read_inps_dict2geometry_dict_object(inpsDict):

    # eliminate dsName by processor
    if inpsDict['processor'] in ['isce', 'doris']:
        datasetName2templateKey.pop('azimuthCoord')
        datasetName2templateKey.pop('rangeCoord')
    elif inpsDict['processor'] in ['roipac', 'gamma']:
        datasetName2templateKey.pop('latitude')
        datasetName2templateKey.pop('longitude')
    else:
        print('Unrecognized InSAR processor: {}'.format(
            inpsDict['processor']))

    # inpsDict --> dsPathDict
    print('-' * 50)
    print('searching geometry files info')
    print('input data files:')
    maxDigit = max([len(i) for i in list(datasetName2templateKey.keys())])
    dsPathDict = {}
    for dsName in [
            i for i in geometryDatasetNames
            if i in datasetName2templateKey.keys()
    ]:
        key = datasetName2templateKey[dsName]
        if key in inpsDict.keys():
            files = sorted(glob.glob(str(inpsDict[key])))
            if len(files) > 0:
                if dsName == 'bperp':
                    bperpDict = {}
                    for file in files:
                        date = ptime.yyyymmdd(
                            os.path.basename(os.path.dirname(file)))
                        bperpDict[date] = file
                    dsPathDict[dsName] = bperpDict
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=inpsDict[key]))
                    print('number of bperp files: {}'.format(
                        len(list(bperpDict.keys()))))
                else:
                    dsPathDict[dsName] = files[0]
                    print('{:<{width}}: {path}'.format(dsName,
                                                       width=maxDigit,
                                                       path=files[0]))

    # Check required dataset
    dsName0 = geometryDatasetNames[0]
    if dsName0 not in dsPathDict.keys():
        print('WARNING: No required {} data files found!'.format(dsName0))

    # metadata
    ifgramRadarMetadata = None
    ifgramKey = datasetName2templateKey['unwrapPhase']
    if ifgramKey in inpsDict.keys():
        ifgramFiles = glob.glob(str(inpsDict[ifgramKey]))
        if len(ifgramFiles) > 0:
            atr = readfile.read_attribute(ifgramFiles[0])
            if 'Y_FIRST' not in atr.keys():
                ifgramRadarMetadata = atr.copy()

    # dsPathDict --> dsGeoPathDict + dsRadarPathDict
    dsNameList = list(dsPathDict.keys())
    dsGeoPathDict = {}
    dsRadarPathDict = {}
    for dsName in dsNameList:
        if dsName == 'bperp':
            atr = readfile.read_attribute(
                next(iter(dsPathDict[dsName].values())))
        else:
            atr = readfile.read_attribute(dsPathDict[dsName])
        if 'Y_FIRST' in atr.keys():
            dsGeoPathDict[dsName] = dsPathDict[dsName]
        else:
            dsRadarPathDict[dsName] = dsPathDict[dsName]

    geomRadarObj = None
    geomGeoObj = None
    if len(dsRadarPathDict) > 0:
        geomRadarObj = geometryDict(processor=inpsDict['processor'],
                                    datasetDict=dsRadarPathDict,
                                    extraMetadata=ifgramRadarMetadata)
    if len(dsGeoPathDict) > 0:
        geomGeoObj = geometryDict(processor=inpsDict['processor'],
                                  datasetDict=dsGeoPathDict,
                                  extraMetadata=None)
    return geomRadarObj, geomGeoObj
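The geo/radar split above hinges on a single metadata key: files carrying Y_FIRST are geocoded, everything else stays in radar coordinates. A minimal sketch of the same test over hypothetical attribute dicts:

ds_attrs = {'height':     {'Y_FIRST': '32.0', 'X_FIRST': '130.0'},
            'rangeCoord': {'LENGTH': '800', 'WIDTH': '600'}}

geo, radar = {}, {}
for name, atr in ds_attrs.items():
    (geo if 'Y_FIRST' in atr else radar)[name] = atr
print(sorted(geo), sorted(radar))   # ['height'] ['rangeCoord']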
Code example #28
0
def get_dataset_size(fname):
    atr = readfile.read_attribute(fname)
    return (atr['LENGTH'], atr['WIDTH'])
Code example #29
0
File: utils.py Project: xuubing/PySAR
def check_loaded_dataset(work_dir='./', print_msg=True):
    """Check the result of loading data for the following two rules:
        1. file existance
        2. file attribute readability

    Parameters: work_dir  : string, PySAR working directory
                print_msg : bool, print out message
    Returns:    True, if all required files and dataset exist; otherwise, ERROR
                    If True, PROCESS, SLC folder could be removed.
                stack_file  : 
                geom_file   :
                lookup_file :
    Example:    work_dir = os.path.expandvars('$SCRATCHDIR/SinabungT495F50AlosA/PYSAR')
                ut.check_loaded_dataset(work_dir)
    """
    load_complete = True

    if not work_dir:
        work_dir = os.getcwd()
    work_dir = os.path.abspath(work_dir)

    # 1. interferograms stack file: unwrapPhase, coherence
    flist = [os.path.join(work_dir, 'INPUTS/ifgramStack.h5')]
    stack_file = is_file_exist(flist, abspath=True)
    if stack_file is not None:
        obj = ifgramStack(stack_file)
        obj.open(print_msg=False)
        for dname in ['unwrapPhase', 'coherence']:
            if dname not in obj.datasetNames:
                raise ValueError(
                    'required dataset "{}" is missing in file {}'.format(
                        dname, stack_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/ifgramStack.h5')

    atr = readfile.read_attribute(stack_file)

    # 2. geom_file: height
    if 'X_FIRST' in atr.keys():
        flist = [os.path.join(work_dir, 'INPUTS/geometryGeo.h5')]
    else:
        flist = [os.path.join(work_dir, 'INPUTS/geometryRadar.h5')]
    geom_file = is_file_exist(flist, abspath=True)
    if geom_file is not None:
        obj = geometry(geom_file)
        obj.open(print_msg=False)
        dname = geometryDatasetNames[0]
        if dname not in obj.datasetNames:
            raise ValueError(
                'required dataset "{}" is missing in file {}'.format(
                    dname, geom_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/geometry*.h5')

    # 3. lookup_file: latitude,longitude or rangeCoord,azimuthCoord
    # could be different than geometry file in case of roipac and gamma
    flist = [os.path.join(work_dir, 'INPUTS/geometry*.h5')]
    lookup_file = get_lookup_file(flist, abspath=True, print_msg=print_msg)
    if lookup_file is not None:
        obj = geometry(lookup_file)
        obj.open(print_msg=False)

        if atr['PROCESSOR'] in ['isce', 'doris']:
            dnames = [geometryDatasetNames[1], geometryDatasetNames[2]]
        elif atr['PROCESSOR'] in ['gamma', 'roipac']:
            dnames = [geometryDatasetNames[3], geometryDatasetNames[4]]
        else:
            raise AttributeError('unrecognized InSAR processor: {}'.format(
                atr['PROCESSOR']))

        for dname in dnames:
            if dname not in obj.datasetNames:
                load_complete = False
                raise Exception(
                    'required dataset "{}" is missing in file {}'.format(
                        dname, lookup_file))
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                './INPUTS/geometry*.h5')

    # print message
    if print_msg:
        print(('Loaded datasets were processed by '
               'InSAR software: {}'.format(atr['PROCESSOR'])))
        if 'X_FIRST' in atr.keys():
            print('Loaded dataset is in GEO coordinates')
        else:
            print('Loaded dataset is in RADAR coordinates')
        print('Interferograms Stack: {}'.format(stack_file))
        print('Geometry File       : {}'.format(geom_file))
        print('Lookup Table File   : {}'.format(lookup_file))
        if load_complete:
            print('-' * 50)
            print(
                'All required data found/loaded/copied. The processed 2-pass InSAR data can be removed.'
            )
        print('-' * 50)
    return load_complete, stack_file, geom_file, lookup_file
Code example #30
0
File: ifgram_reconstruction.py Project: whigg/PySAR
def main(argv):

    # Inputs
    try:
        ifgram_file = argv[0]
        timeseries_file = argv[1]
    except:
        usage()
        sys.exit(1)

    try:
        outfile = argv[2]
    except:
        outfile = 'recon_'+ifgram_file

    atr = readfile.read_attribute(timeseries_file)
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])

    # Read time-series file
    print('loading timeseries ...')
    h5ts = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5ts['timeseries'].keys())
    date_num = len(date_list)
    timeseries = np.zeros((date_num, length*width))

    print('number of acquisitions: '+str(date_num))
    prog_bar = ptime.progressBar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5ts['timeseries'].get(date)[:]
        timeseries[i, :] = d.flatten()
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5ts.close()
    del d

    range2phase = -4*np.pi/float(atr['WAVELENGTH'])
    timeseries = range2phase*timeseries

    # Estimate interferograms from timeseries
    print('estimating interferograms from timeseries using design matrix from input interferograms')
    A, B = ut.design_matrix(ifgram_file)
    p = -1*np.ones([A.shape[0], 1])
    Ap = np.hstack((p, A))
    estData = np.dot(Ap, timeseries)
    del timeseries

    # Write interferograms file
    print('writing >>> '+outfile)
    h5 = h5py.File(ifgram_file, 'r')
    ifgram_list = sorted(h5['interferograms'].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)

    h5out = h5py.File(outfile, 'w')
    group = h5out.create_group('interferograms')

    print('number of interferograms: '+str(ifgram_num))
    prog_bar = ptime.progressBar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        data = np.reshape(estData[i, :], (length, width))

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=data)
        for key, value in h5['interferograms'][ifgram].attrs.items():
            gg.attrs[key] = value
        prog_bar.update(i+1, suffix=date12_list[i])
    prog_bar.close()
    h5.close()
    h5out.close()
    print('Done.')
    return outfile
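The reconstruction works because each interferogram is a linear combination of time-series epochs: a pair (m, s) has phase phi = ts_s - ts_m, encoded row by row in the design matrix (the -1 column prepended to A accounts for the first epoch, which PySAR treats as the zero reference). A tiny numeric sketch, independent of PySAR:

import numpy as np

# three epochs (the first is the zero reference) and three pairs:
# 0-1, 0-2, 1-2
ts = np.array([0.0, 2.0, 5.0])   # cumulative phase per epoch [rad]

# full design matrix: -1 at the master epoch, +1 at the slave epoch
A_full = np.array([[-1, 1, 0],
                   [-1, 0, 1],
                   [0, -1, 1]], dtype=float)

ifgrams = A_full @ ts
print(ifgrams)   # [2. 5. 3.] -> phase of pairs 0-1, 0-2, 1-2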