Example 1
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'incidenceAngle.h5'

    # Calculate look angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print 'Input file is geocoded, only center incident angle is calculated: '
        print angle
        length = int(atr['FILE_LENGTH'])
        width = int(atr['WIDTH'])
        angle_mat = np.zeros((length, width), np.float32)
        angle_mat[:] = angle
        angle = angle_mat

    print 'writing >>> ' + outFile
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'degree'
    try:
        atr.pop('ref_date')
    except:
        pass
    writefile.write(angle, atr, outFile)
    return outFile
Example 2
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage()
        sys.exit(1)

    try:
        outFile = argv[1]
    except:
        outFile = 'incidenceAngle.h5'

    # Calculate look angle
    angle = ut.incidence_angle(atr, dimension=2)

    # Geo coord
    if 'Y_FIRST' in atr.keys():
        print 'Input file is geocoded, only center incident angle is calculated: '
        print angle
        return angle

    # Radar coord
    else:
        print 'writing >>> ' + outFile
        atr['FILE_TYPE'] = 'mask'
        atr['UNIT'] = 'degree'
        writefile.write(angle, atr, outFile)
        return outFile
Example 3
def main(argv):
    try:
        File = argv[0]
        atr = readfile.read_attribute(File)
    except:
        usage();  sys.exit(1)
    
    try:    outFile = argv[1]
    except: outFile = 'incidence_angle.h5'
    
    #print '\n*************** Generate Incidence Angle *****************'
    ##### Calculate look angle
    angle = ut.incidence_angle(atr)
    
    ##### Output
    print 'writing >>> '+outFile
    atr['FILE_TYPE'] = 'mask'
    atr['UNIT'] = 'degree'
    writefile.write(angle, atr, outFile)
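
All three variants above delegate the geometry to ut.incidence_angle. As a rough illustration only (not PySAR's actual implementation), the center incidence angle can be derived from ROI_PAC-style metadata with the law of cosines; the attribute keys EARTH_RADIUS, HEIGHT, STARTING_RANGE, RANGE_PIXEL_SIZE and WIDTH below are assumptions based on typical ROI_PAC .rsc files.

import numpy as np

def center_incidence_angle(atr):
    # Sketch only: center incidence angle [deg] from ROI_PAC-style attributes.
    # Assumes EARTH_RADIUS, HEIGHT, STARTING_RANGE, RANGE_PIXEL_SIZE, WIDTH keys.
    r_earth = float(atr['EARTH_RADIUS'])        # local earth radius [m]
    r_sat = r_earth + float(atr['HEIGHT'])      # satellite orbit radius [m]
    # slant range at the scene center
    sr = float(atr['STARTING_RANGE']) + \
         float(atr['RANGE_PIXEL_SIZE']) * int(atr['WIDTH']) / 2.0
    # law of cosines in the (earth center, satellite, target) triangle
    cos_inc = (r_sat**2 - r_earth**2 - sr**2) / (2.0 * r_earth * sr)
    return np.degrees(np.arccos(cos_inc))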
Example 4
def main(argv):

    inps = cmdLineParse()
    inps.dem_file = ut.get_file_list([inps.dem_file])[0]
    inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
    atr = readfile.read_attribute(inps.timeseries_file)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Get Acquisition time
    inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                             inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    if not os.path.isdir(inps.grib_source):
        print 'making directory: ' + inps.grib_source
        os.mkdir(inps.grib_source)

    ## Loop to download
    inps.grib_file_list = []
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5timeseries['timeseries'].keys())
    for d in dateList:
        print [d]
        if inps.grib_source == 'ECMWF':
            grib_file = './ECMWF/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = './ERA/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = './MERRA/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = './NARR/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

        if os.path.isfile(grib_file):
            print grib_file + ' already exists.'
        else:
            if inps.grib_source == 'ECMWF':
                pa.ECMWFdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'ERA':
                pa.ERAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'MERRA':
                pa.MERRAdload([d], inps.hour, './' + inps.grib_source + '/')
            elif inps.grib_source == 'NARR':
                pa.NARRdload([d], inps.hour, './' + inps.grib_source + '/')

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data + phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()
    print 'Done.'

    return
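
The helper closest_weather_product_time is not shown in these examples. A minimal sketch of what it presumably does, assuming the global products are issued every 6 hours (00/06/12/18 UTC) and that CENTER_LINE_UTC is given in seconds of the day:

def closest_weather_product_time(center_line_utc, grib_source='ECMWF'):
    # Sketch only: round the acquisition time to the nearest product hour,
    # assuming 6-hourly products (00/06/12/18 UTC) and a UTC time in seconds.
    hour_of_day = float(center_line_utc) / 3600.0
    closest_hour = int(round(hour_of_day / 6.0)) * 6 % 24
    return '%02d' % closest_hour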
Example 5
def main(argv):
    inps = cmdLineParse()

    ##### 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1, atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print 'Input 1st file is ' + k1

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width = int(round((east - west) / lon_step))
    length = int(round((south - north) / lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incident angle
    u_los = np.zeros((2, width * length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print '---------------------'
        print 'reading ' + fname
        atr = readfile.read_attribute(fname)

        [x0, x1] = subset.coord_geo2radar([west, east], atr, 'lon')
        [y0, y1] = subset.coord_geo2radar([north, south], atr, 'lat')
        V = readfile.read(fname, (x0, y0, x1, y1))[0]
        u_los[i, :] = V.flatten(0)

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print 'heading angle: ' + str(heading_angle)
        heading_angle *= np.pi / 180.
        heading.append(heading_angle)

        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        #print 'incidence angle: '+str(inc_angle)
        inc_angle *= np.pi / 180.
        incidence.append(inc_angle)

    ##### 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could be easily modified to support multiple viewing geometries (e.g. two adjacent tracks from asc & desc) to resolve 3D

    # Design matrix
    A = np.zeros((2, 2))
    for i in range(len(inps.file)):
        A[i, 0] = np.cos(incidence[i])
        A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0, :], (length, width))
    u_h = np.reshape(u_vh[1, :], (length, width))

    ##### 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['FILE_LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print '---------------------'
    outname = inps.outfile[0]
    print 'writing   vertical component to file: ' + outname
    writefile.write(u_v, atr, outname)

    outname = inps.outfile[1]
    print 'writing horizontal component to file: ' + outname
    writefile.write(u_h, atr, outname)

    print 'Done.'
    return
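
A self-contained sketch of the same projection step with synthetic numbers (all values below are made up for illustration): two viewing geometries, one pixel, solved with the same design matrix as in the example above.

import numpy as np

# Synthetic example: LOS displacement [m] of one pixel seen from two geometries
incidence = np.radians([34.0, 41.0])       # incidence angles theta
heading = np.radians([348.0, 192.0])       # heading angles alpha
azimuth = np.radians(90.0)                 # assumed azimuth of horizontal motion
u_los = np.array([[0.010], [-0.004]])      # LOS displacements

# Same design matrix as above:
# A[i, 0] = cos(theta_i), A[i, 1] = sin(theta_i) * sin(alpha_i - az)
A = np.zeros((2, 2))
A[:, 0] = np.cos(incidence)
A[:, 1] = np.sin(incidence) * np.sin(heading - azimuth)

u_vh = np.dot(np.linalg.pinv(A), u_los)
print('vertical   component: %.4f m' % u_vh[0, 0])
print('horizontal component: %.4f m' % u_vh[1, 0])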
Example 6
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(
                inps.dem_file)[0] + '4pyaps' + atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Grib data directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(
                inps.dem_file)) + '/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: ' + inps.weather_dir
    grib_dir = inps.weather_dir + '/' + inps.grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: ' + grib_dir
        os.makedirs(grib_dir)

    ## Get Acquisition time
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                                 inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    ## Get grib file list and date list
    inps.grib_file_list = []
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: ' + inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    for d in dateList:
        if inps.grib_source == 'ECMWF':
            grib_file = grib_dir + '/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = grib_dir + '/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = grib_dir + '/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = grib_dir + '/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

    ## Get date list to download
    grib_file_existed = ut.get_file_list(inps.grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode(
            [os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if os.path.getsize(i) != grib_filesize_mode
        ]
        print 'number of grib files existed    : %d' % len(grib_file_existed)
        print 'file size mode: %d' % grib_filesize_mode
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Delete them and re-download...'
            print 'number of grib files corrupted  : %d' % len(
                grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(
        list(set(inps.grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall('\d{8}', i)[0]) for i in grib_file2download
    ]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if inps.grib_source == 'ECMWF':
        pa.ECMWFdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'ERA':
        pa.ERAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'MERRA':
        pa.MERRAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'NARR':
        pa.NARRdload(date_list2download, inps.hour, grib_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data - phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if '4pyaps' in inps.dem_file:
        rmCmd = 'rm ' + inps.dem_file + ' ' + inps.dem_file + '.rsc '
        print rmCmd
        os.system(rmCmd)

    print 'Done.'

    return
Example 7
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print 'read option from template file: ' + inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print "loading time series: " + inps.timeseries_file
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print '\tconsider P_BASELINE variation in azimuth direction'
        else:
            pbase = inps.pbase
    except:
        print '\tCannot find P_BASELINE_TIMESERIES from timeseries file.'
        print '\tTrying to calculate it from interferograms file'
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Cannot correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print 'read temporal baseline'
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print 'reading incidence angle from file: ' + inps.incidence_angle
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print 'use input incidence angle : ' + str(
                    inps.incidence_angle)
            except:
                raise ValueError('Can not read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print 'calculate incidence angle using attributes of time series file'
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print 'reading range distance from file: ' + inps.range_dis
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print 'use input range distance : ' + str(inps.range_dis)
            except:
                raise ValueError('Can not read input range distance: ' +
                                 str(inps.range_dis))
    else:
        print 'calculate range distance using attributes from time series file'
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print '-------------------------------------------------'
    if inps.phase_velocity:
        print 'using phase velocity history'
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print 'using phase history'
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0

    # Polynomial order of model
    print "temporal deformation model's polynomial order = " + str(
        inps.poly_order)
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print "temporal deformation model's step function step at " + inps.step_date
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print '-------------------------------------------------'

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width], dtype=np.float32)
    resid_n = np.zeros([A_def.shape[0], length * width], dtype=np.float32)
    constC = np.zeros([length, width], dtype=np.float32)
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width)
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i / length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width)
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1].reshape((1, length))

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = delta_z
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print 'inverting using L2-norm minimization (unweighted least squares) for the whole area'

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print 'ERROR: Script only supports the same dimension for both the incidence angle and range distance matrices.'
        print 'dimension of incidence angle: ' + str(inps.incidence_angle.ndim)
        print 'dimension of range distance: ' + str(inps.range_dis.ndim)
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in atr.keys():
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print 'writing >>> ' + dem_error_file
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print 'writing >>> ' + inps.outfile
        print 'number of dates: ' + str(len(date_list))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print 'writing >>> ' + outFile
    print 'number of dates: ' + str(A_def.shape[0])
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.iteritems():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
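
The heart of the inversion above is the linear model ts(t) ≈ pbase(t) / (R * sin(theta)) * delta_z plus a temporal deformation model. A stripped-down, single-pixel sketch with synthetic numbers (all values made up for illustration) that recovers a known DEM error:

import numpy as np

# Synthetic single-pixel example
date_num = 6
tbase = np.array([[0.], [35.], [70.], [105.], [140.], [175.]]) / 365.25   # [yr]
pbase = np.array([[0.], [120.], [-80.], [40.], [200.], [-30.]])           # [m]
range_dis = 850e3                     # slant range [m]
inc_angle = np.radians(34.0)          # incidence angle

# Design matrix: DEM-error column + (offset, velocity) deformation model
A_delta_z = pbase / (range_dis * np.sin(inc_angle))
A_def = np.hstack((np.ones((date_num, 1)), tbase))
A = np.hstack((A_delta_z, A_def))

# Simulate a time series with a 12 m DEM error and 0.02 m/yr velocity
ts_dis = np.dot(A, np.array([[12.0], [0.0], [0.02]]))

X = np.dot(np.linalg.pinv(A), ts_dis)
print('estimated DEM error: %.2f m' % X[0, 0])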
Example 8
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in atr.keys():
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print 'reading mask from file: ' + inps.mask_file
    mask = readfile.read(inps.mask_file, epoch='mask')[0].flatten(1)
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print 'total            pixel number: %d' % pix_num
    print 'estimating using pixel number: %d' % msk_num

    ##### Read DEM
    print 'read DEM from file: ' + inps.dem_file
    dem = readfile.read(inps.dem_file, epoch='height')[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print 'considering the incidence angle of each pixel ...'
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten(1)
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print 'polynomial order: %d' % inps.poly_order

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print 'Estimating the tropospheric effect between the differences of the subsequent epochs and DEM'

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]

    print '----------------------------------------------------------'
    print 'correlation of DEM with each time-series epoch:'
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten(1)

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print '%s: %.2f' % (date, cc)
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print '----------------------------------------------------------'
    print 'Average Correlation of DEM with time-series epochs: %.2f' % average_phase_height_corr

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten(1)
        data2 = h5[k].get(date2)[:].flatten(1)
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print '----------------------------------------------------------'
    print 'removing the stratified tropospheric delay from each epoch'
    print 'writing >>> ' + inps.outfile
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.iteritems():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print 'Done.'
    return inps.outfile
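
A compact sketch of the same phase-vs-elevation fit on synthetic data (poly_order = 1; all numbers made up for illustration), mirroring the A / par / B steps above for a single epoch:

import numpy as np

# Synthetic single-epoch example
dem = np.linspace(0.0, 2000.0, 50)                              # heights [m]
phase = 1.5e-4 * dem + 0.02 + 1e-3 * np.random.randn(dem.size)  # linear delay + noise

A = np.vstack((dem, np.ones(dem.size))).T     # poly_order = 1 design matrix
par = np.dot(np.linalg.pinv(A), phase)        # [ratio, offset]
phase_corrected = phase - np.dot(A, par)
print('estimated phase/elevation ratio: %.2e' % par[0])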
Example 9
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if   inps.weather_model in ['ECMWF','ERA-Interim']:   inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA'  :                   inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':                   inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR' :                   inps.grib_source = 'NARR'
    else: raise Exception('Unrecognized weather model: '+inps.weather_model)
    print 'grib source: '+inps.grib_source

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: '+inps.weather_dir

    # Get date list to download
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: '+inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print 'read date list info from: '+inps.date_list_file

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print 'Time of closest available product: '+inps.hour

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(dateList, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: '+str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle*np.pi/180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source+'.h5'
    print 'writing >>> '+tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
    print 'writing >>> '+inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    try:    ref_date = atr['ref_date']
    except: ref_date = dateList[0]
    print 'calculating phase delay on reference date: '+ref_date
    ref_date_grib_file = None
    for fname in inps.grib_file_list:
        if ref_date in fname:
            ref_date_grib_file = fname
    phs_ref = get_delay(ref_date_grib_file, atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        grib_file = inps.grib_file_list[i] 
        date = re.findall('\d{8}', grib_file)[0]

        # Get phase delay
        if date != ref_date:
            print 'calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file))
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing to HDF5 files ...'
        data = h5timeseries['timeseries'].get(date)[:]
        dset  = group_tropCor.create_dataset(date, data=data-phs, compression='gzip')
        dset  = group_trop.create_dataset(date, data=phs, compression='gzip')

    ## Write Attributes
    for key,value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value
    
    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if '4pyaps' in inps.dem_file:
        rmCmd = 'rm '+inps.dem_file+' '+inps.dem_file+'.rsc '
        print rmCmd
        os.system(rmCmd)
    
    print 'Done.'

    return
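
dload_grib is referenced but not listed with Example 9. A plausible sketch of it, reusing the file-name patterns and pa.*dload calls that Examples 4 and 6 spell out explicitly (pyaps imported as pa, as in those examples):

import os
import pyaps as pa

def dload_grib(date_list, hour, weather_model='ECMWF', weather_dir='./WEATHER'):
    # Sketch only: download weather model data, following Examples 4 and 6.
    if weather_model in ['ECMWF', 'ERA-Interim']:
        grib_source = 'ECMWF'
    else:
        grib_source = weather_model
    grib_dir = weather_dir + '/' + grib_source
    if not os.path.isdir(grib_dir):
        os.makedirs(grib_dir)

    # Expected grib file names (patterns copied from Examples 4 and 6)
    grib_file_list = []
    for d in date_list:
        if grib_source == 'ECMWF':
            grib_file = grib_dir + '/ERA-Int_' + d + '_' + hour + '.grb'
        elif grib_source == 'ERA':
            grib_file = grib_dir + '/ERA_' + d + '_' + hour + '.grb'
        elif grib_source == 'MERRA':
            grib_file = grib_dir + '/merra-' + d + '-' + hour + '.hdf'
        elif grib_source == 'NARR':
            grib_file = grib_dir + '/narr-a_221_' + d + '_' + hour + '00_000.grb'
        grib_file_list.append(grib_file)

    # Download only the dates whose grib file is missing
    date2dload = [d for d, f in zip(date_list, grib_file_list)
                  if not os.path.isfile(f)]
    if grib_source == 'ECMWF':
        pa.ECMWFdload(date2dload, hour, grib_dir)
    elif grib_source == 'ERA':
        pa.ERAdload(date2dload, hour, grib_dir)
    elif grib_source == 'MERRA':
        pa.MERRAdload(date2dload, hour, grib_dir)
    elif grib_source == 'NARR':
        pa.NARRdload(date2dload, hour, grib_dir)
    return grib_file_list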