Example #1
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(inps.dem_file)[0]+'4pyaps'+atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if   inps.weather_model in ['ECMWF','ERA-Interim']:   inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA'  :                   inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':                   inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR' :                   inps.grib_source = 'NARR'
    else: raise Exception('Unrecognized weather model: '+inps.weather_model)
    print 'grib source: '+inps.grib_source

    # Get weather directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.timeseries_file))+'/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(inps.dem_file))+'/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: '+inps.weather_dir

    # Get date list to download
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: '+inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(np.loadtxt(inps.date_list_file, dtype=str, usecols=(0,)).tolist())
        print 'read date list info from: '+inps.date_list_file

    # Get Acquisition time - hour
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'], inps.grib_source)
    print 'Time of closest available product: '+inps.hour

    ## Download data using PyAPS
    inps.grib_file_list = dload_grib(dateList, inps.hour, inps.weather_model, inps.weather_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: '+str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle*np.pi/180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source+'.h5'
    print 'writing >>> '+tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(inps.timeseries_file)[0]+'_'+inps.grib_source+'.h5'
    print 'writing >>> '+inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    try:    ref_date = atr['ref_date']
    except: ref_date = dateList[0]
    print 'calculating phase delay on reference date: '+ref_date
    ref_date_grib_file = None
    for fname in inps.grib_file_list:
        if ref_date in fname:
            ref_date_grib_file = fname
    phs_ref = get_delay(ref_date_grib_file, atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        grib_file = inps.grib_file_list[i] 
        date = re.findall('\d{8}', grib_file)[0]

        # Get phase delay
        if date != ref_date:
            print 'calculate phase delay on %s from file %s' % (date, os.path.basename(grib_file))
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref

        # Write dataset
        print 'writing to HDF5 files ...'
        data = h5timeseries['timeseries'].get(date)[:]
        dset  = group_tropCor.create_dataset(date, data=data-phs, compression='gzip')
        dset  = group_trop.create_dataset(date, data=phs, compression='gzip')

    ## Write Attributes
    for key,value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value
    
    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm '+inps.dem_file+' '+inps.dem_file+'.rsc '
        print rmCmd
        os.system(rmCmd)
    
    print 'Done.'

    return
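
The incidence angle in this example only serves to map the zenith delay onto the radar line of sight. A minimal, self-contained sketch of that projection (the function name and inputs are illustrative, not part of PyAPS/PySAR):

import numpy as np

def zenith2slant(zenith_delay, inc_angle_deg):
    # Map zenith delay (m) to slant/LOS delay, for a scalar or
    # per-pixel incidence angle given in degrees.
    inc = np.asarray(inc_angle_deg) * np.pi / 180.0
    return zenith_delay / np.cos(inc)

# Example: 0.01 m of zenith delay seen at a 34 deg incidence angle
print(zenith2slant(np.array([[0.01]]), 34.0))   # ~0.0121 m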
Example #2
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(inps.template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # Find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            coh_file_idx = k_list.index('coherence')
            inps.coherence_file = inps.file[coh_file_idx]
        except ValueError:
            print 'No coherence file found! Can not use the coherence-based method without it.'
            inps.coherence_file = None
            inps.coherence_based = False

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
            # Check trans file
            try:
                inps.trans_file = ut.get_file_list(inps.trans_file)[0]
            except:
                inps.trans_file = None
                print 'Warning: no mapping transformation file found! Can not use ' + key + ' option without it.'
                print 'skip this option.'
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
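
The pysar.network.maskAoi.yx parsing above is easy to test in isolation: a template value like '200:300, 150:400' (y0:y1, x0:x1) becomes a (x0, y0, x1, y1) pixel box. A standalone sketch of the same logic:

def parse_aoi_yx(value):
    # 'y0:y1, x0:x1' -> (x0, y0, x1, y1), with bounds sorted ascending
    tmp = [i.strip() for i in value.split(',')]
    sub_y = sorted([int(i) for i in tmp[0].split(':')])
    sub_x = sorted([int(i) for i in tmp[1].split(':')])
    return (sub_x[0], sub_y[0], sub_x[1], sub_y[1])

print(parse_aoi_yx('200:300, 150:400'))   # (150, 200, 400, 300)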
Example #3
def main(argv):

    ##### Check Inputs
    try:
        File = argv[0]
    except:
        usage()
        sys.exit(1)
    ext = os.path.splitext(File)[1].lower()

    #################### File Structure #####################
    try:
        argv[1]
        if argv[1] in ['--struct', '--structure', '--tree'] and ext in ['.h5', '.he5']:
            print '***** HDF5 File Structure *****'
            print_hdf5_structure(File)
            return
    except:
        pass

    #################### Basic Info #####################
    try:
        atr = readfile.read_attribute(File)
    except:
        print 'Can not read file: ' + File
        sys.exit(1)
    k = atr['FILE_TYPE']

    # Print out date list for timeseries HDF5 file
    try:
        if k in ['timeseries'] and argv[1] in ['--date']:
            h5 = h5py.File(File, 'r')
            dateList = h5[k].keys()
            for date in dateList:
                print date
            h5.close()
            return
    except:
        pass

    print '\n************************ File Info *****************************'
    print 'File name   : ' + os.path.basename(File)
    print 'File type   : ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    try:
        atr['X_FIRST']
        print 'Coordinates : GEO'
    except:
        print 'Coordinates : radar'

    #################### HDF5 File Info #####################
    if ext in ['.h5', '.he5']:
        h5file = h5py.File(File, 'r')
        ##### Group Info
        print 'All groups in this file:'
        print h5file.keys()

        ##### DateList / IgramList
        if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
            epochList = sorted(h5file[k].keys())

    if k == 'timeseries':
        try:
            print_timseries_date_info(epochList)
        except:
            pass
        print '*************** Attributes **************'
        print_attributes(atr)

    elif k in ['interferograms', 'coherence', 'wrapped']:
        ##### Plot Attributes of One Epoch
        try:
            epochNum = int(argv[1])
            epochAtr = h5file[k][epochList[epochNum - 1]].attrs
            print '*****************************************'
            print epochList[epochNum - 1]
            print '*************** Attributes **************'
            print_attributes(epochAtr)
            print '*****************************************'
            print epochList[epochNum - 1]
        ##### Plot Epoch List Info
        except:
            print '*****************************************'
            print 'Number of ' + k + ': ' + str(len(epochList))
            print '*****************************************'
            print 'List of the ' + k + ':             number'
            for i in range(len(epochList)):
                print epochList[i] + '    ' + str(i + 1)
            print '*****************************************'
            print 'Number of ' + k + ': ' + str(len(epochList))

    ##### All other file types, except for timeseries/interferograms/coherence/wrapped
    else:
        print '*************** Attributes **************'
        print_attributes(atr)

    try:
        h5file.close()
    except:
        pass
    print '****************************************************************'
    return
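
print_hdf5_structure() is defined elsewhere in the module; a minimal equivalent built on h5py's visititems(), shown here as an assumption of what it does rather than the actual implementation:

import h5py

def print_hdf5_structure_sketch(fname):
    # Recursively print every group and dataset (with shape/dtype) in the file.
    def print_item(name, obj):
        if isinstance(obj, h5py.Dataset):
            print('%s    %s %s' % (name, str(obj.shape), str(obj.dtype)))
        else:
            print(name)
    f = h5py.File(fname, 'r')
    f.visititems(print_item)
    f.close()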
Example #4
def main(argv):
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)

    atr = readfile.read_attribute(inps.file[0])
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            remove_reference_pixel(file)
        return

    ##### Check Input Coordinates
    # Read ref_y/x/lat/lon from reference/template
    # priority: Direct Input > Reference File > Template File
    if inps.template_file:
        print 'reading reference info from template: '+inps.template_file
        inps = read_seed_template2inps(inps.template_file, inps)
    if inps.reference_file:
        print 'reading reference info from reference: '+inps.reference_file
        inps = read_seed_reference2inps(inps.reference_file, inps)

    ## Do not use ref_lat/lon input for file in radar-coord
    #if not 'X_FIRST' in atr.keys() and (inps.ref_lat or inps.ref_lon):
    #    print 'Lat/lon reference input is disabled for file in radar coord.'
    #    inps.ref_lat = None
    #    inps.ref_lon = None

    # Convert ref_lat/lon to ref_y/x
    if inps.ref_lat and inps.ref_lon:
        if 'X_FIRST' in atr.keys():
            inps.ref_y = subset.coord_geo2radar(inps.ref_lat, atr, 'lat')
            inps.ref_x = subset.coord_geo2radar(inps.ref_lon, atr, 'lon')
        else:
            # Convert lat/lon to az/rg for radar coord file using geomap*.trans file
            inps.ref_y, inps.ref_x = ut.glob2radar(np.array(inps.ref_lat), np.array(inps.ref_lon),\
                                                   inps.trans_file, atr)[0:2]
        print 'Input reference point in lat/lon: '+str([inps.ref_lat, inps.ref_lon])
    print 'Input reference point in   y/x  : '+str([inps.ref_y, inps.ref_x])

    # Do not use ref_y/x outside of data coverage
    if (inps.ref_y and inps.ref_x and
        not (0 <= inps.ref_y < length and 0 <= inps.ref_x < width)):
        inps.ref_y = None
        inps.ref_x = None
        print 'WARNING: input reference point is OUT of data coverage!'
        print 'Continue with other method to select reference point.'

    # Do not use ref_y/x in masked out area
    if inps.ref_y and inps.ref_x and inps.mask_file:
        print 'mask: '+inps.mask_file
        mask = readfile.read(inps.mask_file)[0]
        if mask[inps.ref_y, inps.ref_x] == 0:
            inps.ref_y = None
            inps.ref_x = None
            print 'WARNING: input reference point is in masked OUT area!'
            print 'Continue with other method to select reference point.'

    ##### Select method
    if inps.ref_y and inps.ref_x:
        inps.method = 'input-coord'
    elif inps.coherence_file:
        if os.path.isfile(inps.coherence_file):
            inps.method = 'max-coherence'
        else: 
            inps.coherence_file = None

    if inps.method == 'manual':
        inps.parallel = False
        print 'Parallel processing is disabled for manual seeding method.'

    ##### Seeding file by file
    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(len(inps.file))

    if len(inps.file) == 1:
        seed_file_inps(inps.file[0], inps, inps.outfile)
        
    elif inps.parallel:
        #num_cores = min(multiprocessing.cpu_count(), len(inps.file))
        #print 'parallel processing using %d cores ...'%(num_cores)
        Parallel(n_jobs=num_cores)(delayed(seed_file_inps)(file, inps) for file in inps.file)
    else:
        for File in inps.file:
            seed_file_inps(File, inps)

    print 'Done.'
    return
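
For geocoded files, subset.coord_geo2radar() reduces to simple arithmetic on the ROI_PAC-style metadata (Y_FIRST/Y_STEP in degrees, an assumption based on the attributes used elsewhere in these examples). A sketch of the latitude-to-row case:

def coord_lat2y(lat, atr):
    # row index = (lat - lat of first row) / row spacing
    y_first = float(atr['Y_FIRST'])
    y_step = float(atr['Y_STEP'])
    return int(round((lat - y_first) / y_step))

atr = {'Y_FIRST': '34.0', 'Y_STEP': '-0.0008333'}
print(coord_lat2y(33.5, atr))   # 600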
Example #5
def load_geometry_hdf5(fileType, fileList, outfile=None, exDict=dict()):
    '''Load multiple geometry files into hdf5 file: geometryGeo.h5 or geometryRadar.h5.
    File structure:
        /geometry.attrs
        /geometry/latitude          #for geometryRadar.h5 only, from ISCE/Doris lookup table
        /geometry/longitude         #for geometryRadar.h5 only, from ISCE/Doris lookup table
        /geometry/rangeCoord        #for geometryGeo.h5 only, from ROI_PAC/Gamma lookup table
        /geometry/azimuthCoord      #for geometryGeo.h5 only, from ROI_PAC/Gamma lookup table
        /geometry/height
        /geometry/incidenceAngle
        /geometry/headingAngle
        /geometry/slantRangeDistance
        /geometry/shadowMask
        /geometry/waterMask
    '''
    ext = os.path.splitext(fileList[0])[1]
    atr = readfile.read_attribute(fileList[0])
    if not outfile:
        if 'Y_FIRST' in atr.keys():
            outfile = 'geometryGeo.h5'
        else:
            outfile = 'geometryRadar.h5'
        # output directory
        if 'timeseries_dir' in exDict.keys() and exDict['timeseries_dir']:
            outdir = exDict['timeseries_dir']
        else:
            outdir = os.path.abspath(os.getcwd())
        outfile = os.path.join(outdir, outfile)
    outfile = os.path.abspath(outfile)

    ##### Check overlap with existing hdf5 file
    h5dnameList = []
    if os.path.isfile(outfile):
        print os.path.basename(outfile) + '  already exists.'
        try:
            atr = readfile.read_attribute(outfile)
            h5 = h5py.File(outfile, 'r')
            h5dnameList = sorted(h5['geometry'].keys())
            h5.close()
        except:
            print 'File exists but is not readable, delete it.'
            rmCmd = 'rm ' + outfile
            print rmCmd
            os.system(rmCmd)

    dnameList = []
    fileList2 = list(fileList)
    for fname in fileList2:
        fbase = os.path.basename(fname).lower()
        if ((fbase.startswith('lat') and 'latitude' in h5dnameList) or
            (fbase.startswith('lon') and 'longitude' in h5dnameList) or
            (fbase.startswith('los') and 'incidenceAngle' in h5dnameList) or
            (fbase.startswith('shadowmask') and 'shadowMask' in h5dnameList) or
            (fbase.startswith('watermask') and 'waterMask' in h5dnameList) or
            (fbase.startswith('incidenceang') and 'incidenceAngle' in h5dnameList) or
            (fbase.endswith(('.trans', '.utm_to_rdc')) and 'rangeCoord' in h5dnameList) or
            ((fbase.startswith(('hgt', 'dem')) or fbase.endswith(('.hgt', '.dem', 'wgs84'))) and 'height' in h5dnameList) or
            #(fbase.startswith('geometry') and any(i in h5dnameList for i in ['rangeCoord','longitude'])) or
            (fbase.startswith('rangedist') and 'slantRangeDistance' in h5dnameList)):
            fileList.remove(fname)

    # Loop - Writing files into hdf5 file
    if fileList:
        print 'number of ' + ext + ' to add: ' + str(len(fileList))
        ##Open HDF5 file
        if os.path.isfile(outfile):
            print 'open ' + outfile + ' with r+ mode'
            h5 = h5py.File(outfile, 'r+')
        else:
            print 'open ' + outfile + ' with w mode'
            h5 = h5py.File(outfile, 'w')

        ##top level group
        if fileType not in h5.keys():
            group = h5.create_group(fileType)
            print 'create group: ' + fileType
        else:
            group = h5[fileType]
            print 'open group: ' + fileType
        ##datasets
        for fname in fileList:
            fbase = os.path.basename(fname).lower()
            print 'Add ' + fname
            if fbase.startswith('lat'):
                data, atr = readfile.read(fname)
                dset = group.create_dataset('latitude',
                                            data=data,
                                            compression='gzip')

            elif fbase.startswith('lon'):
                data, atr = readfile.read(fname)
                dset = group.create_dataset('longitude',
                                            data=data,
                                            compression='gzip')

            elif fbase.startswith('los'):
                d0, d1, atr = readfile.read(fname)
                dset = group.create_dataset('incidenceAngle',
                                            data=d0,
                                            compression='gzip')
                dset = group.create_dataset('headingAngle',
                                            data=d1,
                                            compression='gzip')

            elif 'shadowmask' in fbase:
                data, atr = readfile.read(fname)
                dset = group.create_dataset('shadowMask',
                                            data=data,
                                            compression='gzip')

            elif 'watermask' in fbase:
                data, atr = readfile.read(fname)
                dset = group.create_dataset('waterMask',
                                            data=data,
                                            compression='gzip')

            elif 'incidenceang' in fbase:
                data, atr = readfile.read(fname)
                dset = group.create_dataset('incidenceAngle',
                                            data=data,
                                            compression='gzip')

            elif 'rangedist' in fbase:
                data, atr = readfile.read(fname)
                dset = group.create_dataset('slantRangeDistance',
                                            data=data,
                                            compression='gzip')

            elif fbase.endswith(('.trans', '.utm_to_rdc')):
                d0, d1, atr = readfile.read(fname)
                dset = group.create_dataset('rangeCoord',
                                            data=d0,
                                            compression='gzip')
                dset = group.create_dataset('azimuthCoord',
                                            data=d1,
                                            compression='gzip')

            elif fbase.startswith(('hgt', 'dem')) or fbase.endswith(
                ('.hgt', '.dem', 'wgs84')):
                data, atr = readfile.read(fname)
                dset = group.create_dataset('height',
                                            data=data,
                                            compression='gzip')

            else:
                print 'Unrecognized file type: ' + fbase

            # PySAR attributes
            try:
                atr['PROJECT_NAME'] = exDict['project_name']
            except:
                atr['PROJECT_NAME'] = 'PYSAR'
            key = 'INSAR_PROCESSOR'
            if key not in atr.keys():
                try:
                    atr[key] = exDict['insar_processor']
                except:
                    pass
            # Write attributes
            for key, value in atr.iteritems():
                if key not in group.attrs.keys():
                    group.attrs[key] = str(value)
        h5.close()
    else:
        print 'All input ' + ext + ' are included, no need to re-load.'
        fileList = None
    return outfile
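
Reading a layer back out of the file written above takes a couple of lines of h5py; e.g. the height dataset of geometryRadar.h5 (file and dataset names as given in the docstring):

import h5py

h5 = h5py.File('geometryRadar.h5', 'r')
height = h5['geometry']['height'][:]
print(height.shape)
h5.close()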
Example #6
def remove_surface(File, surf_type, maskFile=None, outFile=None, ysub=None):
    start = time.time()
    atr = readfile.read_attribute(File)

    # Output File Name
    if not outFile:
        outFile = os.path.splitext(
            File)[0] + '_' + surf_type + os.path.splitext(File)[1]

    if maskFile:
        Mask = readfile.read(maskFile)[0]
        print 'read mask file: ' + maskFile
    else:
        Mask = np.ones((int(atr['FILE_LENGTH']), int(atr['WIDTH'])))
        print 'use mask of the whole area'

    ##### Input File Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'Input file is ' + k
    print 'remove ramp type: ' + surf_type

    ## Multiple Datasets File
    if k in ['interferograms', 'coherence', 'wrapped', 'timeseries']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())
        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5flat = h5py.File(outFile, 'w')
        group = h5flat.create_group(k)
        print 'writing >>> ' + outFile

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            dset = group.create_dataset(epoch, data=data_n, compression='gzip')
            prog_bar.update(i + 1, suffix=epoch)
        for key, value in h5file[k].attrs.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch = epochList[i]
            data = h5file[k][epoch].get(epoch)[:]

            if not ysub:
                data_n, ramp = remove_data_surface(data, Mask, surf_type)
            else:
                data_n = remove_data_multiple_surface(data, Mask, surf_type,
                                                      ysub)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data_n, compression='gzip')
            for key, value in h5file[k][epoch].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

    ## Single Dataset File
    else:
        data, atr = readfile.read(File)
        print 'Removing ' + surf_type + ' from ' + k

        if not ysub:
            data_n, ramp = remove_data_surface(data, Mask, surf_type)
        else:
            data_n = remove_data_multiple_surface(data, Mask, surf_type, ysub)

        print 'writing >>> ' + outFile
        writefile.write(data_n, atr, outFile)

    try:
        h5file.close()
        h5flat.close()
        prog_bar.close()
    except:
        pass

    print 'Remove ' + surf_type + ' took ' + str(time.time() - start) + ' secs'
    return outFile
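
remove_data_surface() is defined elsewhere; for the 'plane' ramp type it amounts to a masked least-squares fit of z = a*x + b*y + c, subtracted from the data. A minimal sketch under that assumption:

import numpy as np

def remove_plane_sketch(data, mask):
    # Estimate a linear ramp from unmasked, non-NaN pixels and subtract it.
    length, width = data.shape
    yy, xx = np.mgrid[0:length, 0:width]
    G = np.column_stack((xx.flatten(), yy.flatten(), np.ones(length * width)))
    d = data.flatten()
    idx = (mask.flatten() != 0) & ~np.isnan(d)
    m = np.linalg.lstsq(G[idx], d[idx])[0]
    ramp = np.dot(G, m).reshape(length, width)
    return data - ramp, ramp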
Example #7
def seed_file_inps(File, inps=None, outFile=None):
    '''Seed input file with options from the input namespace.
    Return output file name if it succeeds; otherwise, return None.
    '''
    # Optional inputs
    if not outFile:  outFile = 'Seeded_'+os.path.basename(File)
    if not inps:  inps = cmdLineParse([''])
    print '----------------------------------------------------'
    print 'seeding file: '+File
    
    # Get stack and mask
    stack = ut.get_file_stack(File, inps.mask_file)
    mask = ~np.isnan(stack)
    if np.nansum(mask) == 0.0:
        print '\n*****************************************************'
        print   'ERROR:'
        print   'There is no pixel that has valid phase value in all datasets.' 
        print   'Check the file!'
        print   'Seeding failed'
        sys.exit(1)

    atr = readfile.read_attribute(File)
    # 1. Reference using global average 
    if inps.method == 'global-average':
        print '\n---------------------------------------------------------'
        print 'Automatically Seeding using Global Spatial Average Value '
        print '---------------------------------------------------------'
        print 'Calculating the global spatial average value for each epoch'+\
              ' of all valid pixels ...'
        width = int(atr['WIDTH'])
        length = int(atr['FILE_LENGTH'])
        box = (0,0,width,length)
        meanList = ut.spatial_average(File, mask, box)[0]
        inps.ref_y = ''
        inps.ref_x = ''
        outFile = seed_file_reference_value(File, outFile, meanList, inps.ref_y, inps.ref_x)
        return outFile

    # 2. Reference using specific pixel
    # 2.1 Find reference y/x
    if not inps.ref_y or not inps.ref_x:
        if inps.coherence_file:
            inps.method = 'max-coherence'
            inps.ref_y, inps.ref_x = select_max_coherence_yx(inps.coherence_file, mask, inps.min_coherence)
        elif inps.method == 'random':
            inps.ref_y, inps.ref_x = random_select_reference_yx(mask)
        elif inps.method == 'manual':
            inps = manual_select_reference_yx(stack, inps)

    # 2.2 Seeding file with reference y/x
    if inps.ref_y and inps.ref_x and mask[inps.ref_y, inps.ref_x]:
        if inps.mark_attribute:
            re_select = True
            try:
                ref_x_orig = int(atr['ref_x'])
                ref_y_orig = int(atr['ref_y'])
                if inps.ref_x == ref_x_orig and inps.ref_y == ref_y_orig:
                    re_select = False
                    print 'Same reference pixel is already selected/saved in file, skip updating file attributes'
            except: pass
            if re_select:
                print 'Add/update ref_x/y attribute to file: '+File
                atr_ref = dict()
                atr_ref['ref_x'] = str(inps.ref_x)
                atr_ref['ref_y'] = str(inps.ref_y)
                if 'X_FIRST' in atr.keys():
                    atr_ref['ref_lat'] = str(subset.coord_radar2geo(inps.ref_y, atr, 'y'))
                    atr_ref['ref_lon'] = str(subset.coord_radar2geo(inps.ref_x, atr, 'x'))
                print atr_ref
                outFile = ut.add_attribute(File, atr_ref)
        else:
            print 'Referencing input file to pixel in y/x: (%d, %d)'%(inps.ref_y, inps.ref_x)
            box = (inps.ref_x, inps.ref_y, inps.ref_x+1, inps.ref_y+1)
            refList = ut.spatial_average(File, mask, box)[0]
            outFile = seed_file_reference_value(File, outFile, refList, inps.ref_y, inps.ref_x)
    else:
        raise ValueError('Can not find reference y/x, or the reference pixel is masked out / NaN.')

    return outFile
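
seed_file_reference_value() is defined elsewhere; conceptually it subtracts the reference-pixel value from every epoch so all phases become relative to (ref_y, ref_x). An in-memory sketch of that operation (the real function works file-by-file):

import numpy as np

def reference_stack_sketch(stack, ref_y, ref_x):
    # stack: 3D array of shape (num_epoch, length, width)
    ref_values = stack[:, ref_y, ref_x].reshape(-1, 1, 1)
    return stack - ref_values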
Example #8
def main(argv):
    inps = cmdLineParse()

    #print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print 'input '+k+' file: '+inps.timeseries_file
    if not k == 'timeseries':
        sys.exit('ERROR: input file is not timeseries!') 
    h5file = h5py.File(inps.timeseries_file, 'r')

    #####################################
    ## Date Info
    dateListAll = sorted(h5file[k].keys())
    print '--------------------------------------------'
    print 'Dates from input file: '+str(len(dateListAll))
    print dateListAll

    inps.ex_date = get_exclude_date(inps, dateListAll)

    dateList = sorted(list(set(dateListAll) - set(inps.ex_date)))
    print '--------------------------------------------'
    if len(dateList) == len(dateListAll):
        print 'using all dates to calculate the velocity'
    else:
        print 'Dates used to estimate the velocity: '+str(len(dateList))
        print dateList
    print '--------------------------------------------'

    # Date Aux Info
    dates, datevector = ptime.date_list2vector(dateList)

    #####################################
    ## Inversion
    # Design matrix
    B = np.ones([len(datevector),2])
    B[:,0] = datevector
    #B_inv = np.linalg.pinv(B)
    B_inv = np.dot(np.linalg.inv(np.dot(B.T,B)), B.T)
    B_inv = np.array(B_inv, np.float32)

    # Loading timeseries
    print "Loading time series file: "+inps.timeseries_file+' ...'
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    dateNum = len(dateList)
    timeseries = np.zeros([dateNum,length*width],np.float32)
    prog_bar = ptime.progress_bar(maxValue=dateNum, prefix='loading: ')
    for i in range(dateNum):
        date = dateList[i]
        timeseries[i,:] = h5file[k].get(date)[:].flatten()
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5file.close()

    # Velocity Inversion
    print 'Calculating velocity ...'
    X = np.dot(B_inv, timeseries)
    velocity = np.reshape(X[0,:], [length,width])

    print 'Calculating rmse ...'
    timeseries_linear = np.dot(B, X)
    timeseries_residual = timeseries - timeseries_linear
    rmse = np.reshape(np.sqrt((np.sum((timeseries_residual)**2,0))/dateNum), [length,width])
    
    print 'Calculating the standard deviation of the estimated velocity ...'
    s1 = np.sqrt(np.sum(timeseries_residual**2,0) / (dateNum-2))
    s2 = np.sqrt(np.sum((datevector-np.mean(datevector))**2))
    std = np.reshape(s1/s2, [length,width])

    # SSt=np.sum((timeseries-np.mean(timeseries,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################  
    # covariance of the velocities

    #####################################
    # Output file name
    if not inps.outfile:
        inps.outfile = 'velocity.h5'

    inps.outfile_rmse = os.path.splitext(inps.outfile)[0]+'Rmse'+os.path.splitext(inps.outfile)[1]
    inps.outfile_std = os.path.splitext(inps.outfile)[0]+'Std'+os.path.splitext(inps.outfile)[1]
    inps.outfile_r2 = os.path.splitext(inps.outfile)[0]+'R2'+os.path.splitext(inps.outfile)[1]

    # Attributes
    atr['date1'] = datevector[0]
    atr['date2'] = datevector[dateNum-1]

    # File Writing
    print '--------------------------------------'
    atr['FILE_TYPE'] = 'velocity'
    print 'writing >>> '+inps.outfile
    writefile.write(velocity, atr, inps.outfile)
    
    #atr['FILE_TYPE'] = 'rmse'
    print 'writing >>> '+inps.outfile_rmse
    writefile.write(rmse, atr, inps.outfile_rmse)
    
    #atr['FILE_TYPE'] = 'std'
    print 'writing >>> '+inps.outfile_std
    writefile.write(std, atr, inps.outfile_std)

    print 'Done.\n'
    return inps.outfile
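
The inversion above in miniature: with dates in decimal years, the design matrix B has a column of dates and a column of ones, and the least-squares solution gives velocity and intercept. A self-contained check:

import numpy as np

t = np.array([2010.0, 2010.5, 2011.0, 2011.5])     # decimal years
ts = np.array([0.00, 0.01, 0.02, 0.03])            # displacement in meters
B = np.ones((len(t), 2))
B[:, 0] = t
B_inv = np.dot(np.linalg.inv(np.dot(B.T, B)), B.T)
v, c = np.dot(B_inv, ts)
print(v)   # 0.02 m/yr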
Example #9
def main(argv):
    dp             = 1.0
    ntrans         = 1
    save_to_mat    = 'off'
    flip_profile   = 'no'
    which_gps      = 'all'
    flip_updown    = 'yes'
    incidence_file ='incidence_file'
    display_InSAR              = 'on'
    display_Average            = 'on'
    disp_std = 'on'

    ##### Input Args
    try:  opts, args = getopt.getopt(argv,"f:s:e:n:d:g:l:h:r:L:F:p:u:G:S:i:I:A:U:E:")
    except getopt.GetoptError:   Usage() ; sys.exit(1)

    for opt,arg in opts:
        if   opt == '-f':   velocityFile = arg
        elif opt == '-s':   y0,x0  = [int(i) for i in arg.split(',')]
        elif opt == '-e':   y1,x1  = [int(i) for i in arg.split(',')]
        elif opt == '-n':   ntrans = int(arg)
        elif opt == '-d':   dp     = float(arg)
        elif opt == '-g':   gpsFile=arg
        elif opt == '-r':   refStation=arg
        elif opt == '-i':   incidence_file=arg
        elif opt == '-L':   stationsList = arg.split(',')
        elif opt == '-F':   FaultCoords  = arg.split(',')
        elif opt == '-p':   flip_profile = arg
        elif opt == '-u':   flip_updown  = arg; print flip_updown
        elif opt == '-G':   which_gps =arg
        elif opt == '-S':   gps_source=arg
        elif opt == '-h':   hbound=float(arg)
        elif opt == '-l':   lbound=float(arg)
        elif opt == '-I':   display_InSAR   = arg
        elif opt == '-A':   display_Average = arg
        elif opt == '-U':   disp_std        = arg
        elif opt == '-E':   save_to_mat     = arg

    ##### Input File Info
    try: atr = readfile.read_attribute(velocityFile)
    except:  Usage(); sys.exit(1)
    k = atr['FILE_TYPE']
    print 'input file is '+k

    h5file = h5py.File(velocityFile,'r')
    z= h5file[k].get(k)[:]

    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    try: lat,lon,lat_step,lon_step,lat_all,lon_all = get_lat_lon(atr)
    except:  print 'radar coordinate'

    ##### Fault Coordinates
    try:
        Lat0 = dms2d(FaultCoords[0]); Lon0 = dms2d(FaultCoords[1])
        Lat1 = dms2d(FaultCoords[2]); Lon1 = dms2d(FaultCoords[3])
        Length,Width=np.shape(z)
        Yf0,Xf0=find_row_column(Lon0,Lat0,lon,lat,lon_step,lat_step)
        Yf1,Xf1=find_row_column(Lon1,Lat1,lon,lat,lon_step,lat_step)

        print '*********************************************'
        print ' Fault Coordinates:'
        print '   --------------------------  '
        print '    Lat          Lon'
        print str(Lat0) + ' , ' +str(Lon0)
        print str(Lat1) + ' , ' +str(Lon1)
        print '   --------------------------  '
        print '    row          column'
        print str(Yf0) + ' , ' +str(Xf0)
        print str(Yf1) + ' , ' +str(Xf1)
        print '*********************************************'
        #mf=float(Yf1-Yf0)/float((Xf1-Xf0))  # slope of the fault line
        #cf=float(Yf0-mf*Xf0)   # intercept of the fault line
        #df0=dist_point_from_line(mf,cf,x0,y0,1,1)   #distance of the profile start point from the Fault line
        #df1=dist_point_from_line(mf,cf,x1,y1,1,1)  #distance of the profile end point from the Fault line

        #mp=-1./mf  # slope of profile which is perpendicular to the fault line
        #x1=int((df0+df1)/np.sqrt(1+mp**2)+x0)    # correcting the end point of the profile to be on a line perpendicular to the Fault
        #y1=int(mp*(x1-x0)+y0)
    except:
        print '*********************************************'
        print 'No information about the Fault coordinates!'
        print '*********************************************'

#############################################################################
    try:
        x0;y0;x1;y1
    except:
        fig = plt.figure()
        ax=fig.add_subplot(111)
        ax.imshow(z)
        try: ax.plot([Xf0,Xf1],[Yf0,Yf1],'k-')
        except: print 'Fault line is not specified'

        xc=[]
        yc=[]
        print 'please click on start and end point of the desired profile'
        def onclick(event):
            if event.button==1:
                print 'click'
                xc.append(int(event.xdata))
                yc.append(int(event.ydata))
        cid = fig.canvas.mpl_connect('button_press_event', onclick)
        plt.show();
        x0=xc[0];x1=xc[1]
        y0=yc[0];y1=yc[1]
##############################################################################
    try:
        mf=float(Yf1-Yf0)/float((Xf1-Xf0))  # slope of the fault line
        cf=float(Yf0-mf*Xf0)   # intercept of the fault line
        df0=dist_point_from_line(mf,cf,x0,y0,1,1)   #distance of the profile start point from the Fault line
        df1=dist_point_from_line(mf,cf,x1,y1,1,1)  #distance of the profile end point from the Fault line

        mp=-1./mf  # slope of profile which is perpendicular to the fault line
        x1=int((df0+df1)/np.sqrt(1+mp**2)+x0)    # correcting the end point of the profile to be on a line perpendicular to the Fault
        y1=int(mp*(x1-x0)+y0)
    except:
        Info_aboutFault='No'

##############################################################################
    print '******************************************************'
    print 'First profile coordinates:'
    print 'Start point:  y = '+str(y0)+', x = '+str(x0)
    print 'End   point:  y = '+str(y1)+', x = '+str(x1)
    print '******************************************************'
    length = int(np.hypot(x1-x0, y1-y0))
    x, y = np.linspace(x0, x1, length), np.linspace(y0, y1, length)
    zi = z[y.astype(np.int), x.astype(np.int)]
    try:
        lat_transect=lat_all[y.astype(np.int), x.astype(np.int)]
        lon_transect=lon_all[y.astype(np.int), x.astype(np.int)]
    except:
        lat_transect='Nan'
        lon_transect='Nan'

    earth_radius = 6371e3;    # in meter
    try:
        dx=float(atr['X_STEP'])*np.pi/180.0*earth_radius*np.sin(np.mean(lat)*np.pi/180)
        dy=float(atr['Y_STEP'])*np.pi/180.0*earth_radius
        DX=(x-x0)*dx
        DY=(y-y0)*dy
        D =np.hypot(DX, DY)
        print 'geo coordinate:'
        print 'profile length = ' +str(D[-1]/1000.0) + ' km'
        # df0_km=dist_point_from_line(mf,cf,x0,y0,dx,dy)
    except:
        dx=float(atr['RANGE_PIXEL_SIZE'])
        dy=float(atr['AZIMUTH_PIXEL_SIZE'])
        DX=(x-x0)*dx
        DY=(y-y0)*dy
        D=np.hypot(DX, DY)

        print 'radar coordinate:'
        print 'profile length = ' +str(D[-1]/1000.0) + ' km'
        # df0_km=dist_point_from_line(mf,cf,x0,y0,dx,dy)

    try: df0_km=dist_point_from_line(mf,cf,x0,y0,dx,dy)
    except: print 'Fault line is not specified'

#    import pdb; pdb.set_trace()


    transect      = np.zeros([len(D),ntrans])
    transect[:,0] = zi
    XX0=[];XX1=[]
    YY0=[];YY1=[]
    XX0.append(x0);XX1.append(x1)
    YY0.append(y0);YY1.append(y1)

    if ntrans >1:
        m  = float(y1-y0)/float((x1-x0))
        c  = float(y0-m*x0)
        m1 = -1.0/m
        if lat_transect=='Nan':
            for i in range(1,ntrans):
                X0=i*dp/np.sqrt(1+m1**2)+x0
                Y0=m1*(X0-x0)+y0
                X1=i*dp/np.sqrt(1+m1**2)+x1
                Y1=m1*(X1-x1)+y1
                zi=get_transect(z,X0,Y0,X1,Y1)
                transect[:,i]=zi
                XX0.append(X0);XX1.append(X1);
                YY0.append(Y0);YY1.append(Y1);
        else:
            transect_lat      = np.zeros([len(D),ntrans])
            transect_lat[:,0] = lat_transect
            transect_lon      = np.zeros([len(D),ntrans])
            transect_lon[:,0] = lon_transect

            for i in range(1,ntrans):
                X0=i*dp/np.sqrt(1+m1**2)+x0
                Y0=m1*(X0-x0)+y0
                X1=i*dp/np.sqrt(1+m1**2)+x1
                Y1=m1*(X1-x1)+y1
                zi=get_transect(z,X0,Y0,X1,Y1)
                lat_transect=get_transect(lat_all,X0,Y0,X1,Y1)
                lon_transect=get_transect(lon_all,X0,Y0,X1,Y1)
                transect[:,i]=zi
                transect_lat[:,i]=lat_transect
                transect_lon[:,i]=lon_transect
                XX0.append(X0);XX1.append(X1);
                YY0.append(Y0);YY1.append(Y1);


    #############################################
    try:  m_prof_edge,c_prof_edge=line(XX0[0],YY0[0],XX0[-1],YY0[-1])
    except:  print 'Plotting one profile'

    ###############################################################################
    if flip_profile=='yes':
        transect=np.flipud(transect);
        try:         df0_km=np.max(D)-df0_km;
        except:    print '';


    print '******************************************************'
    try:    gpsFile
    except: gpsFile='Nogps'
    print 'GPS velocity file:'
    print gpsFile
    print '*******************************************************'
    if os.path.isfile(gpsFile):
       insarData=z
       del z
       fileName, fileExtension = os.path.splitext(gpsFile)
     #  print fileExtension
     #  if fileExtension =='.cmm4':
     #      print 'reading cmm4 velocities'
     #      Stations, gpsData = redGPSfile_cmm4(gpsFile)
     #      idxRef=Stations.index(refStation)
     #      Lon,Lat,Ve,Vn,Se,Sn,Corr,Hrate,H12=gpsData[idxRef,:]
     #      Lon=Lon-360.0
          # Lat,Lon,Ve,Se,Vn,Sn,Corr,NumEpochs,timeSpan,AvgEpochTimes = gpsData[idxRef,:]
     #      Vu=0
     #  else:
     #      Stations, gpsData = redGPSfile(gpsFile)
     #      idxRef=Stations.index(refStation)
     #      Lat,Lon,Vn,Ve,Sn,Se,Corr,Vu,Su = gpsData[idxRef,:]

       Stations,Lat,Lon,Ve,Se,Vn,Sn=readGPSfile(gpsFile,gps_source)
       idxRef=Stations.index(refStation)
       Length,Width=np.shape(insarData)
      # lat,lon,lat_step,lon_step = get_lat_lon(h5file,Length,Width)
       lat,lon,lat_step,lon_step,lat_all,lon_all=get_lat_lon(h5file)
       IDYref,IDXref=find_row_column(Lon[idxRef],Lat[idxRef],lon,lat,lon_step,lat_step)
       if (not np.isnan(IDYref)) and (not np.isnan(IDXref)):
         print 'referencing InSAR data to the GPS station at : ' + str(IDYref) + ' , '+ str(IDXref)
         if not np.isnan(insarData[IDYref][IDXref]):
             transect = transect - insarData[IDYref][IDXref]
             insarData=insarData - insarData[IDYref][IDXref]

         else:

             print '''
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

      WARNING: nan value for InSAR data at the reference pixel!
               reference station should be a pixel with valid value in InSAR data.

               please select another GPS station as the reference station.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
                   '''
             sys.exit(1)
       else:
         print 'WARNING:'
         print 'Reference GPS station is out of the area covered by InSAR data'
         print 'please select another GPS station as the reference station.'
         sys.exit(1)

       try:
         stationsList
       except:
         stationsList = Stations


      # theta=23.0*np.pi/180.0
       if os.path.isfile(incidence_file):
           print 'Using exact look angle for each pixel'
           h5file_theta=h5py.File(incidence_file,'r')
           dset=h5file_theta['mask'].get('mask')
           theta=dset[0:dset.shape[0],0:dset.shape[1]]
           theta=theta*np.pi/180.0
       else:
           print 'Using average look angle'
           theta=np.ones(np.shape(insarData))*23.0*np.pi/180.0

       heading=193.0*np.pi/180.0

     #  unitVec=[-np.sin(theta)*np.sin(heading),-np.cos(heading)*np.sin(theta),-np.cos(theta)]
       unitVec=[np.cos(heading)*np.sin(theta),-np.sin(theta)*np.sin(heading),0]#-np.cos(theta)]

      #  [0.0806152480932643, 0.34918300221540616, -0.93358042649720174]
       # print unitVec
       # unitVec=[0.3,-0.09,0.9]
      # unitVec=[-0.3,0.09,-0.9]
      # unitVec=[-0.3,0.09,0]

      # print '*******************************************'
      # print 'unit vector to project GPS to InSAR LOS:'
      # print unitVec
      # print '*******************************************'
      # gpsLOS_ref=unitVec[0]*Ve[idxRef]+unitVec[1]*Vn[idxRef]#+unitVec[2]*Vu[idxRef]

#       print np.shape(theta)
#       print IDYref
#       print IDXref
#       print theta[IDYref,IDXref]

       gpsLOS_ref = gps_to_LOS(Ve[idxRef],Vn[idxRef],theta[IDYref,IDXref],heading)
       print '%%%%%%^^^^^^^%%%%%%%%'
       print gpsLOS_ref/1000.0
      # insarData=insarData -gpsLOS_ref/1000.0
      # transect = transect -gpsLOS_ref/1000.0

       GPS=[]
       GPS_station=[]
       GPSx=[]
       GPSy=[]
       GPS_lat=[]
       GPS_lon=[]
       for st in stationsList:
         try :
           idx=Stations.index(st)

          # gpsLOS = unitVec[0]*Ve[idx]+unitVec[1]*Vn[idx]#+unitVec[2]*Vu[idx]

         #  gpsLOS = gps_to_LOS(Ve[idx],Vn[idx],theta[idx],heading)
         #  gpsLOS=gpsLOS-gpsLOS_ref

           IDY,IDX=find_row_column(Lon[idx],Lat[idx],lon,lat,lon_step,lat_step)
           print theta[IDY,IDX]
           gpsLOS = gps_to_LOS(Ve[idx],Vn[idx],theta[IDY,IDX],heading)
         #  gpsLOS = gpsLOS-gpsLOS_ref

           if which_gps =='all':
             if theta[IDY,IDX]!=0.0:
               GPS.append(gpsLOS-gpsLOS_ref)
               GPS_station.append(st)
               GPSx.append(IDX)
               GPSy.append(IDY)
               GPS_lat.append(Lat[idx])
               GPS_lon.append(Lon[idx])
           elif not np.isnan(insarData[IDY][IDX]):
             if theta[IDY,IDX]!=0.0:
               GPS.append(gpsLOS-gpsLOS_ref)
               GPS_station.append(st)
               GPSx.append(IDX)
               GPSy.append(IDY)
               GPS_lat.append(Lat[idx])
               GPS_lon.append(Lon[idx])
         except:
           NoInSAR='yes'

      # print GPS_station
      # print gpsLOS
       DistGPS=[]
       GPS_in_bound=[]
       GPS_in_bound_st=[]
       GPSxx=[]
       GPSyy=[]
       for i in range(len(GPS_station)):
         gx=GPSx[i]
         gy=GPSy[i]
 #        print '******************'
      #   print gx
      #   print gy
         if which_gps in ['all','insar']:
             check_result = 'True'
         else:
             check_result=check_st_in_box(gx,gy,x0,y0,x1,y1,X0,Y0,X1,Y1)

         if check_result=='True':
           check_result2=check_st_in_box2(gx,gy,x0,y0,x1,y1,X0,Y0,X1,Y1)
           GPS_in_bound_st.append(GPS_station[i])
           GPS_in_bound.append(GPS[i])
           GPSxx.append(GPSx[i])
           GPSyy.append(GPSy[i])
          # gy=y0+1
          # gx=x0+1
          # gxp,gyp=get_intersect(m,c,gx,gy)
          # Dx=dx*(gx-gxp);Dy=dy*(gy-gyp)
          # print gxp
          # print gyp
           dg = dist_point_from_line(m,c,gx,gy,1,1) # distance of GPS station from the first profile line
          # DistGPS.append(np.hypot(Dx,Dy))
          # X0=dg/np.sqrt(1+m1**2)+x0
          # Y0=m1*(X0-x0)+y0
          # DistGPS.append(np.hypot(dx*(gx-X0), dy*(gy-Y0)))

           DistGPS.append(dist_point_from_line(m_prof_edge,c_prof_edge,GPSx[i],GPSy[i],dx,dy))


       print '****************************************************'
       print 'GPS stations in the profile area:'
       print GPS_in_bound_st
       print '****************************************************'
       GPS_in_bound = np.array(GPS_in_bound)
       DistGPS = np.array(DistGPS)
   #    axes[1].plot(DistGPS/1000.0, -1*GPS_in_bound/1000, 'bo')

    if gpsFile=='Nogps':

        insarData=z
        GPSxx=[]
        GPSyy=[]
        GPSx=[];GPSy=[]
        GPS=[]
        XX0[0]=x0;XX1[0]=x1;YY0[0]=y0;YY1[0]=y1

   # else:

    print '****************'
    print 'flip up-down'
    print flip_updown

    if flip_updown=='yes' and gpsFile!='Nogps':
       print 'Flipping up-down'
       transect=-1*transect
       GPS_in_bound=-1*GPS_in_bound
    elif flip_updown=='yes':
       print 'Flipping up-down'
       transect=-1*transect


    if flip_profile=='yes' and gpsFile!='Nogps':

       GPS=np.flipud(GPS)
       GPS_in_bound=np.flipud(GPS_in_bound)
       DistGPS=np.flipud(max(D)-DistGPS)


    fig, axes = plt.subplots(nrows=2)
    axes[0].imshow(insarData)
    for i in range(ntrans):
        axes[0].plot([XX0[i], XX1[i]], [YY0[i], YY1[i]], 'r-')

    axes[0].plot(GPSx,GPSy,'b^')
    axes[0].plot(GPSxx,GPSyy,'k^')
    if gpsFile!='Nogps':
        axes[0].plot(IDXref,IDYref,'r^')
    axes[0].axis('image')
    axes[1].plot(D/1000.0,transect,'ko',ms=1)

    avgInSAR=np.array(nanmean(transect,axis=1))
    stdInSAR=np.array(nanstd(transect,axis=1))
  #  print avgInSAR
  #  print stdInSAR

      #std=np.std(transect,1)
   # axes[1].plot(D/1000.0, avgInSAR, 'r-')
    try:
      axes[1].plot(DistGPS/1000.0, -1*GPS_in_bound/1000, 'b^',ms=10)
    except:
      print ''
   # pl.fill_between(x, y-error, y+error,alpha=0.6, facecolor='0.20')
   # print transect
#############################################################################

    fig2, axes2 = plt.subplots(nrows=1)
    axes2.imshow(insarData)
    #for i in range(ntrans):
    axes2.plot([XX0[0], XX1[0]], [YY0[0], YY1[0]], 'k-')
    axes2.plot([XX0[-1], XX1[-1]], [YY0[-1], YY1[-1]], 'k-')
    axes2.plot([XX0[0], XX0[-1]], [YY0[0], YY0[-1]], 'k-')
    axes2.plot([XX1[0], XX1[-1]], [YY1[0], YY1[-1]], 'k-')

    try:
       axes2.plot([Xf0,Xf1],[Yf0,Yf1], 'k-')
    except:
       FaultLine='None'


    axes2.plot(GPSx,GPSy,'b^')
    axes2.plot(GPSxx,GPSyy,'k^')
    if gpsFile!='Nogps':
        axes2.plot(IDXref,IDYref,'r^')
    axes2.axis('image')

    figName = 'transect_area.png'
    print 'writing '+figName
    plt.savefig(figName)

#############################################################################
    fig = plt.figure()
    fig.set_size_inches(10,4)
    ax = plt.Axes(fig, [0., 0., 1., 1.], )
    ax=fig.add_subplot(111)
    if display_InSAR in ['on','On','ON']:
       ax.plot(D/1000.0,transect*1000,'o',ms=1,mfc='Black', linewidth='0')


############################################################################
# save the profile data:
    if save_to_mat in ['ON','on','On','yes','y','YES','Yes']:
       import scipy.io as sio
       matFile='transect.mat'
       dataset={}
       dataset['datavec']=transect
       try:
         dataset['lat']=transect_lat
         dataset['lon']=transect_lon
       except:
         dataset['lat']='Nan'
         dataset['lon']='Nan'
       dataset['Unit']='m'
       dataset['Distance_along_profile']=D
       print '*****************************************'
       print ''
       print 'writing transect to >>> '+matFile
       sio.savemat(matFile, {'dataset': dataset})
       print ''
       print '*****************************************'
############################################################################
 #   ax.plot(D/1000.0, avgInSAR*1000, 'r-')

#    ax.plot(D/1000.0,transect*1000/(np.sin(23.*np.pi/180.)*np.cos(38.*np.pi/180.0)),'o',ms=1,mfc='Black', linewidth='0')
#    ax.plot(D/1000.0, avgInSAR*1000/(np.sin(23.*np.pi/180.)*np.cos(38.*np.pi/180.0)), 'r-')

#############################################################################
    if disp_std in ['on','On','ON']:

       for i in np.arange(0.0,1.01,0.01):
          ax.plot(D/1000.0, (avgInSAR-i*stdInSAR)*1000, '-',color='#DCDCDC',alpha=0.5)#,color='#DCDCDC')#'LightGrey')
       for i in np.arange(0.0,1.01,0.01):
          ax.plot(D/1000.0, (avgInSAR+i*stdInSAR)*1000, '-',color='#DCDCDC',alpha=0.5)#'LightGrey')
#############################################################################
    if display_Average in ['on','On','ON']:
       ax.plot(D/1000.0, avgInSAR*1000, 'r-')
###########
  # ax.fill_between(D/1000.0, (avgInSAR-stdInSAR)*1000, (avgInSAR+stdInSAR)*1000,where=(avgInSAR+stdInSAR)*1000>=(avgInSAR-stdInSAR)*1000,alpha=1, facecolor='Red')

    try:
        ax.plot(DistGPS/1000.0, -1*GPS_in_bound, '^',ms=10,mfc='Cyan')
    except:
        print ''
    ax.set_ylabel('LOS velocity [mm/yr]',fontsize=26)
    ax.set_xlabel('Distance along profile [km]',fontsize=26)


   # print '******************'
   # print 'Distance of fault from the beginning of profile (km):'
   # print df0_km/1000.0


    ###################################################################
    #lower and higher bounds for displaying the profile

    try:
       lbound
       hbound
    except:
       lbound=np.nanmin(transect)*1000
       hbound=np.nanmax(transect)*1000


    ###################################################################
    #To plot the Fault location on the profile
    try:
       ax.plot([df0_km/1000.0,df0_km/1000.0], [lbound,hbound], '--',color='black',linewidth='2')
    except:
       fault_loc='None'

    ###################################################################


    try:
         ax.set_ylim(lbound,hbound)
    except:
         ylim='no'

   # try:
   #      ax.set_xlim(-10,300)
   # except:
    #     xlim='no'


##########
#Temporary To plot DEM
   # try:
#    majorLocator = MultipleLocator(5)
#    ax.yaxis.set_major_locator(majorLocator)
#    minorLocator   = MultipleLocator(1)
#    ax.yaxis.set_minor_locator(minorLocator)

#    plt.tick_params(which='major', length=15,width=2)
#    plt.tick_params(which='minor', length=6,width=2)

#    try:
#       for tick in ax.xaxis.get_major_ticks():
#                tick.label.set_fontsize(26)
#       for tick in ax.yaxis.get_major_ticks():
#                tick.label.set_fontsize(26)
#
#       plt.tick_params(which='major', length=15,width=2)
#       plt.tick_params(which='minor', length=6,width=2)
#    except:
#       print 'couldn not fix the ticks! '


    figName = 'transect.png'
    print 'writing '+figName
    plt.savefig(figName)
    print ''
    print '________________________________'
#############################################################################
    plt.show()
Example no. 10
def timeseries_inversion(ifgramFile='unwrapIfgram.h5',
                         coherenceFile='coherence.h5',
                         inps_dict=None):
    '''Implementation of the SBAS algorithm.
    Modified from sbas.py written by Scott Baker, 2012.

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        inps_dict     - dict, including the following options:
                        weight_function
                        min_coherence
                        max_coherence
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    '''
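    # Typical call, a sketch using the default file names from the signature:
    #   ts_file, tcoh_file = timeseries_inversion('unwrapIfgram.h5', 'coherence.h5')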
    total = time.time()

    if not inps_dict:
        inps_dict = vars(cmdLineParse())
    weight_func = inps_dict['weight_function']
    min_coh = inps_dict['min_coherence']
    max_coh = inps_dict['max_coherence']

    # Basic Info
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pixel_num = length * width

    h5ifgram = h5py.File(ifgramFile, 'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    if inps_dict['weight_function'] == 'no':
        ifgram_list = ut.check_drop_ifgram(h5ifgram, atr, ifgram_list)
    ifgram_num = len(ifgram_list)

    # Convert ifgram_list to date12/8_list
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((date_num - 1, 1))
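    # tbase_diff[i] = days between acquisitions i and i+1, used below to turn
    # the inverted phase velocities back into cumulative phase (SBAS formulation)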

    print 'number of interferograms: ' + str(ifgram_num)
    print 'number of acquisitions  : ' + str(date_num)
    print 'number of pixels: ' + str(pixel_num)

    # Reference pixel in space
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
    except (KeyError, ValueError):
        print 'ERROR: No ref_x/y found! Cannot invert interferograms without a reference in space.'
        print 'run seed_data.py ' + ifgramFile + ' --mark-attribute for a quick referencing.'
        sys.exit(1)

    ##### Read Interferograms
    print 'reading interferograms ...'
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for j in range(ifgram_num):
        ifgram = ifgram_list[j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[:]
        #d[d != 0.] -= d[ref_y, ref_x]
        d -= d[ref_y, ref_x]
        ifgram_data[j] = d.flatten()
        prog_bar.update(j + 1, suffix=date12_list[j])
    h5ifgram.close()
    prog_bar.close()

    #####---------------------- Inversion ----------------------#####
    # Design matrix
    A, B = ut.design_matrix(ifgramFile, date12_list)

    if weight_func == 'no':
        print 'generalized inversion using SVD (Berardino et al., 2002, IEEE-TGRS)'
        print 'inverting time series ...'
        B_inv = np.array(np.linalg.pinv(B), np.float32)
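        # pinv(B) gives the minimum-norm LSQ solution of B * v = dphi, i.e. the
        # phase velocity between consecutive acquisitions; scaling by the time
        # intervals and cumulative-summing integrates it into a time-series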
        ts_rate = np.dot(B_inv, ifgram_data)
        ts1 = ts_rate * np.tile(tbase_diff, (1, pixel_num))
        ts0 = np.array([0.] * pixel_num, np.float32)
        ts_data = np.vstack((ts0, np.cumsum(ts1, axis=0)))
        del ts_rate, ts0, ts1

        # Temporal coherence
        print 'calculating temporal coherence (Tizzani et al., 2007, RSE)'
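        # temporal coherence = |sum_j exp(1j*(phi_j - A_j*ts))| / ifgram_num,
        # i.e. 1.0 for a pixel perfectly consistent with the inverted time-series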
        temp_coh = np.zeros((1, pixel_num), np.float32) + 0j
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for i in range(ifgram_num):
            ifgram_est = np.dot(A[i, :], ts_data[1:, :])
            ifgram_diff = ifgram_data[i, :] - ifgram_est
            temp_coh += np.exp(1j * ifgram_diff)
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        del ifgram_data, ifgram_est, ifgram_diff
        temp_coh = np.array((np.absolute(temp_coh) / ifgram_num).reshape(
            (length, width)),
                            dtype=np.float32)

    else:
        print 'weighted least square (WLS) inversion using coherence pixel by pixel'
        if np.linalg.matrix_rank(A) < date_num - 1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Cannot invert the weighted least squares solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)

        pixel_mask = np.ones(pixel_num, np.bool_)
        print 'reading coherence: ' + os.path.basename(coherenceFile)
        h5coh = h5py.File(coherenceFile, 'r')
        coh_list = sorted(h5coh['coherence'].keys())
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        prog_bar = ptime.progress_bar(maxValue=ifgram_num)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[:].flatten()
            d[np.isnan(d)] = 0.
            pixel_mask[d == 0.] = 0
            coh_data[j] = d
            prog_bar.update(j + 1, suffix=date12_list[j])
        h5coh.close()
        prog_bar.close()

        # Get mask of valid pixels to inverse
        print 'skip pixels with zero coherence in at least one interferogram'
        print 'skip pixels with zero phase     in all          interferograms'
        ifgram_stack = ut.get_file_stack(ifgramFile).flatten()
        pixel_mask[ifgram_stack == 0.] = 0

        pixel_num2inv = np.sum(pixel_mask)
        pixel_idx2inv = np.where(pixel_mask)[0]
        ifgram_data = ifgram_data[:, pixel_mask]
        coh_data = coh_data[:, pixel_mask]
        print 'number of pixels to invert: %d' % (pixel_num2inv)

        ##### Calculate Weight matrix
        weight = coh_data
        if weight_func.startswith('var'):
            print 'convert coherence to weight using inverse of variance: x**2/(1-x**2) from Hanssen (2001, eq. 4.2.32)'
            weight[weight > 0.999] = 0.999
            if weight_func == 'variance-max-coherence':
                print 'constrain the max coherence to %f' % max_coh
                weight[weight > max_coh] = max_coh
            weight = np.square(weight)
            weight *= 1. / (1. - weight)
            if weight_func == 'variance-log':
                print 'use log(1/variance)+1 as weight'
                weight = np.log(weight + 1)
        elif weight_func.startswith('lin'):
            print 'use coherence as weight directly (Tong et al., 2016, RSE)'
        elif weight_func.startswith('norm'):
            mu = (min_coh + max_coh) / 2.0
            std = (max_coh - min_coh) / 6.0
            print 'convert coherence to weight using CDF of normal distribution: N(%f, %f)' % (
                mu, std)
            chunk_size = 1000
            chunk_num = int(pixel_num2inv / chunk_size) + 1
            prog_bar = ptime.progress_bar(maxValue=chunk_num)
            for i in range(chunk_num):
                i0 = i * chunk_size
                i1 = min([pixel_num2inv, i0 + chunk_size])
                weight[:, i0:i1] = norm.cdf(weight[:, i0:i1], mu, std)
                prog_bar.update(i + 1, every=10)
            prog_bar.close()
            #weight = norm.cdf(weight, mu, std)
        else:
            print 'Unrecognized weight function: %s' % weight_func
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        ts_data = np.zeros((date_num, pixel_num), np.float32)
        temp_coh = np.zeros(pixel_num, np.float32)
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            # Inverse timeseries
            ifgram_pixel = ifgram_data[:, i]
            weight_pixel = weight[:, i]
            W = np.diag(weight_pixel)
            ts = np.linalg.inv(A.T.dot(W).dot(A)).dot(
                A.T).dot(W).dot(ifgram_pixel)
            ts_data[1:, pixel_idx2inv[i]] = ts

            # Calculate weighted temporal coherence
            ifgram_diff = ifgram_pixel - np.dot(A, ts)
            temp_coh_pixel = np.abs(
                np.sum(np.multiply(weight_pixel, np.exp(1j * ifgram_diff)),
                       axis=0)) / np.sum(weight_pixel)
            temp_coh[pixel_idx2inv[i]] = temp_coh_pixel

            prog_bar.update(i + 1, every=2000, suffix=str(i + 1) + ' pixels')
        prog_bar.close()
        del ifgram_data, weight

    #####---------------------- Outputs ----------------------#####
    ## 1.1 Convert time-series phase to displacement
    print 'converting phase to range'
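    # one-way range change = -wavelength / (4*pi) * unwrapped phase (two-way travel)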
    phase2range = -1 * float(atr['WAVELENGTH']) / (4. * np.pi)
    ts_data *= phase2range

    ## 1.2 Write time-series data matrix
    timeseriesFile = 'timeseries.h5'
    print 'writing >>> ' + timeseriesFile
    print 'number of acquisitions: ' + str(date_num)
    h5timeseries = h5py.File(timeseriesFile, 'w')
    group = h5timeseries.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date8_list[i]
        dset = group.create_dataset(date,
                                    data=ts_data[i].reshape(length, width),
                                    compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    ## 1.3 Write time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(
        ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(
        None, '[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None, '[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None, '[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'
    for key, value in atr.iteritems():
        group.attrs[key] = value
    h5timeseries.close()
    del ts_data

    ## 2. Write Temporal Coherence File
    tempCohFile = 'temporalCoherence.h5'
    print 'writing >>> ' + tempCohFile
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    writefile.write(temp_coh.reshape(length, width), atr, tempCohFile)

    print 'Time series inversion took ' + str(time.time() -
                                              total) + ' secs\nDone.'
    return timeseriesFile, tempCohFile
Example no. 11
def mask_file(File, maskFile, outFile=None, inps_dict=None):
    ''' Mask input File with maskFile
    Inputs:
        File/maskFile - string, path of the file to mask and of the mask file
        inps_dict - dictionary including the following options:
                    subset_x/y - list of 2 ints, subset in x/y direction
                    thr - float, threshold/minValue to generate mask
    Output:
        outFile - string
    '''
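    # Typical call, a sketch with hypothetical file names; note inps_dict must
    # at least provide 'fill_value':
    #   mask_file('velocity.h5', 'maskTempCoh.h5', inps_dict={'fill_value': 0})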

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'masking ' + k + ' file: ' + File + ' ...'

    # Read maskFile
    atrm = readfile.read_attribute(maskFile)
    km = atrm['FILE_TYPE']
    if km not in multi_group_hdf5_file + multi_dataset_hdf5_file:
        print 'reading mask file: ' + maskFile
        mask = readfile.read(maskFile, epoch='mask')[0]
        if inps_dict:
            mask = update_mask(mask, inps_dict)

    if not outFile:
        outFile = os.path.splitext(File)[0] + '_masked' + os.path.splitext(
            File)[1]

    if k in ['timeseries', 'interferograms', 'wrapped', 'coherence']:
        h5file = h5py.File(File, 'r')
        epochList = sorted(h5file[k].keys())

        h5out = h5py.File(outFile, 'w')
        print 'writing >>> ' + outFile

    ##### Multiple Dataset File
    if k == 'timeseries':
        print 'number of acquisitions: ' + str(len(epochList))
        group = h5out.create_group(k)
        for d in epochList:
            print d
            unw = h5file[k].get(d)[:]

            unw = mask_matrix(unw, mask, inps_dict['fill_value'])

            dset = group.create_dataset(d, data=unw, compression='gzip')
        for key, value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms', 'wrapped', 'coherence']:
        print 'number of interferograms: ' + str(len(epochList))
        gg = h5out.create_group(k)

        # Mask multi group file with multi group coherence file
        if km == 'coherence':
            h5mask = h5py.File(maskFile, 'r')
            cohList = sorted(h5mask[km].keys())
            if len(cohList) != len(epochList):
                sys.exit('ERROR: coherence mask file has a different '
                         'number of interferograms than the input file!')

        for i in range(len(epochList)):
            igram = epochList[i]
            if km == 'coherence':
                coh = cohList[i]
                sys.stdout.write('\r%s %s %s/%s ...' %
                                 (igram, coh, i + 1, len(epochList)))
                sys.stdout.flush()
            else:
                sys.stdout.write('\r%s %s/%s ...' %
                                 (igram, i + 1, len(epochList)))
                sys.stdout.flush()

            unw = h5file[k][igram].get(igram)[:]

            if km == 'coherence':
                mask = h5mask[km][coh].get(coh)[:]
                if inps_dict:
                    mask = update_mask(mask, inps_dict, print_msg=False)

            unw = mask_matrix(unw, mask, inps_dict['fill_value'])

            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=unw, compression='gzip')
            for key, value in h5file[k][igram].attrs.iteritems():
                group.attrs[key] = value

    ##### Single Dataset File
    elif k in ['.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        rg, az, atr = readfile.read(File)
        rg = mask_matrix(rg, mask, inps_dict['fill_value'])
        az = mask_matrix(az, mask, inps_dict['fill_value'])
        print 'writing >>> ' + outFile
        writefile.write(rg, az, atr, outFile)

    else:
        unw, atr = readfile.read(File)
        unw = mask_matrix(unw, mask, inps_dict['fill_value'])
        print 'writing >>> ' + outFile
        writefile.write(unw, atr, outFile)

    try:
        h5file.close()
    except:
        pass
    try:
        h5out.close()
    except:
        pass
    try:
        h5mask.close()
    except:
        pass
    return outFile
Example no. 12
def cmdLineParse():  # excerpt: the argparse parser construction above this point is not shown
    inps = parser.parse_args()
    if (not inps.disp_fig or inps.fig_base) and not inps.save_fig:
        inps.save_fig = True
    if inps.ylim:
        inps.ylim = sorted(inps.ylim)
    return inps


###########################################################################################
if __name__ == '__main__':
    #######Actual code.
    inps = cmdLineParse()

    # Time Series Info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print 'input file is ' + k + ': ' + inps.timeseries_file
    if not k == 'timeseries':
        raise ValueError('Only timeseries file is supported!')

    h5 = h5py.File(inps.timeseries_file, 'r')
    dateList = sorted(h5[k].keys())
    date_num = len(dateList)
    inps.dates, tims = ptime.date_list2vector(dateList)

    # Read exclude dates
    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []
        if input_ex_date:
Example no. 13
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)

    if inps.dem_file:
        inps.dem_file = ut.get_file_list([inps.dem_file])[0]
        # Convert DEM to ROIPAC format
        if os.path.splitext(inps.dem_file)[1] in ['.h5']:
            print 'convert DEM file to ROIPAC format'
            dem, atr_dem = readfile.read(inps.dem_file)
            if 'Y_FIRST' in atr_dem.keys():
                atr_dem['FILE_TYPE'] = '.dem'
            else:
                atr_dem['FILE_TYPE'] = '.hgt'
            outname = os.path.splitext(
                inps.dem_file)[0] + '4pyaps' + atr_dem['FILE_TYPE']
            inps.dem_file = writefile.write(dem, atr_dem, outname)

    print '*******************************************************************************'
    print 'Downloading weather model data ...'

    ## Get Grib Source
    if inps.weather_model in ['ECMWF', 'ERA-Interim']:
        inps.grib_source = 'ECMWF'
    elif inps.weather_model == 'ERA':
        inps.grib_source = 'ERA'
    elif inps.weather_model == 'MERRA':
        inps.grib_source = 'MERRA'
    elif inps.weather_model == 'NARR':
        inps.grib_source = 'NARR'
    else:
        raise Exception('Unrecognized weather model: ' + inps.weather_model)
    print 'grib source: ' + inps.grib_source

    ## Grib data directory
    if not inps.weather_dir:
        if inps.timeseries_file:
            inps.weather_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER'
        elif inps.dem_file:
            inps.weather_dir = os.path.dirname(os.path.abspath(
                inps.dem_file)) + '/../WEATHER'
        else:
            inps.weather_dir = os.path.abspath(os.getcwd())
    print 'Store weather data into directory: ' + inps.weather_dir
    grib_dir = inps.weather_dir + '/' + inps.grib_source
    if not os.path.isdir(grib_dir):
        print 'making directory: ' + grib_dir
        os.makedirs(grib_dir)

    ## Get Acquisition time
    if not inps.hour:
        inps.hour = closest_weather_product_time(atr['CENTER_LINE_UTC'],
                                                 inps.grib_source)
    print 'Time of closest available product: ' + inps.hour

    ## Get grib file list and date list
    inps.grib_file_list = []
    if not inps.date_list_file:
        h5timeseries = h5py.File(inps.timeseries_file, 'r')
        dateList = sorted(h5timeseries['timeseries'].keys())
        h5timeseries.close()
        print 'read date list info from: ' + inps.timeseries_file
    else:
        dateList = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print 'read date list info from: ' + inps.date_list_file

    for d in dateList:
        if inps.grib_source == 'ECMWF':
            grib_file = grib_dir + '/ERA-Int_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'ERA':
            grib_file = grib_dir + '/ERA_' + d + '_' + inps.hour + '.grb'
        elif inps.grib_source == 'MERRA':
            grib_file = grib_dir + '/merra-' + d + '-' + inps.hour + '.hdf'
        elif inps.grib_source == 'NARR':
            grib_file = grib_dir + '/narr-a_221_' + d + '_' + inps.hour + '00_000.grb'
        inps.grib_file_list.append(grib_file)

    ## Get date list to download
    grib_file_existed = ut.get_file_list(inps.grib_file_list)
    if grib_file_existed:
        grib_filesize_mode = ut.mode(
            [os.path.getsize(i) for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if os.path.getsize(i) != grib_filesize_mode
        ]
        print 'number of grib files existed    : %d' % len(grib_file_existed)
        print 'file size mode: %d' % grib_filesize_mode
        if grib_file_corrupted:
            print '------------------------------------------------------------------------------'
            print 'corrupted grib files detected! Deleting them to re-download ...'
            print 'number of grib files corrupted  : %d' % len(
                grib_file_corrupted)
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print rmCmd
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print '------------------------------------------------------------------------------'
    grib_file2download = sorted(
        list(set(inps.grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall('\d{8}', i)[0]) for i in grib_file2download
    ]
    print 'number of grib files to download: %d' % len(date_list2download)
    print '------------------------------------------------------------------------------\n'

    ## Download grib file using PyAPS
    if inps.grib_source == 'ECMWF':
        pa.ECMWFdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'ERA':
        pa.ERAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'MERRA':
        pa.MERRAdload(date_list2download, inps.hour, grib_dir)
    elif inps.grib_source == 'NARR':
        pa.NARRdload(date_list2download, inps.hour, grib_dir)

    if inps.download:
        print 'Download completed, exit as planned.'
        return

    print '*******************************************************************************'
    print 'Calculating delay for each epoch.'

    ## Get Incidence angle: to map the zenith delay to the slant delay
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            inps.incidence_angle = float(inps.incidence_angle)
            print 'incidence angle: ' + str(inps.incidence_angle)
    else:
        print 'calculating incidence angle ...'
        inps.incidence_angle = ut.incidence_angle(atr)
    inps.incidence_angle = inps.incidence_angle * np.pi / 180.0

    ## Create delay hdf5 file
    tropFile = inps.grib_source + '.h5'
    print 'writing >>> ' + tropFile
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')

    ## Create tropospheric corrected timeseries hdf5 file
    if not inps.out_file:
        ext = os.path.splitext(inps.timeseries_file)[1]
        inps.out_file = os.path.splitext(
            inps.timeseries_file)[0] + '_' + inps.grib_source + '.h5'
    print 'writing >>> ' + inps.out_file
    h5timeseries_tropCor = h5py.File(inps.out_file, 'w')
    group_tropCor = h5timeseries_tropCor.create_group('timeseries')

    ## Calculate phase delay on reference date
    if 'ref_date' in atr.keys():
        ref_idx = dateList.index(atr['ref_date'])
    else:
        ref_idx = 0
    print 'calculating phase delay on reference date: ' + dateList[ref_idx]
    phs_ref = get_delay(inps.grib_file_list[ref_idx], atr, vars(inps))

    ## Loop to calculate phase delay on the other dates
    h5timeseries = h5py.File(inps.timeseries_file, 'r')
    for i in range(len(inps.grib_file_list)):
        # Get phase delay
        grib_file = inps.grib_file_list[i]
        if not i == ref_idx:
            print dateList[i]
            phs = get_delay(grib_file, atr, vars(inps))
        else:
            phs = np.copy(phs_ref)
        # Get relative phase delay in time
        phs -= phs_ref
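        # reference all delays in time to the reference date, so the corrected
        # time-series stays zero there, consistent with the input time-series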

        # Write dataset
        print 'writing hdf5 file ...'
        data = h5timeseries['timeseries'].get(dateList[i])[:]
        dset = group_tropCor.create_dataset(dateList[i],
                                            data=data - phs,
                                            compression='gzip')
        dset = group_trop.create_dataset(dateList[i],
                                         data=phs,
                                         compression='gzip')

    ## Write Attributes
    for key, value in atr.iteritems():
        group_tropCor.attrs[key] = value
        group_trop.attrs[key] = value

    h5timeseries.close()
    h5timeseries_tropCor.close()
    h5trop.close()

    # Delete temporary DEM file in ROI_PAC format
    if inps.dem_file and '4pyaps' in inps.dem_file:
        rmCmd = 'rm ' + inps.dem_file + ' ' + inps.dem_file + '.rsc '
        print rmCmd
        os.system(rmCmd)

    print 'Done.'

    return
Example no. 14
def main(argv):
    inps = cmdLineParse()
    suffix = '_demErr'
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + suffix + os.path.splitext(
                inps.timeseries_file)[1]

    # 1. template_file
    if inps.template_file:
        print 'read option from template file: ' + inps.template_file
        inps = read_template2inps(inps.template_file, inps)

    # Read Time Series
    print "loading time series: " + inps.timeseries_file
    atr = readfile.read_attribute(inps.timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5['timeseries'].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)

    # Exclude date info
    #inps.ex_date = ['20070115','20100310']
    if inps.ex_date:
        inps = get_exclude_date(inps, date_list)
        if inps.ex_date:
            inps.ex_flag = np.array([i not in inps.ex_date for i in date_list])

    timeseries = np.zeros((len(date_list), length * width), np.float32)
    prog_bar = ptime.progress_bar(maxValue=date_num, prefix='loading: ')
    for i in range(date_num):
        date = date_list[i]
        d = h5['timeseries'].get(date)[:]
        timeseries[i][:] = d.flatten('F')
        prog_bar.update(i + 1, suffix=date)
    del d
    h5.close()
    prog_bar.close()

    # Perpendicular Baseline
    print 'read perpendicular baseline'
    try:
        inps.pbase = ut.perp_baseline_timeseries(atr, dimension=0)
        if inps.pbase.shape[1] > 1:
            print '\tconsider P_BASELINE variation in azimuth direction'
        else:
            pbase = inps.pbase
    except:
        print '\tCannot find P_BASELINE_TIMESERIES from timeseries file.'
        print '\tTrying to calculate it from interferograms file'
        if inps.ifgram_file:
            inps.pbase = np.array(
                ut.perp_baseline_ifgram2timeseries(
                    inps.ifgram_file)[0]).reshape(date_num, 1)
        else:
            message = 'No interferogram file input!\n'+\
                      'Cannot correct for DEM residual without perpendicular baseline info!'
            raise Exception(message)

    # Temporal Baseline
    print 'read temporal baseline'
    inps.tbase = np.array(ptime.date_list2tbase(date_list)[0]).reshape(
        date_num, 1)

    # Incidence angle (look angle in the paper)
    if inps.incidence_angle:
        if os.path.isfile(inps.incidence_angle):
            print 'reading incidence angle from file: ' + inps.incidence_angle
            inps.incidence_angle = readfile.read(inps.incidence_angle)[0]
        else:
            try:
                inps.incidence_angle = np.array(float(inps.incidence_angle))
                print 'use input incidence angle : ' + str(
                    inps.incidence_angle)
            except:
                raise ValueError('Cannot read input incidence angle: ' +
                                 str(inps.incidence_angle))
    else:
        print 'calculate incidence angle using attributes of time series file'
        if inps.pbase.shape[1] > 1:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=2)
        else:
            inps.incidence_angle = ut.incidence_angle(atr, dimension=1)
    inps.incidence_angle *= np.pi / 180.0

    # Range distance
    if inps.range_dis:
        if os.path.isfile(inps.range_dis):
            print 'reading range distance from file: ' + inps.range_dis
            inps.range_dis = readfile.read(inps.range_dis)[0]
        else:
            try:
                inps.range_dis = np.array(float(inps.range_dis))
                print 'use input range distance : ' + str(inps.range_dis)
            except:
                raise ValueError('Cannot read input range distance: ' +
                                 str(inps.range_dis))
    else:
        print 'calculate range distance using attributes from time series file'
        if inps.pbase.shape[1] > 1:
            inps.range_dis = ut.range_distance(atr, dimension=2)
        else:
            inps.range_dis = ut.range_distance(atr, dimension=1)

    # Design matrix - temporal deformation model using tbase
    print '-------------------------------------------------'
    if inps.phase_velocity:
        print 'using phase velocity history'
        A1 = np.ones((date_num - 1, 1))
        A2 = (inps.tbase[1:date_num] + inps.tbase[0:date_num - 1]) / 2.0
        A3 = (inps.tbase[1:date_num]**3 - inps.tbase[0:date_num - 1]**
              3) / np.diff(inps.tbase, axis=0) / 6.0
        #A3 = (inps.tbase[1:date_num]**2 + inps.tbase[1:date_num]*inps.tbase[0:date_num-1] +\
        #      inps.tbase[0:date_num-1]**2) / 6.0
    else:
        print 'using phase history'
        A1 = np.hstack((np.ones((date_num, 1)), inps.tbase))
        A2 = inps.tbase**2 / 2.0
        A3 = inps.tbase**3 / 6.0
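    # A1/A2/A3 are columns of the temporal model d(t) = c + v*t + a*t**2/2 + da*t**3/6
    # (or its time derivative when phase velocity is used); poly_order below
    # selects how many of these terms enter the design matrix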

    # Polynomial order of model
    print "temporal deformation model's polynomial order = " + str(
        inps.poly_order)
    if inps.poly_order == 1: A_def = A1
    elif inps.poly_order == 2: A_def = np.hstack((A1, A2))
    elif inps.poly_order == 3: A_def = np.hstack((A1, A2, A3))

    # step function
    if inps.step_date:
        print "temporal deformation model's step function step at " + inps.step_date
        step_yy = ptime.yyyymmdd2years(inps.step_date)
        yy_list = ptime.yyyymmdd2years(date_list)
        flag_array = np.array(yy_list) >= step_yy
        A_step = np.zeros((date_num, 1))
        A_step[flag_array] = 1.0
        A_def = np.hstack((A_def, A_step))

    # Heresh's original code for phase history approach
    #A_def = np.hstack((A2,A1,np.ones((date_num,1))))
    print '-------------------------------------------------'

    ##---------------------------------------- Loop for L2-norm inversion  -----------------------------------##
    delta_z_mat = np.zeros([length, width])
    resid_n = np.zeros([A_def.shape[0], length * width])
    constC = np.zeros([length, width])
    #delta_a_mat = np.zeros([length, width])
    if inps.incidence_angle.ndim == 2 and inps.range_dis.ndim == 2:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' pixel by pixel: %d loops in total' % (length*width)
        prog_bar = ptime.progress_bar(maxValue=length * width,
                                      prefix='calculating: ')
        for i in range(length * width):
            row = i % length
            col = i / length
            range_dis = inps.range_dis[row, col]
            inc_angle = inps.incidence_angle[row, col]
            # Consider P_BASELINE variation within one interferogram
            if inps.pbase.shape[1] > 1:
                pbase = inps.pbase[:, row].reshape(date_num, 1)

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
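            # geometry: a DEM error dz maps into a range change of
            # pbase*dz/(range_dis*sin(inc_angle)) per acquisition, which is the
            # column A_delta_z built here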
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i] = ts_dis - np.dot(A, X)

            # Update DEM error / timeseries matrix
            delta_z = X[0]
            delta_z_mat[row, col] = delta_z
            if inps.update_timeseries:
                timeseries[:, i] -= np.dot(A_delta_z, delta_z).flatten()
            prog_bar.update(i + 1, every=length * width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 1 and inps.range_dis.ndim == 1:
        print 'inverting using L2-norm minimization (unweighted least squares)'\
              ' column by column: %d loops in total' % (width)
        prog_bar = ptime.progress_bar(maxValue=width, prefix='calculating: ')
        for i in range(width):
            range_dis = inps.range_dis[i]
            inc_angle = inps.incidence_angle[i]

            # Design matrix - DEM error using pbase, range distance and incidence angle
            A_delta_z = pbase / (range_dis * np.sin(inc_angle))
            if inps.phase_velocity:
                pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
                A_delta_z_v = pbase_v / (range_dis * np.sin(inc_angle))
                A = np.hstack((A_delta_z_v, A_def))
            else:
                A = np.hstack((A_delta_z, A_def))

            # L-2 norm inversion
            if inps.ex_date:
                A_inv = np.linalg.pinv(A[inps.ex_flag, :])
            else:
                A_inv = np.linalg.pinv(A)

            # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
            ts_dis = timeseries[:, i * length:(i + 1) * length]
            if inps.phase_velocity:
                ts_dis = np.diff(ts_dis, axis=0) / np.diff(inps.tbase, axis=0)

            if inps.ex_date:
                X = np.dot(A_inv, ts_dis[inps.ex_flag, :])
            else:
                X = np.dot(A_inv, ts_dis)

            # Residual vector n
            resid_n[:, i * length:(i + 1) * length] = ts_dis - np.dot(A, X)
            constC[:, i] = X[1].reshape((1, length))

            # Update DEM error / timeseries matrix
            delta_z = X[0].reshape((1, length))
            delta_z_mat[:, i] = delta_z
            if inps.update_timeseries:
                timeseries[:, i * length:(i + 1) * length] -= np.dot(
                    A_delta_z, delta_z)
            prog_bar.update(i + 1, every=width / 100)
        prog_bar.close()

    elif inps.incidence_angle.ndim == 0 and inps.range_dis.ndim == 0:
        print 'inverting using L2-norm minimization (unweighted least squares) for the whole area'

        # Design matrix - DEM error using pbase, range distance and incidence angle
        A_delta_z = pbase / (inps.range_dis * np.sin(inps.incidence_angle))
        if inps.phase_velocity:
            pbase_v = np.diff(pbase, axis=0) / np.diff(inps.tbase, axis=0)
            A_delta_z_v = pbase_v / (inps.range_dis *
                                     np.sin(inps.incidence_angle))
            A = np.hstack((A_delta_z_v, A_def))
        else:
            A = np.hstack((A_delta_z, A_def))

        # L-2 norm inversion
        if inps.ex_date:
            A_inv = np.linalg.pinv(A[inps.ex_flag, :])
        else:
            A_inv = np.linalg.pinv(A)

        # Get unknown parameters X = [delta_z, vel, acc, delta_acc, ...]
        if inps.phase_velocity:
            timeseries = np.diff(timeseries, axis=0) / np.diff(inps.tbase,
                                                               axis=0)

        if inps.ex_date:
            X = np.dot(A_inv, timeseries[inps.ex_flag, :])
        else:
            X = np.dot(A_inv, timeseries)

        # Residual vector n
        resid_n = timeseries - np.dot(A, X)

        # Update DEM error / timeseries matrix
        delta_z_mat = X[0].reshape((1, length * width))
        if inps.update_timeseries:
            timeseries -= np.dot(A_delta_z, delta_z_mat)
        delta_z_mat = np.reshape(delta_z_mat, [length, width], order='F')

    else:
        print 'ERROR: Script only supports the same dimension for the incidence angle and range distance matrices.'
        print 'dimension of incidence angle: ' + str(inps.incidence_angle.ndim)
        print 'dimension of range distance: ' + str(inps.range_dis.ndim)
        sys.exit(1)

    ##------------------------------------------------ Output  --------------------------------------------##
    # DEM error file
    if 'Y_FIRST' in atr.keys():
        dem_error_file = 'demGeo_error.h5'
    else:
        dem_error_file = 'demRadar_error.h5'
    #if inps.phase_velocity:  suffix = '_pha_poly'+str(inps.poly_order)
    #else:                    suffix = '_vel_poly'+str(inps.poly_order)
    #dem_error_file = os.path.splitext(dem_error_file)[0]+suffix+os.path.splitext(dem_error_file)[1]
    print 'writing >>> ' + dem_error_file
    atr_dem_error = atr.copy()
    atr_dem_error['FILE_TYPE'] = 'dem'
    atr_dem_error['UNIT'] = 'm'
    writefile.write(delta_z_mat, atr_dem_error, dem_error_file)

    ## Phase Constant C = resid_n[0,:]
    #atrC = atr.copy()
    #atrC['FILE_TYPE'] = 'mask'
    #atrC['UNIT'] = 'm'
    #writefile.write(constC, atrC, 'constD.h5')

    ## Corrected DEM file
    #if inps.dem_file:
    #    inps.dem_outfile = os.path.splitext(inps.dem_file)[0]+suffix+os.path.splitext(inps.dem_file)[1]
    #    print '--------------------------------------'
    #    print 'writing >>> '+inps.dem_outfile
    #    dem, atr_dem = readfile.read(inps.dem_file)
    #    writefile.write(dem+delta_z_mat, atr_dem, inps.dem_outfile)

    #outfile = 'delta_acc.h5'
    #print 'writing >>> '+outfile
    #atr_dem_error = atr.copy()
    #atr_dem_error['FILE_TYPE'] = 'velocity'
    #atr_dem_error['UNIT'] = 'm/s'
    #writefile.write(delta_a_mat, atr_dem_error, outfile)
    #print '**************************************'

    # Corrected Time Series
    if inps.update_timeseries:
        print 'writing >>> ' + inps.outfile
        print 'number of dates: ' + str(len(date_list))
        h5out = h5py.File(inps.outfile, 'w')
        group = h5out.create_group('timeseries')
        prog_bar = ptime.progress_bar(maxValue=date_num, prefix='writing: ')
        for i in range(date_num):
            date = date_list[i]
            d = np.reshape(timeseries[i][:], [length, width], order='F')
            dset = group.create_dataset(date, data=d, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        for key, value in atr.iteritems():
            group.attrs[key] = value
        h5out.close()

    outFile = os.path.splitext(inps.outfile)[0] + 'InvResid.h5'
    print 'writing >>> ' + outFile
    print 'number of dates: ' + str(A_def.shape[0])
    h5out = h5py.File(outFile, 'w')
    group = h5out.create_group('timeseries')
    prog_bar = ptime.progress_bar(maxValue=A_def.shape[0], prefix='writing: ')
    for i in range(A_def.shape[0]):
        date = date_list[i]
        d = np.reshape(resid_n[i][:], [length, width], order='F')
        dset = group.create_dataset(date, data=d, compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()
    # Attribute
    for key, value in atr.iteritems():
        group.attrs[key] = value
    if A_def.shape[0] == date_num:
        group.attrs['UNIT'] = 'm'
    else:
        group.attrs['UNIT'] = 'm/yr'
    h5out.close()

    return
Example no. 15
def ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box=None):
    '''
    Inputs:
        ifgramFile    - string, interferograms hdf5 file
        coherenceFile - string, coherence hdf5 file
        box           - 4-tuple, left, upper, right, and lower pixel coordinate of area of interest
        meta          - dict, including the following attributes:

                        #Interferograms
                        length/width - int, file size for each interferogram
                        ifgram_list  - list of string, interferogram dataset name
                        date12_list  - list of string, YYMMDD-YYMMDD
                        ref_value    - np.array in size of (ifgram_num, 1),
                                       phase value of the reference pixel in each interferogram
                        ref_y/x      - int, reference pixel coordinate in row/column number

                        #Time-series
                        date8_list   - list of string in YYYYMMDD
                        tbase_diff   - np.array in size of (date_num-1, 1), differential temporal baseline

                        #Inversion
                        weight_function   - no, fim, var, coh
    Outputs:
        ts       - 3D np.array in size of (date_num, row_num, col_num)
        temp_coh - 2D np.array in size of (row_num, col_num)
        tsStd    - 3D np.array in size of (date_num, row_num, col_num)
    '''
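    # e.g. box = (0, 0, meta['width'], 300) would process the first 300 lines (a sketch)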

    ##### Get patch size/index
    if not box:
        box = (0,0,meta['width'],meta['length'])
    c0,r0,c1,r1 = box
    print 'processing %8d/%d lines ...' % (r1, meta['length'])

    ## Initiate output data matrices
    row_num = r1-r0
    col_num = c1-c0
    pixel_num = row_num * col_num
    date_num = len(meta['date8_list'])
    ts = np.zeros((date_num, pixel_num), np.float32)
    tsStd = np.zeros((date_num, pixel_num), np.float32)
    temp_coh = np.zeros(pixel_num, np.float32)

    ##### Mask for pixels to invert
    mask = np.ones(pixel_num, np.bool_)
    ## 1 - Water Mask
    if meta['water_mask_file']:
        print 'skip pixels on water with mask from file: %s' % (os.path.basename(meta['water_mask_file']))
        try:    waterMask = readfile.read(meta['water_mask_file'], epoch='waterMask')[0][r0:r1,c0:c1].flatten()
        except: waterMask = readfile.read(meta['water_mask_file'], epoch='mask')[0][r0:r1,c0:c1].flatten()
        mask *= np.array(waterMask, np.bool_)

    ## 2 - Mask for Zero Phase in ALL ifgrams
    print 'skip pixels with zero/nan value in all interferograms'
    ifgram_stack = ut.get_file_stack(ifgramFile)[r0:r1,c0:c1].flatten()
    mask *= ~np.isnan(ifgram_stack)
    mask *= ifgram_stack != 0.

    ## Invert pixels on mask 1+2
    pixel_num2inv = np.sum(mask)
    pixel_idx2inv = np.where(mask)[0]
    print 'number of pixels to invert: %s out of %s' % (pixel_num2inv, pixel_num)
    if pixel_num2inv < 1:
        ts = ts.reshape(date_num, row_num, col_num)
        temp_coh = temp_coh.reshape(row_num, col_num)
        tsStd = tsStd.reshape(date_num, row_num, col_num)
        return ts, temp_coh, tsStd

    ##### Read interferograms
    ifgram_num = len(meta['ifgram_list'])
    ifgram_data = np.zeros((ifgram_num, pixel_num), np.float32)
    date12_list = meta['date12_list']

    if meta['skip_zero_phase']:
        print 'skip zero phase value (masked out and filled during phase unwrapping)'
    atr = readfile.read_attribute(ifgramFile)
    h5ifgram = h5py.File(ifgramFile,'r')
    for j in range(ifgram_num):
        ifgram = meta['ifgram_list'][j]
        d = h5ifgram['interferograms'][ifgram].get(ifgram)[r0:r1,c0:c1].flatten()
        if meta['skip_zero_phase']:
            d[d != 0.] -= meta['ref_value'][j]
        else:
            d -= meta['ref_value'][j]
        ifgram_data[j] = d
        sys.stdout.write('\rreading interferograms %s/%s ...' % (j+1, ifgram_num))
        sys.stdout.flush()
    print ' '
    h5ifgram.close()
    #ifgram_data -= meta['ref_value']

    ## 3 - Mask for Non-Zero Phase in ALL ifgrams (share one B in sbas inversion)
    maskAllNet = np.all(ifgram_data, axis=0)
    maskAllNet *= mask
    maskPartNet = mask ^ maskAllNet

    ##### Design matrix
    A,B = ut.design_matrix(ifgramFile, date12_list)
    try:    ref_date = str(np.loadtxt('reference_date.txt', dtype=str))
    except: ref_date = meta['date8_list'][0]
    #print 'calculate decorrelation noise covariance with reference date = %s' % (ref_date)
    refIdx = meta['date8_list'].index(ref_date)
    timeIdx = [i for i in range(date_num)]
    timeIdx.remove(refIdx)
    Astd = ut.design_matrix(ifgramFile, date12_list, referenceDate=ref_date)[0]

    ##### Inversion
    if meta['weight_function'] in ['no','uniform']:
        if np.sum(maskAllNet) > 0:
            print 'inverting pixels with valid phase in all     ifgrams with OLS (%.0f pixels) ...' % (np.sum(maskAllNet))
            ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,maskAllNet], meta['tbase_diff'], skipZeroPhase=False)
            ts[1:,maskAllNet] = ts1
            temp_coh[maskAllNet] = tempCoh1

        if np.sum(maskPartNet) > 0:
            print 'inverting pixels with valid phase in part of ifgrams with SVD ...'
            pixel_num2inv = np.sum(maskPartNet)
            pixel_idx2inv = np.where(maskPartNet)[0]
            prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
            for i in range(pixel_num2inv):
                idx = pixel_idx2inv[i]
                ts1, tempCoh1 = network_inversion_sbas(B, ifgram_data[:,idx], meta['tbase_diff'], meta['skip_zero_phase'])
                ts[1:, idx] = ts1.flatten()
                temp_coh[idx] = tempCoh1
                prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
            prog_bar.close()

    else:
        ##### Read coherence
        coh_data = np.zeros((ifgram_num, pixel_num), np.float32)
        h5coh = h5py.File(coherenceFile,'r')
        coh_list = ut.check_drop_ifgram(h5coh)
        for j in range(ifgram_num):
            ifgram = coh_list[j]
            d = h5coh['coherence'][ifgram].get(ifgram)[r0:r1,c0:c1]
            d[np.isnan(d)] = 0.
            coh_data[j] = d.flatten()
            sys.stdout.write('\rreading coherence %s/%s ...' % (j+1, ifgram_num))
            sys.stdout.flush()
        print ' '
        h5coh.close()

        ##### Calculate Weight matrix
        weight = np.array(coh_data, np.float64)
        L = int(atr['ALOOKS']) * int(atr['RLOOKS'])
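        # L = effective number of looks (azimuth * range), a parameter of the
        # distributed-scatterer phase PDF used by the variance weighting below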
        epsilon = 1e-4
        if meta['weight_function'].startswith('var'):
            print 'convert coherence to weight using inverse of phase variance'
            print '    with phase PDF for distributed scatterers from Tough et al. (1995)'
            weight = 1.0 / coherence2phase_variance_ds(weight, L, print_msg=True)

        elif meta['weight_function'].startswith(('lin','coh','cor')):
            print 'use coherence as weight directly (Perissin & Wang, 2012; Tong et al., 2016)'
            weight[weight < epsilon] = epsilon

        elif meta['weight_function'].startswith(('fim','fisher')):
            print 'convert coherence to weight using Fisher Information Index (Seymour & Cumming, 1994)'
            weight = coherence2fisher_info_index(weight, L)

        else:
            print 'Unrecognized weight function: %s' % meta['weight_function']
            sys.exit(-1)

        ##### Weighted Inversion pixel by pixel
        print 'inverting time series ...'
        prog_bar = ptime.progress_bar(maxValue=pixel_num2inv)
        for i in range(pixel_num2inv):
            idx = pixel_idx2inv[i]
            ts1, tempCoh1, tsStd1 = network_inversion_wls(A, ifgram_data[:,idx], weight[:,idx], Astd=Astd,\
                                                          skipZeroPhase=meta['skip_zero_phase'])
            ts[1:, idx] = ts1.flatten()
            temp_coh[idx] = tempCoh1
            tsStd[timeIdx, idx] = tsStd1.flatten()
            prog_bar.update(i+1, every=100, suffix=str(i+1)+'/'+str(pixel_num2inv)+' pixels')
        prog_bar.close()

    ts = ts.reshape(date_num, row_num, col_num)
    temp_coh = temp_coh.reshape(row_num, col_num)
    tsStd = tsStd.reshape(date_num, row_num, col_num)


    ##Write to temp hdf5 files for parallel processing
    if meta['parallel']:
        fname = meta['ftemp_base']+str(int(r0/meta['row_step']))+'.h5'
        print 'writing >>> '+fname
        h5temp = h5py.File(fname, 'w')
        group = h5temp.create_group('timeseries')
        dset = group.create_dataset('timeseries', shape=(date_num+1, row_num, col_num), dtype=np.float32)
        dset[0:-1,:,:] = ts
        dset[-1,:,:] = temp_coh
        h5temp.close()
        return
    else:
        return ts, temp_coh, tsStd
Example no. 16
def file_operation(fname, operator, operand, fname_out=None):
    '''Arithmetic operation on a file'''

    # Basic Info
    atr = readfile.read_attribute(fname)
    k = atr['FILE_TYPE']
    print 'input is ' + k + ' file: ' + fname
    print 'operation: file %s %f' % (operator, operand)

    # default output filename
    if not fname_out:
        if operator in ['+', 'plus', 'add', 'addition']: suffix = 'plus'
        elif operator in ['-', 'minus', 'subtract', 'subtraction', 'substract', 'substraction']:
            suffix = 'minus'
        elif operator in ['*', 'times', 'multiply', 'multiplication']:
            suffix = 'multiply'
        elif operator in ['/', 'obelus', 'divide', 'division']:
            suffix = 'divide'
        elif operator in ['^', 'pow', 'power']:
            suffix = 'pow'
        ext = os.path.splitext(fname)[1]
        fname_out = os.path.splitext(fname)[0] + '_' + suffix + str(
            operand) + ext

    ##### Multiple Dataset HDF5 File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(fname, 'r')
        epoch_list = sorted(h5[k].keys())
        epoch_num = len(epoch_list)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

        h5out = h5py.File(fname_out, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + fname_out

        if k == 'timeseries':
            print 'number of acquisitions: ' + str(epoch_num)
            for i in range(epoch_num):
                date = epoch_list[i]
                data = h5[k].get(date)[:]

                data_out = data_operation(data, operator, operand)

                dset = group.create_dataset(date,
                                            data=data_out,
                                            compression='gzip')
                prog_bar.update(i + 1, suffix=date)
            for key, value in atr.iteritems():
                group.attrs[key] = value

        elif k in ['interferograms', 'wrapped', 'coherence']:
            print 'number of interferograms: ' + str(epoch_num)
            date12_list = ptime.list_ifgram2date12(epoch_list)
            for i in range(epoch_num):
                ifgram = epoch_list[i]
                data = h5[k][ifgram].get(ifgram)[:]

                data_out = data_operation(data, operator, operand)

                gg = group.create_group(ifgram)
                dset = gg.create_dataset(ifgram,
                                         data=data_out,
                                         compression='gzip')
                for key, value in h5[k][ifgram].attrs.iteritems():
                    gg.attrs[key] = value
                prog_bar.update(i + 1, suffix=date12_list[i])

        h5.close()
        h5out.close()
        prog_bar.close()

    ##### Duo datasets non-HDF5 File
    elif k in ['.trans']:
        rg, az, atr = readfile.read(fname)
        rg_out = data_operation(rg, operator, operand)
        az_out = data_operation(az, operator, operand)
        print 'writing >>> ' + fname_out
        writefile.write(rg_out, az_out, atr, fname_out)

    ##### Single Dataset File
    else:
        data, atr = readfile.read(fname)
        data_out = data_operation(data, operator, operand)
        print 'writing >>> ' + fname_out
        writefile.write(data_out, atr, fname_out)

    return fname_out
Example no. 17
def ifgram_inversion(ifgramFile='unwrapIfgram.h5', coherenceFile='coherence.h5', meta=None):
    '''Implementation of the SBAS algorithm.
    Modified from sbas.py written by Scott Baker, 2012.

    Inputs:
        ifgramFile    - string, HDF5 file name of the interferograms
        coherenceFile - string, HDF5 file name of the coherence
        meta          - dict, including the following options:
                        weight_function
                        chunk_size - float, max number of data (ifgram_num*row_num*col_num)
                                     to read per loop; to control the memory
    Output:
        timeseriesFile - string, HDF5 file name of the output timeseries
        tempCohFile    - string, HDF5 file name of temporal coherence
    Example:
        meta = dict()
        meta['weight_function'] = 'variance'
        meta['chunk_size'] = 0.5e9
        meta['timeseriesFile'] = 'timeseries_var.h5'
        meta['tempCohFile'] = 'temporalCoherence_var.h5'
        ifgram_inversion('unwrapIfgram.h5', 'coherence.h5', meta)
    '''
    if not meta:
        meta = vars(cmdLineParse())
    if 'tempCohFile' not in meta.keys():
        meta['tempCohFile'] = 'temporalCoherence.h5'
    meta['timeseriesStdFile'] = 'timeseriesDecorStd.h5'
    total = time.time()

    if meta['update_mode'] and not ut.update_file(meta['timeseriesFile'], ifgramFile):
        return meta['timeseriesFile'], meta['tempCohFile']

    ##### Basic Info
    # length/width
    atr = readfile.read_attribute(ifgramFile)
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])
    meta['length'] = length
    meta['width']  = width

    # ifgram_list
    h5ifgram = h5py.File(ifgramFile,'r')
    ifgram_list = sorted(h5ifgram['interferograms'].keys())
    #if meta['weight_function'] in ['no','uniform']:
    #    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    ifgram_list = ut.check_drop_ifgram(h5ifgram)
    meta['ifgram_list'] = ifgram_list
    ifgram_num = len(ifgram_list)

    # date12_list/date8_list/tbase_diff
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    m_dates = [i.split('-')[0] for i in date12_list]
    s_dates = [i.split('-')[1] for i in date12_list]
    date8_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
    date_num = len(date8_list)
    meta['date8_list'] = date8_list
    meta['date12_list'] = date12_list

    tbase_list = ptime.date_list2tbase(date8_list)[0]
    tbase_diff = np.diff(tbase_list).reshape((-1,1))
    meta['tbase_diff'] = tbase_diff

    print 'number of interferograms: %d' % (ifgram_num)
    print 'number of acquisitions  : %d' % (date_num)
    print 'number of columns: %d' % (width)
    print 'number of lines  : %d' % (length)

    ##### ref_y/x/value
    try:
        ref_x = int(atr['ref_x'])
        ref_y = int(atr['ref_y'])
        print 'reference pixel in y/x: [%d, %d]' % (ref_y, ref_x)
        ref_value = np.zeros((ifgram_num,1), np.float32)
        for j in range(ifgram_num):
            ifgram = ifgram_list[j]
            dset = h5ifgram['interferograms'][ifgram].get(ifgram)
            ref_value[j] = dset[ref_y,ref_x]
        meta['ref_y'] = ref_y
        meta['ref_x'] = ref_x
        meta['ref_value'] = ref_value
    except:
        if meta['skip_ref']:
            meta['ref_value'] = 0.0
            print 'skip checking reference pixel info - This is for SIMULATION ONLY.'
        else:
            print 'ERROR: No ref_x/y found! Cannot invert interferograms without a reference in space.'
            print 'run seed_data.py '+ifgramFile+' --mark-attribute for a quick referencing.'
            sys.exit(1)
    h5ifgram.close()

    ##### Rank of Design matrix for weighted inversion
    A, B = ut.design_matrix(ifgramFile, date12_list)
    print '-------------------------------------------------------------------------------'
    if meta['weight_function'] in ['no','uniform']:
        print 'generic least square inversion with min-norm phase velocity'
        print '    based on Berardino et al. (2002, IEEE-TGRS)'
        print '    OLS for pixels with fully     connected network'
        print '    SVD for pixels with partially connected network'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'WARNING: singular design matrix! Inversion result can be biased!'
            print 'continue using its SVD solution on all pixels'
    else:
        print 'weighted least square (WLS) inversion with min-norm phase, pixelwise'
        if np.linalg.matrix_rank(A) < date_num-1:
            print 'ERROR: singular design matrix!'
            print '    Input network of interferograms is not fully connected!'
            print '    Cannot invert the weighted least squares solution.'
            print 'You could try:'
            print '    1) Add more interferograms to make the network fully connected:'
            print '       a.k.a., no multiple subsets nor network islands'
            print "    2) Use '-w no' option for non-weighted SVD solution."
            sys.exit(-1)
    print '-------------------------------------------------------------------------------'


    ##### Invert time-series phase
    ##Check parallel environment
    if meta['weight_function'] in ['no','uniform']:
        meta['parallel'] = False
    if meta['parallel']:
        num_cores, meta['parallel'], Parallel, delayed = ut.check_parallel(1000, print_msg=False)

    ##Split into chunks to reduce memory usage
    r_step = meta['chunk_size']/ifgram_num/width         #split in lines
    if meta['weight_function'] not in ['no','uniform']:  #more memory usage (coherence) for WLS
        r_step /= 2.0
        if meta['parallel']:
            r_step /= num_cores
    r_step = int(ceil_to_1(r_step))
    meta['row_step'] = r_step
    chunk_num = int((length-1)/r_step)+1
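    # Note: meta['chunk_size'] caps the number of float32 samples held per
    # patch (ifgram_num * width * r_step); WLS halves r_step to leave room
    # for the coherence stack, and the parallel case divides by num_cores so
    # that all workers together stay within the same budget.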

    if chunk_num > 1:
        print 'maximum chunk size: %.1E' % (meta['chunk_size'])
        print 'split %d lines into %d patches for processing' % (length, chunk_num)
        print '    with each patch up to %d lines' % (r_step)
        if meta['parallel']:
            print 'parallel processing using %d cores ...' % (min([num_cores,chunk_num]))

    ##Computing the inversion
    box_list = []
    for i in range(chunk_num):
        r0 = i*r_step
        r1 = min([length, r0+r_step])
        box = (0,r0,width,r1)
        box_list.append(box)
    box_num = len(box_list)

    if not meta['parallel']:
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)
        tempCoh = np.zeros((length, width), np.float32)
        for i in range(box_num):
            if box_num > 1:
                print '\n------- Processing Patch %d out of %d --------------' % (i+1, box_num)
            box = box_list[i]
            ts, tcoh, tsStd = ifgram_inversion_patch(ifgramFile, coherenceFile, meta, box)
            tempCoh[box[1]:box[3],box[0]:box[2]] = tcoh
            timeseries[:,box[1]:box[3],box[0]:box[2]] = ts
            timeseriesStd[:,box[1]:box[3],box[0]:box[2]] = tsStd

    else:
        ##Temp file list
        meta['ftemp_base'] = 'timeseries_temp_'
        temp_file_list = [meta['ftemp_base']+str(i)+'.h5' for i in range(chunk_num)]

        ##Computation
        Parallel(n_jobs=num_cores)(delayed(ifgram_inversion_patch)\
                                   (ifgramFile, coherenceFile, meta, box) for box in box_list)

        ##Concatenate temp files
        print 'concatenating temporary timeseries files ...'
        timeseries = np.zeros((date_num, length, width), np.float32)
        timeseriesStd = np.zeros((date_num, length, width), np.float32)  # stays zero: temp files store ts + coherence only
        tempCoh = np.zeros((length, width), np.float32)
        rmCmd = 'rm'
        for i in range(chunk_num):
            fname = temp_file_list[i]
            box = box_list[i]
            print 'reading '+fname
            h5temp = h5py.File(fname, 'r')
            dset = h5temp['timeseries'].get('timeseries')
            timeseries[:,box[1]:box[3],box[0]:box[2]] = dset[0:-1,:,:]
            tempCoh[box[1]:box[3],box[0]:box[2]] = dset[-1,:,:]
            h5temp.close()
            rmCmd += ' '+fname
        print rmCmd
        os.system(rmCmd)

    print 'converting phase to range'
    phase2range = -1*float(atr['WAVELENGTH'])/(4.*np.pi)
    timeseries *= phase2range
    timeseriesStd *= abs(phase2range)

    ##### Calculate time-series attributes
    print 'calculating perpendicular baseline timeseries'
    pbase, pbase_top, pbase_bottom = ut.perp_baseline_ifgram2timeseries(ifgramFile, ifgram_list)
    pbase = str(pbase.tolist()).translate(None,'[],')  # convert np.array into string separated by white space
    pbase_top = str(pbase_top.tolist()).translate(None,'[],')
    pbase_bottom = str(pbase_bottom.tolist()).translate(None,'[],')
    atr['P_BASELINE_TIMESERIES'] = pbase
    atr['P_BASELINE_TOP_TIMESERIES'] = pbase_top
    atr['P_BASELINE_BOTTOM_TIMESERIES'] = pbase_bottom
    atr['ref_date'] = date8_list[0]
    atr['FILE_TYPE'] = 'timeseries'
    atr['UNIT'] = 'm'

    ##### Output
    ## 1. Write time-series file
    meta['timeseriesFile'] = write_timeseries_hdf5_file(timeseries, date8_list, atr,\
                                                        timeseriesFile=meta['timeseriesFile'])
    if not np.all(timeseriesStd == 0.):
        meta['timeseriesStdFile'] = write_timeseries_hdf5_file(timeseriesStd, date8_list, atr,\
                                                               timeseriesFile=meta['timeseriesStdFile'])

    ## 2. Write Temporal Coherence File
    print 'writing >>> '+meta['tempCohFile']
    atr['FILE_TYPE'] = 'temporal_coherence'
    atr['UNIT'] = '1'
    meta['tempCohFile'] = writefile.write(tempCoh, atr, meta['tempCohFile'])

    print 'Time series inversion took ' + str(time.time()-total) +' secs\nDone.'
    return meta['timeseriesFile'], meta['tempCohFile']
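
# A minimal, self-contained sketch of the min-norm phase-velocity inversion
# used above (Berardino et al., 2002) for a single pixel. The toy 3-date /
# 3-interferogram network and all values below are illustrative assumptions,
# not the data handled by ifgram_inversion_patch().
import numpy as np

tbase = np.array([0., 35., 70.])             # acquisition times in days
tbase_diff = np.diff(tbase)                  # time step between dates

# design matrix B: one row per ifgram, mapping per-step velocity to phase
# ifgrams: (t0,t1), (t1,t2), (t0,t2)
B = np.array([[35.,  0.],
              [ 0., 35.],
              [35., 35.]], np.float32)
ifgram_phase = np.array([0.5, 0.3, 0.8])     # unwrapped phase in radian

# the SVD-based pseudo-inverse gives the minimum-norm velocity solution,
# usable even when the network is only partially connected
vel = np.dot(np.linalg.pinv(B), ifgram_phase)
ts = np.concatenate(([0.], np.cumsum(vel * tbase_diff)))
print ts                                     # [0.  0.5  0.8]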
Exemplo n.º 18
0
def main(argv):

    ##### Inputs
    try:
        ifgram_file = argv[0]
        timeseries_file = argv[1]
    except:
        usage(); sys.exit(1)
  
    try:    outfile = argv[2]
    except: outfile = 'reconstructed_'+ifgram_file

    atr = readfile.read_attribute(timeseries_file)
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])

    ##### Read time-series file
    print 'loading timeseries ...'
    h5ts = h5py.File(timeseries_file, 'r')
    date_list = sorted(h5ts['timeseries'].keys())
    date_num = len(date_list)
    timeseries = np.zeros((date_num, length*width))

    print 'number of acquisitions: '+str(date_num)
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        d = h5ts['timeseries'].get(date)[:]
        timeseries[i,:] = d.flatten(0)
        prog_bar.update(i+1, suffix=date)
    prog_bar.close()
    h5ts.close()
    del d

    range2phase = -4*np.pi/float(atr['WAVELENGTH'])
    timeseries = range2phase*timeseries

    #####  Estimate interferograms from timeseries
    print 'estimating interferograms from timeseries using design matrix from input interferograms'
    A,B = ut.design_matrix(ifgram_file)
    p = -1*np.ones([A.shape[0],1])
    Ap = np.hstack((p,A))
    estData = np.dot(Ap, timeseries)
    del timeseries

    ##### Write interferograms file
    print 'writing >>> '+outfile
    h5 = h5py.File(ifgram_file,'r')
    ifgram_list = sorted(h5['interferograms'].keys())
    ifgram_num = len(ifgram_list)
    date12_list = ptime.list_ifgram2date12(ifgram_list)
    
    h5out = h5py.File(outfile,'w')
    group = h5out.create_group('interferograms')

    print 'number of interferograms: '+str(ifgram_num)
    prog_bar = ptime.progress_bar(maxValue=ifgram_num)
    for i in range(ifgram_num):
        ifgram = ifgram_list[i]
        data = np.reshape(estData[i,:],(length, width))

        gg = group.create_group(ifgram)
        dset = gg.create_dataset(ifgram, data=data, compression='gzip')
        for key, value in h5['interferograms'][ifgram].attrs.iteritems():
            gg.attrs[key] = value
        prog_bar.update(i+1, suffix=date12_list[i])
    prog_bar.close()
    h5.close()
    h5out.close()
    print 'Done.'
    return outfile
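
# A toy sketch of the reconstruction above: each interferogram equals the
# phase difference of the two acquisitions it connects, which the design
# matrix A (with a prepended -1 column for the reference date) expresses as
# one matrix product. The 3-date network and values are assumptions.
import numpy as np

ts_phase = np.array([[0.0],     # reference date, zero by definition
                     [0.2],
                     [0.5]])    # time-series phase at one pixel, radian

# A as from ut.design_matrix(): one row per ifgram over dates 1..N
# ifgrams: (d0,d1), (d1,d2), (d0,d2)
A = np.array([[ 1.,  0.],
              [-1.,  1.],
              [ 0.,  1.]])
p = -1 * np.ones((A.shape[0], 1))   # column for the reference date d0
Ap = np.hstack((p, A))

est_ifgram = np.dot(Ap, ts_phase)
print est_ifgram.flatten()          # [0.2  0.3  0.5]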
Exemplo n.º 19
0
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    #print '\n****************** Network Modification ********************'

    if (not inps.reference_file and not inps.template_file and\
        not inps.max_temp_baseline and not inps.max_perp_baseline and\
        not inps.drop_ifg_index and not inps.drop_date and \
        not inps.coherence_file):
        # Display the network for manual modification when there is no other modification input.
        print 'No input found to remove interferograms, continue by displaying the network to select them manually ...'
        inps.disp_network = True

    # Update inps if template is input
    if inps.template_file:
        inps = update_inps_with_template(inps, inps.template_file)

    # Convert index list input into a continuous index list
    if inps.drop_ifg_index:
        ifg_index = list(inps.drop_ifg_index)
        inps.drop_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.drop_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.drop_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.drop_ifg_index = sorted(inps.drop_ifg_index)
        if max(inps.drop_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.drop_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                print date12

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_file:
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # Calculate spatial average coherence
        if inps.mask_file:
            mask = readfile.read(inps.mask_file)[0]
            print 'mask coherence with file: ' + inps.mask_file
        else:
            mask = None
        cohTextFile = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.list'
        if os.path.isfile(cohTextFile):
            print 'average coherence in space has been calculated before and stored in file: ' + cohTextFile
            print 'read it directly, or delete it and re-run the script to re-calculate the list'
            cohTxt = np.loadtxt(cohTextFile, dtype=str)
            mean_coherence_list = [float(i) for i in cohTxt[:, 1]]
            coh_date12_list = [i for i in cohTxt[:, 0]]
        else:
            print 'calculating average coherence of each interferogram ...'
            mean_coherence_list = ut.spatial_average(inps.coherence_file,
                                                     mask,
                                                     saveList=True)
            coh_date12_list = pnet.get_date12_list(inps.coherence_file)
        print 'date12 with average coherence < ' + str(
            inps.min_coherence) + ': '
        for i in range(len(coh_date12_list)):
            if mean_coherence_list[i] < inps.min_coherence:
                date12 = coh_date12_list[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.igram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 2.4 Update date12_to_rmv from drop_ifg_index
    if inps.drop_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.drop_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from drop_date
    if inps.drop_date:
        inps.drop_date = ptime.yymmdd(inps.drop_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.drop_date)
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.drop_date) or (date2 in inps.drop_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                print date12

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = list(set(date12_to_rmv))
    date12_to_rmv = sorted(date12_to_rmv)
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'list   of interferograms to remove:'
    print date12_to_rmv

    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                outFile = 'Modified_Mask.h5'
                print 'writing >>> ' + outFile
                ut.nonzero_mask(Modified_File, outFile)
            elif k == 'coherence':
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'Modified_average_spatial_coherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)
                Modified_CoherenceFile = Modified_File

        # Plot result
        if inps.plot:
            print '\nplot modified network and save to file.'
            plotCmd = 'plot_network.py ' + Modified_File + ' --coherence ' + Modified_CoherenceFile + ' --nodisplay'
            print plotCmd
            os.system(plotCmd)

        print 'Done.'
        return
    else:
        print 'No interferogram dropped, skip update.'
        return
Exemplo n.º 20
0
def main(argv):
    inps = cmdLineParse()

    ##### 1. Extract the common area of two input files
    # Basic info
    atr1 = readfile.read_attribute(inps.file[0])
    atr2 = readfile.read_attribute(inps.file[1])
    if any('X_FIRST' not in i for i in [atr1, atr2]):
        sys.exit('ERROR: Not all input files are geocoded.')

    k1 = atr1['FILE_TYPE']
    print 'Input 1st file is ' + k1

    # Common AOI in lalo
    west, east, south, north = get_overlap_lalo(atr1, atr2)
    lon_step = float(atr1['X_STEP'])
    lat_step = float(atr1['Y_STEP'])
    width = int(round((east - west) / lon_step))
    length = int(round((south - north) / lat_step))

    # Read data in common AOI: LOS displacement, heading angle, incident angle
    u_los = np.zeros((2, width * length))
    heading = []
    incidence = []
    for i in range(len(inps.file)):
        fname = inps.file[i]
        print '---------------------'
        print 'reading ' + fname
        atr = readfile.read_attribute(fname)

        [x0, x1] = subset.coord_geo2radar([west, east], atr, 'lon')
        [y0, y1] = subset.coord_geo2radar([north, south], atr, 'lat')
        V = readfile.read(fname, (x0, y0, x1, y1))[0]
        u_los[i, :] = V.flatten(0)

        heading_angle = float(atr['HEADING'])
        if heading_angle < 0.:
            heading_angle += 360.
        print 'heading angle: ' + str(heading_angle)
        heading_angle *= np.pi / 180.
        heading.append(heading_angle)

        inc_angle = float(ut.incidence_angle(atr, dimension=0))
        #print 'incidence angle: '+str(inc_angle)
        inc_angle *= np.pi / 180.
        incidence.append(inc_angle)

    ##### 2. Project displacement from LOS to Horizontal and Vertical components
    # math for 3D: cos(theta)*Uz - cos(alpha)*sin(theta)*Ux + sin(alpha)*sin(theta)*Uy = Ulos
    # math for 2D: cos(theta)*Uv - sin(alpha-az)*sin(theta)*Uh = Ulos   #Uh_perp = 0.0
    # This could be easily modified to support multiple viewing geometries (e.g. two adjacent tracks from asc & desc) to resolve 3D

    # Design matrix
    A = np.zeros((2, 2))
    for i in range(len(inps.file)):
        A[i, 0] = np.cos(incidence[i])
        A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - inps.azimuth)

    A_inv = np.linalg.pinv(A)
    u_vh = np.dot(A_inv, u_los)

    u_v = np.reshape(u_vh[0, :], (length, width))
    u_h = np.reshape(u_vh[1, :], (length, width))

    ##### 3. Output
    # Attributes
    atr = atr1.copy()
    atr['WIDTH'] = str(width)
    atr['FILE_LENGTH'] = str(length)
    atr['X_FIRST'] = str(west)
    atr['Y_FIRST'] = str(north)
    atr['X_STEP'] = str(lon_step)
    atr['Y_STEP'] = str(lat_step)

    print '---------------------'
    outname = inps.outfile[0]
    print 'writing   vertical component to file: ' + outname
    writefile.write(u_v, atr, outname)

    outname = inps.outfile[1]
    print 'writing horizontal component to file: ' + outname
    writefile.write(u_h, atr, outname)

    print 'Done.'
    return
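
# A minimal numeric sketch of step 2 above: with one ascending and one
# descending LOS measurement, cos(theta)*Uv + sin(theta)*sin(heading-az)*Uh
# = Ulos is solved per pixel with the pseudo-inverse. All angles and
# displacement values below are illustrative assumptions.
import numpy as np

incidence = np.radians([34., 39.])     # asc / desc incidence angle
heading   = np.radians([350., 190.])   # asc / desc heading angle
azimuth   = np.radians(-90.)           # horizontal direction of interest

A = np.zeros((2, 2))
for i in range(2):
    A[i, 0] = np.cos(incidence[i])
    A[i, 1] = np.sin(incidence[i]) * np.sin(heading[i] - azimuth)

u_los = np.array([[0.010], [-0.006]])  # LOS displacement in meters
u_v, u_h = np.dot(np.linalg.pinv(A), u_los).flatten()
print 'vertical: %.4f m, horizontal: %.4f m' % (u_v, u_h)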
Exemplo n.º 21
0
def seed_file_reference_value(File, outName, refList, ref_y='', ref_x=''):
    ## Seed Input File with reference value in refList
    print 'Reference value: '
    print refList

    #####  IO Info
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'file type: '+k

    ##### Multiple Dataset File
    if k in ['timeseries','interferograms','wrapped','coherence']:
        ##### Input File Info
        h5file = h5py.File(File,'r')
        epochList = sorted(h5file[k].keys())
        epochNum  = len(epochList)

        ##### Check Epoch Number
        if not epochNum == len(refList):
            print '\nERROR: Reference value has a different epoch number '+\
                  'from the input file.'
            print 'Reference list epoch number: '+str(len(refList))
            print 'Input file     epoch number: '+str(epochNum)
            sys.exit(1)

        ##### Output File Info
        h5out = h5py.File(outName,'w')
        group = h5out.create_group(k)
        print 'writing >>> '+outName
        prog_bar = ptime.progress_bar(maxValue=epochNum, prefix='seeding: ')

    ## Loop
    if k == 'timeseries':
        print 'number of acquisitions: '+str(epochNum)
        for i in range(epochNum):
            epoch = epochList[i]
            data = h5file[k].get(epoch)[:]
            data -= refList[i]
            dset = group.create_dataset(epoch, data=data, compression='gzip')
            prog_bar.update(i+1, suffix=epoch)
        atr  = seed_attributes(atr,ref_x,ref_y)
        for key,value in atr.iteritems():
            group.attrs[key] = value

    elif k in ['interferograms','wrapped','coherence']:
        print 'number of interferograms: '+str(epochNum)
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epochNum):
            epoch = epochList[i]
            #print epoch
            data = h5file[k][epoch].get(epoch)[:]
            atr  = h5file[k][epoch].attrs

            if k == 'interferograms':
                data[data != 0.] -= refList[i]
            else:
                data -= refList[i]
            atr  = seed_attributes(atr,ref_x,ref_y)

            gg = group.create_group(epoch)
            dset = gg.create_dataset(epoch, data=data, compression='gzip')
            for key, value in atr.iteritems():
                gg.attrs[key] = value

            prog_bar.update(i+1, suffix=date12_list[i])
  
    ##### Single Dataset File
    else:
        print 'writing >>> '+outName
        data,atr = readfile.read(File)
        data -= refList
        atr  = seed_attributes(atr,ref_x,ref_y)
        writefile.write(data,atr,outName)
  
    ##### End & Cleaning
    try:
        prog_bar.close()
        h5file.close()
        h5out.close()
    except:
        pass

    return outName
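
# A condensed sketch of the 'timeseries' seeding branch above: every epoch
# is shifted so that the reference pixel reads zero. The stack shape and
# the (ref_y, ref_x) location are assumptions for illustration.
import numpy as np

stack = np.random.rand(5, 100, 100).astype(np.float32)  # (epoch, y, x)
ref_y, ref_x = 50, 60
ref_list = stack[:, ref_y, ref_x]           # one reference value per epoch
stack -= ref_list[:, None, None]            # broadcast subtraction per epoch
print stack[:, ref_y, ref_x]                # all zeros after seeding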
Exemplo n.º 22
0
def diff_file(file1, file2, outName=None, force=False):
    '''Subtraction/difference of two input files'''
    if not outName:
        outName = os.path.splitext(file1)[0]+'_diff_'+os.path.splitext(os.path.basename(file2))[0]+\
                  os.path.splitext(file1)[1]

    print file1 + ' - ' + file2
    # Read basic info
    atr = readfile.read_attribute(file1)
    print 'Input first file is ' + atr['PROCESSOR'] + ' ' + atr['FILE_TYPE']
    k = atr['FILE_TYPE']

    # Multi-dataset/group file
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        # Check input files type for multi_dataset/group files
        atr2 = readfile.read_attribute(file2)
        k2 = atr2['FILE_TYPE']

        h5_1 = h5py.File(file1)
        h5_2 = h5py.File(file2)
        epochList = sorted(h5_1[k].keys())
        epochList2 = sorted(h5_2[k2].keys())
        if not all(i in epochList2 for i in epochList):
            print 'ERROR: ' + file2 + ' does not contain all groups of ' + file1
            if force and k in ['timeseries']:
                print 'Continue and enforce the differencing for their shared dates only!'
            else:
                sys.exit(1)

        h5out = h5py.File(outName, 'w')
        group = h5out.create_group(k)
        print 'writing >>> ' + outName

        epoch_num = len(epochList)
        prog_bar = ptime.progress_bar(maxValue=epoch_num)

    if k in ['timeseries']:
        print 'number of acquisitions: ' + str(len(epochList))
        # check reference date
        if atr['ref_date'] == atr2['ref_date']:
            ref_date = None
        else:
            ref_date = atr['ref_date']
            data2_ref = h5_2[k2].get(ref_date)[:]
            print 'consider different reference date'
        # check reference pixel
        ref_y = int(atr['ref_y'])
        ref_x = int(atr['ref_x'])
        if ref_y == int(atr2['ref_y']) and ref_x == int(atr2['ref_x']):
            ref_y = None
            ref_x = None
        else:
            print 'consider different reference pixel'

        # calculate difference in loop
        for i in range(epoch_num):
            date = epochList[i]
            data1 = h5_1[k].get(date)[:]
            if date in epochList2:
                data2 = h5_2[k2].get(date)[:]
                if ref_date:
                    data2 -= data2_ref
                if ref_x and ref_y:
                    data2 -= data2[ref_y, ref_x]
                data = diff_data(data1, data2)
            elif force:
                data = data1
            else:
                sys.exit('dataset %s is not found in file %s' % (date, file2))

            dset = group.create_dataset(date, data=data, compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        for key, value in atr.iteritems():
            group.attrs[key] = value

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    elif k in ['interferograms', 'coherence', 'wrapped']:
        print 'number of interferograms: ' + str(len(epochList))
        date12_list = ptime.list_ifgram2date12(epochList)
        for i in range(epoch_num):
            epoch1 = epochList[i]
            epoch2 = epochList2[i]
            data1 = h5_1[k][epoch1].get(epoch1)[:]
            data2 = h5_2[k2][epoch2].get(epoch2)[:]
            data = diff_data(data1, data2)
            gg = group.create_group(epoch1)
            dset = gg.create_dataset(epoch1, data=data, compression='gzip')
            for key, value in h5_1[k][epoch1].attrs.iteritems():
                gg.attrs[key] = value
            prog_bar.update(i + 1, suffix=date12_list[i])

        prog_bar.close()
        h5out.close()
        h5_1.close()
        h5_2.close()

    # Single dataset file
    else:
        data1, atr1 = readfile.read(file1)
        data2, atr2 = readfile.read(file2)
        data = diff_data(data1, data2)
        print 'writing >>> ' + outName
        writefile.write(data, atr1, outName)

    return outName
Exemplo n.º 23
0
def correct_lod_file(File, outFile=None):
    # Check Sensor Type
    print 'input file: ' + File
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    platform = atr['PLATFORM']
    print 'platform: ' + platform
    if platform.lower() not in ['env', 'envisat']:
        print 'No need to correct LOD for ' + platform
        sys.exit(1)

    # Output Filename
    if not outFile:
        ext = os.path.splitext(File)[1]
        outFile = os.path.splitext(File)[0] + '_LODcor' + ext

    # Get LOD phase ramp from empirical model
    width = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    range_resolution = float(atr['RANGE_PIXEL_SIZE'])

    r = np.linspace(0, width - 1, width)
    R = range_resolution * r * (3.87e-7)
    Ramp = np.tile(R, [length, 1])

    yref = int(atr['ref_y'])
    xref = int(atr['ref_x'])
    Ramp -= Ramp[yref][xref]

    # Correct LOD Ramp for Input File
    if k in multi_group_hdf5_file + multi_dataset_hdf5_file:
        h5 = h5py.File(File, 'r')
        epochList = sorted(h5[k].keys())

        h5out = h5py.File(outFile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'wrapped']:
            print 'number of interferograms: ' + str(len(epochList))
            wvl = float(atr['WAVELENGTH'])
            Ramp *= -4 * np.pi / wvl
            for epoch in epochList:
                print epoch
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                dates = ptime.yyyymmdd(atr['DATE12'].split('-'))
                dates = ptime.yyyymmdd2years(dates)
                dt = dates[1] - dates[0]
                data -= Ramp * dt

                gg = group.create_group(epoch)
                dset = gg.create_dataset(epoch, data=data, compression='gzip')
                for key, value in atr.iteritems():
                    gg.attrs[key] = value

        elif k == 'timeseries':
            print 'number of acquisitions: ' + str(len(epochList))
            tbase = [
                float(dy) / 365.25
                for dy in ptime.date_list2tbase(epochList)[0]
            ]
            for i in range(len(epochList)):
                epoch = epochList[i]
                print epoch
                data = h5[k].get(epoch)[:]

                data -= Ramp * tbase[i]

                dset = group.create_dataset(epoch,
                                            data=data,
                                            compression='gzip')
            for key, value in atr.iteritems():
                group.attrs[key] = value
        else:
            print 'No need to correct LOD for ' + k + ' file'
            sys.exit(1)

        h5.close()
        h5out.close()

    else:
        data, atr = readfile.read(File)
        data -= Ramp
        writefile.write(data, atr, outFile)

    return outFile
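
# A standalone sketch of the empirical Envisat local-oscillator-drift ramp
# built above: a phase ramp linear in range, scaled by the time separation
# in years via the 3.87e-7 drift factor. Parameter values are assumptions.
import numpy as np

width, length = 1000, 800
range_pixel_size = 7.8      # meters, from RANGE_PIXEL_SIZE
dt_years = 2.0              # time separation of the pair, in years

r = np.arange(width)
ramp = np.tile(range_pixel_size * r * 3.87e-7, (length, 1)) * dt_years
ramp -= ramp[400, 500]      # remove value at an assumed reference pixel
print 'max LOD range error: %.4f m' % np.abs(ramp).max()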
Exemplo n.º 24
0
def geocode_file_roipac(infile, geomap_file, outfile=None):
    '''Geocode one file'''
    # Input file info
    atr = readfile.read_attribute(infile)
    k = atr['FILE_TYPE']
    print 'geocoding ' + k + ' file: ' + infile + ' ...'

    # roipac outfile name info - intermediate product
    ext = os.path.splitext(infile)[1]
    infile_base = os.path.basename(infile).split(ext)[0]
    if k == 'coherence': roipac_ext = '.cor'
    elif k == 'wrapped': roipac_ext = '.int'
    else: roipac_ext = '.unw'

    # temporary geomap file - needed for parallel processing
    geomap_file_orig = geomap_file
    geomap_file = geomap_file_orig.split(
        '.trans')[0] + '4' + infile_base + '.trans'
    cpCmd = 'cp ' + geomap_file_orig + ' ' + geomap_file
    os.system(cpCmd)
    print cpCmd
    cpCmd = 'cp ' + geomap_file_orig + '.rsc ' + geomap_file + '.rsc'
    os.system(cpCmd)
    print cpCmd

    # Output file name
    if not outfile:
        outfile = 'geo_' + infile
    print 'writing >>> ' + outfile

    # Multi-dataset file
    if k in ['timeseries', 'interferograms', 'coherence', 'wrapped']:
        h5 = h5py.File(infile, 'r')
        epochList = sorted(h5[k].keys())
        print 'number of epochs: ' + str(len(epochList))

        h5out = h5py.File(outfile, 'w')
        group = h5out.create_group(k)

        if k in ['interferograms', 'coherence', 'wrapped']:
            for epoch in epochList:
                print epoch
                data = h5[k][epoch].get(epoch)[:]
                atr = h5[k][epoch].attrs

                roipac_outname = infile_base + '_' + epoch + roipac_ext
                geo_amp, geo_data, geo_rsc = geocode_data_roipac(
                    data, geomap_file, roipac_outname)
                geo_atr = geocode_attribute(atr, geo_rsc)

                gg = group.create_group('geo_' + epoch)
                dset = gg.create_dataset('geo_' + epoch,
                                         data=geo_data,
                                         compression='gzip')
                for key, value in geo_atr.iteritems():
                    gg.attrs[key] = value

        elif k in ['timeseries']:
            for epoch in epochList:
                print epoch
                data = h5[k].get(epoch)[:]

                roipac_outname = infile_base + '_' + epoch + roipac_ext
                geo_amp, geo_data, geo_rsc = geocode_data_roipac(
                    data, geomap_file, roipac_outname)

                dset = group.create_dataset(epoch,
                                            data=geo_data,
                                            compression='gzip')
            geo_atr = geocode_attribute(atr, geo_rsc)
            for key, value in geo_atr.iteritems():
                group.attrs[key] = value

        h5.close()
        h5out.close()

    # Single-dataset file
    else:
        data, atr = readfile.read(infile)
        roipac_outname = infile_base + roipac_ext

        geo_amp, geo_data, geo_rsc = geocode_data_roipac(
            data, geomap_file, roipac_outname)
        geo_atr = geocode_attribute(atr, geo_rsc)

        writefile.write(geo_data, geo_atr, outfile)

    # delete temporary geomap file
    rmCmd = 'rm ' + geomap_file
    os.system(rmCmd)
    print rmCmd
    rmCmd = 'rm ' + geomap_file + '.rsc'
    os.system(rmCmd)
    print rmCmd

    return outfile
Exemplo n.º 25
0
def load_file(fileList, inps_dict=dict(), outfile=None, file_type=None):
    '''Load input file(s) into one HDF5 file 
    It supports ROI_PAC files only for now.
    Inputs:
        fileList  - string / list of string, path of files to load
        inps_dict - dict, including the following attributes
                    PROJECT_NAME   : KujuAlosAT422F650  (extra attribute dictionary to add to output file)
                    sensor         : (optional)
                    timeseries_dir : directory of time series analysis, e.g. KujuAlosAT422F650/PYSAR
                    insar_processor: InSAR processor, roipac, isce, gamma, doris
        outfile   - string, output file name
        file_type - string, group name for output HDF5 file, interferograms, coherence, dem, etc.
    Output:
        outfile - string, output file name
    Example:
        unwrapIfgram.h5 = load_file('filt*.unw', inps_dict=vars(inps))
    '''
    # Get project_name from input template file
    if 'project_name' not in inps_dict.keys() and 'template_file' in inps_dict.keys():
        template_filename_list = [
            os.path.basename(i) for i in inps_dict['template_file']
        ]
        try:
            template_filename_list.remove('pysarApp_template.txt')
        except:
            pass
        if template_filename_list:
            inps_dict['project_name'] = os.path.splitext(
                template_filename_list[0])[0]

    #Sensor
    inps_dict['PLATFORM'] = project_name2sensor(inps_dict['project_name'])

    # Input file(s) info
    fileList = ut.get_file_list(fileList, abspath=True)
    if not fileList:
        return None

    ##### Prepare attributes file
    processor = inps_dict['insar_processor']
    print '--------------------------------------------'
    print 'preparing attributes files using prep_%s.py ...' % processor
    # prepare multiple files input for cmd calling
    files_input = ''
    for x in fileList:
        files_input += x + ' '
    # call prepare_*.py
    if processor == 'gamma':
        prepCmd = 'prep_gamma.py ' + files_input
        os.system(prepCmd)
    elif processor == 'roipac':
        prepCmd = 'prep_roipac.py ' + files_input
        os.system(prepCmd)
    elif processor == 'isce':
        prepCmd = 'prep_isce.py ' + files_input
        #os.system(prepCmd)
    else:
        print 'Unsupported InSAR processor: ' + processor
        print 'Skip preparing attributes files'

    print '----------------------------'
    print 'loading files ...'
    atr = readfile.read_attribute(fileList[0])
    k = atr['FILE_TYPE']
    print 'Input file(s) is ' + atr['PROCESSOR'] + ' ' + k

    # Get output file type
    if not file_type:
        if k in ['.unw']: file_type = 'interferograms'
        elif k in ['.cor']: file_type = 'coherence'
        elif k in ['.int']: file_type = 'wrapped'
        elif k in ['.byt']: file_type = 'snaphu_connect_component'
        elif k in ['.msk']: file_type = 'mask'
        elif k in ['.hgt', '.dem', 'dem', '.hgt_sim']:
            file_type = 'dem'
        elif k in ['.trans', '.utm_to_rdc', 'geometry']:
            file_type = 'geometry'
        else:
            file_type = k

    # Get output file name
    if not outfile:
        # output file basename
        if file_type == 'interferograms': outfile = 'unwrapIfgram.h5'
        elif file_type == 'coherence': outfile = 'coherence.h5'
        elif file_type == 'wrapped': outfile = 'wrapIfgram.h5'
        elif file_type == 'snaphu_connect_component':
            outfile = 'snaphuConnectComponent.h5'
        elif file_type == 'mask':
            outfile = 'mask.h5'
        elif file_type == 'dem':
            if 'Y_FIRST' in atr.keys():
                outfile = 'demGeo.h5'
            else:
                outfile = 'demRadar.h5'

        # output directory
        if 'timeseries_dir' in inps_dict.keys() and inps_dict['timeseries_dir']:
            outdir = inps_dict['timeseries_dir']
        else:
            outdir = os.path.abspath(os.getcwd())
        if outfile:
            outfile = outdir + '/' + outfile
    if outfile:
        outfile = os.path.abspath(outfile)

    # Convert
    if file_type in multi_group_hdf5_file:
        outfile = load_multi_group_hdf5(file_type,
                                        fileList,
                                        outfile=outfile,
                                        exDict=inps_dict)[0]

    elif file_type in single_dataset_hdf5_file:
        outfile = load_single_dataset_hdf5(file_type,
                                           fileList[-1],
                                           outfile=outfile,
                                           exDict=inps_dict)

    elif file_type in ['geometry', '.trans', '.utm_to_rdc', '.UTM_TO_RDC']:
        outfile = load_geometry_hdf5(file_type,
                                     fileList,
                                     outfile=outfile,
                                     exDict=inps_dict)
    else:
        warnings.warn('Unsupported file type: ' + file_type)

    return outfile
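
# The extension-to-group and group-to-filename dispatch above could also be
# read as two small lookup tables; a sketch with assumed (not exhaustive)
# entries mirroring the if/elif chains:
EXT2TYPE = {'.unw': 'interferograms', '.cor': 'coherence', '.int': 'wrapped',
            '.byt': 'snaphu_connect_component', '.msk': 'mask',
            '.hgt': 'dem', '.dem': 'dem', '.trans': 'geometry'}
TYPE2FILE = {'interferograms': 'unwrapIfgram.h5', 'coherence': 'coherence.h5',
             'wrapped': 'wrapIfgram.h5', 'mask': 'mask.h5'}

k = '.unw'
file_type = EXT2TYPE.get(k, k)
print file_type, TYPE2FILE.get(file_type)   # interferograms unwrapIfgram.h5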
Exemplo n.º 26
0
def main(argv):
    try:
        File = argv[0]
    except:
        usage();sys.exit(1)

    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    print 'input is '+k+' file: '+File

    try:    matFile = argv[1]
    except: matFile = os.path.splitext(File)[0]+'.mat'
    print 'writing >>> '+matFile

    #####
    h5file = h5py.File(File,'r')
    if k in single_dataset_hdf5_file:
        data = h5file[k].get(k)[:]
    
        V = {}
        V['time_range']=''
        try:
            V['x_first'] = float(atr['X_FIRST'])
            V['y_first'] = float(atr['Y_FIRST'])
            V['x_step'] = float(atr['X_STEP'])
            V['y_step'] = float(atr['Y_STEP'])
            V['x_unit'] = atr['X_UNIT']
            V['y_unit'] = atr['Y_UNIT']
        except:
            V['x_first']=1
            V['y_first']=1
            V['x_step']=1
            V['y_step']=1
            V['x_unit']=''
            V['y_unit']=''
  
        try:  V['wavelength']=float(atr['WAVELENGTH'])
        except:  print 'WAVELENGTH was not found'
        try:  V['sat_height']=float(atr['HEIGHT'])
        except:  print 'HEIGHT was not found'
    
        try:  V['near_range']=float(atr['STARTING_RANGE'])
        except:  print 'STARTING_RANGE was not found'
    
        V['far_range']=''
    
        try:  V['near_LookAng']=float(atr['LOOK_REF1'])
        except:  print 'LOOK_REF1 was not found'
        try:  V['far_LookAng']=float(atr['LOOK_REF2'])
        except:  print 'LOOK_REF2 was not found'
       
        V['earth_radius']=''
        V['Unit']='m/yr'
        V['bperptop']=''
        V['bperpbot']=''
        V['sat']=''
        try:  V['width']=int(atr['WIDTH'])
        except:  print 'WIDTH was not found'
    
        try:  V['file_length']=int(atr['FILE_LENGTH'])
        except:  print 'FILE_LENGTH was not found'
        V['t']=''
        V['date']=''
        V['date_years']=''
        try:     V['sat']   = atr['satellite']
        except:  V['sat']   = ''

        ########################################################
        V['data']=data
        sio.savemat(matFile, {k: V})


    elif 'timeseries' in k:
        epochList = sorted(h5file['timeseries'].keys())
        data_dict={}
        for epoch in epochList:
            print epoch
            d = h5file['timeseries'].get(epoch)
            ts={}
            ts['data'] = d[0:d.shape[0],0:d.shape[1]] 
            try:
                ts['x_first']=float(atr['X_FIRST'])
                ts['y_first']=float(atr['Y_FIRST'])
                ts['x_step']=float(atr['X_STEP'])
                ts['y_step']=float(atr['Y_STEP'])
                ts['x_unit']=atr['X_UNIT']
                ts['y_unit']=atr['Y_UNIT']
            except:
                ts['x_first']=1
                ts['y_first']=1
                ts['x_step']=1
                ts['y_step']=1
                ts['x_unit']=''
                ts['y_unit']=''        
    
            ts['wavelength']=float(atr['WAVELENGTH'])
            ts['sat_height']=float(atr['HEIGHT'])
            ts['near_range']=float(atr['STARTING_RANGE'])
            ts['far_range']=float(atr['STARTING_RANGE1'])
            ts['near_LookAng']=float(atr['LOOK_REF1'])
            ts['far_LookAng']=float(atr['LOOK_REF2'])
            ts['earth_radius']=float(atr['EARTH_RADIUS'])
            ts['Unit']='m'
            ts['bperptop']=float(atr['P_BASELINE_TOP_HDR'])
            ts['bperpbot']=float(atr['P_BASELINE_BOTTOM_HDR'])
            ts['sat']=atr['PLATFORM']
            ts['width']=int(atr['WIDTH'])
            ts['file_length']=int(atr['FILE_LENGTH'])
            ts['t']=np.round((yyyymmdd2years(epoch)-yyyymmdd2years(epochList[0]))*365)
            ts['date']=epoch
            ts['date_years']=yyyymmdd2years(epoch)
              
            data_dict['t'+str(epoch)]=ts  #

        data_dict['Number_of_epochs']=len(epochList)
        data_dict['epoch_dates']=epochList
        sio.savemat(matFile, {k: data_dict})

    h5file.close()
    return
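
# A minimal scipy.io round-trip illustrating the .mat layout written above
# (one MATLAB struct per dataset); the field names below are assumptions.
import numpy as np
import scipy.io as sio

V = {'data': np.zeros((3, 4), np.float32), 'Unit': 'm/yr', 'width': 4}
sio.savemat('velocity.mat', {'velocity': V})

back = sio.loadmat('velocity.mat')
print back['velocity']['Unit']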
Exemplo n.º 27
0
def main(argv):
    ##### Read Inputs
    inps = cmdLineParse()
    inps.file = ut.get_file_list(inps.file)
    date12_orig = pnet.get_date12_list(inps.file[0])
    print 'input file(s) to be modified: ' + str(inps.file)
    print 'number of interferograms: ' + str(len(date12_orig))
    atr = readfile.read_attribute(inps.file[0])

    # Update inps if template is input
    if inps.template_file:
        inps = read_template2inps(inps.template_file, inps)

    if inps.reset:
        print '----------------------------------------------------------------------------'
        for file in inps.file:
            reset_pairs(file)

        mean_coh_txt_file = os.path.splitext(
            inps.coherence_file)[0] + '_spatialAverage.txt'
        if os.path.isfile(mean_coh_txt_file):
            rmCmd = 'rm ' + mean_coh_txt_file
            print rmCmd
            os.system(rmCmd)

        return

    if all(not i for i in [inps.reference_file, inps.max_temp_baseline, inps.max_perp_baseline,\
                           inps.exclude_ifg_index, inps.exclude_date, inps.coherence_based, inps.start_date, inps.end_date]):
        # Display the network for manual modification when there is no other modification input.
        print 'No input option found to remove interferograms'
        print 'To manually modify the network, please use the --manual option'
        return

    # Convert index list input into a continuous index list
    if inps.exclude_ifg_index:
        ifg_index = list(inps.exclude_ifg_index)
        inps.exclude_ifg_index = []
        for index in ifg_index:
            index_temp = [int(i) for i in index.split(':')]
            index_temp.sort()
            if len(index_temp) == 2:
                for j in range(index_temp[0], index_temp[1] + 1):
                    inps.exclude_ifg_index.append(j)
            elif len(index_temp) == 1:
                inps.exclude_ifg_index.append(int(index))
            else:
                print 'Unrecognized input: ' + index
        inps.exclude_ifg_index = sorted(inps.exclude_ifg_index)
        if max(inps.exclude_ifg_index) > len(date12_orig):
            raise Exception('Input index out of range!\n'+\
                            'input index:'+str(inps.exclude_ifg_index)+'\n'+\
                            'index range of file: '+str(len(date12_orig)))

    ##### Get date12_to_rmv
    date12_to_rmv = []

    # 1. Update date12_to_rmv from reference file
    if inps.reference_file:
        date12_to_keep = pnet.get_date12_list(inps.reference_file,
                                              check_drop_ifgram=True)
        print '----------------------------------------------------------------------------'
        print 'use reference pairs info from file: ' + inps.reference_file
        print 'number of interferograms in reference: ' + str(
            len(date12_to_keep))
        print 'date12 not in reference file:'
        date12_to_rmv_temp = []
        for date12 in date12_orig:
            if date12 not in date12_to_keep:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.1 Update date12_to_rmv from coherence file
    if inps.coherence_based and os.path.isfile(inps.coherence_file):
        print '----------------------------------------------------------------------------'
        print 'use coherence-based network modification from coherence file: ' + inps.coherence_file
        # check mask AOI in lalo
        if inps.aoi_geo_box and inps.trans_file:
            print 'input AOI in (lon0, lat1, lon1, lat0): ' + str(
                inps.aoi_geo_box)
            inps.aoi_pix_box = subset.bbox_geo2radar(inps.aoi_geo_box, atr,
                                                     inps.trans_file)
        if inps.aoi_pix_box:
            # check mask AOI within the data coverage
            inps.aoi_pix_box = subset.check_box_within_data_coverage(
                inps.aoi_pix_box, atr)
            print 'input AOI in (x0,y0,x1,y1): ' + str(inps.aoi_pix_box)

        # Calculate spatial average coherence
        coh_list, coh_date12_list = ut.spatial_average(inps.coherence_file, inps.mask_file,\
                                                           inps.aoi_pix_box, saveList=True)

        # MST network
        if inps.keep_mst:
            print 'Get minimum spanning tree (MST) of interferograms with inverse of coherence.'
            print 'date12 with 1) average coherence < ' + str(
                inps.min_coherence) + ' AND 2) not in MST network: '
            mst_date12_list = pnet.threshold_coherence_based_mst(
                coh_date12_list, coh_list)
        else:
            print 'date12 with average coherence < ' + str(inps.min_coherence)
            mst_date12_list = []

        date12_to_rmv_temp = []
        for i in range(len(coh_date12_list)):
            date12 = coh_date12_list[i]
            if coh_list[i] < inps.min_coherence and date12 not in mst_date12_list:
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.2 Update date12_to_rmv from perp baseline threshold
    if inps.max_perp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with perpendicular spatial baseline > ' + str(
            inps.max_perp_baseline) + ' meters'
        ifg_bperp_list = pnet.igram_perp_baseline_list(inps.file[0])
        date12_to_rmv_temp = []
        for i in range(len(ifg_bperp_list)):
            if ifg_bperp_list[i] > inps.max_perp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.3 Update date12_to_rmv from temp baseline threshold
    if inps.max_temp_baseline:
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with temporal baseline > ' + str(
            inps.max_temp_baseline) + ' days'
        date8_list = ptime.ifgram_date_list(inps.file[0])
        date6_list = ptime.yymmdd(date8_list)
        tbase_list = ptime.date_list2tbase(date8_list)[0]
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            idx1 = date6_list.index(date1)
            idx2 = date6_list.index(date2)
            t_diff = tbase_list[idx2] - tbase_list[idx1]
            if t_diff > inps.max_temp_baseline:
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.4 Update date12_to_rmv from exclude_ifg_index
    if inps.exclude_ifg_index:
        print '----------------------------------------------------------------------------'
        print 'drop date12/pair with the following index number:'
        for index in inps.exclude_ifg_index:
            date12 = date12_orig[index - 1]
            date12_to_rmv.append(date12)
            print str(index) + '    ' + date12

    # 2.5 Update date12_to_rmv from exclude_date
    if inps.exclude_date:
        inps.exclude_date = ptime.yymmdd(inps.exclude_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs including the following dates: \n' + str(
            inps.exclude_date)
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date1, date2 = date12_orig[i].split('-')
            if (date1 in inps.exclude_date) or (date2 in inps.exclude_date):
                date12 = date12_orig[i]
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.6 Update date12_to_rmv from start_date
    if inps.start_date:
        inps.start_date = ptime.yymmdd(inps.start_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date earlier than start-date: ' + inps.start_date
        min_date = int(ptime.yyyymmdd(inps.start_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) < min_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 2.7 Update date12_to_rmv from end_date
    if inps.end_date:
        inps.end_date = ptime.yymmdd(inps.end_date)
        print '----------------------------------------------------------------------------'
        print 'Drop pairs with date later than end-date: ' + inps.end_date
        max_date = int(ptime.yyyymmdd(inps.end_date))
        date12_to_rmv_temp = []
        for i in range(len(date12_orig)):
            date12 = date12_orig[i]
            if any(int(j) > max_date for j in ptime.yyyymmdd(date12.split('-'))):
                date12_to_rmv.append(date12)
                date12_to_rmv_temp.append(date12)
        print date12_to_rmv_temp

    # 3. Manually drop pairs
    if inps.disp_network:
        date12_click = manual_select_pairs_to_remove(inps.file[0])
        for date12 in list(date12_click):
            if date12 not in date12_orig:
                date12_click.remove(date12)
        print 'date12 selected to remove:'
        print date12_click
        date12_to_rmv += date12_click

    # 4. drop duplicate date12 and sort in order
    date12_to_rmv = sorted(list(set(date12_to_rmv)))
    date12_keep = sorted(list(set(date12_orig) - set(date12_to_rmv)))
    print '----------------------------------------------------------------------------'
    print 'number of interferograms to remove: ' + str(len(date12_to_rmv))
    print 'number of interferograms kept     : ' + str(len(date12_keep))

    ##### Calculated date12_to_drop v.s. existing date12_to_drop
    # Get list of date12 of interferograms already been marked to drop
    k = readfile.read_attribute(inps.file[0])['FILE_TYPE']
    h5 = h5py.File(inps.file[0], 'r')
    ifgram_list_all = sorted(h5[k].keys())
    ifgram_list_keep = ut.check_drop_ifgram(h5,
                                            atr,
                                            ifgram_list_all,
                                            print_msg=False)
    ifgram_list_dropped = sorted(
        list(set(ifgram_list_all) - set(ifgram_list_keep)))
    date12_list_dropped = ptime.list_ifgram2date12(ifgram_list_dropped)
    h5.close()

    if date12_to_rmv == date12_list_dropped and inps.mark_attribute:
        print 'Calculated date12 to drop is the same as the existing marked input file, skip updating file attributes.'
        date12_to_rmv = []

    ##### Update date12 to drop
    if date12_to_rmv:
        ##### Update Input Files with date12_to_rmv
        Modified_CoherenceFile = 'Modified_coherence.h5'
        for File in inps.file:
            Modified_File = modify_file_date12_list(File, date12_to_rmv,
                                                    inps.mark_attribute)

            k = readfile.read_attribute(File)['FILE_TYPE']
            # Update Mask File
            if k == 'interferograms':
                print 'update mask file for input ' + k + ' file based on ' + Modified_File
                inps.mask_file = 'mask.h5'
                print 'writing >>> ' + inps.mask_file
                ut.nonzero_mask(Modified_File, inps.mask_file)

            elif k == 'coherence':
                inps.coherence_file = Modified_File
                print 'update average spatial coherence for input ' + k + ' file based on: ' + Modified_File
                outFile = 'averageSpatialCoherence.h5'
                print 'writing >>> ' + outFile
                ut.temporal_average(Modified_File, outFile)

                # Touch spatial average txt file of coherence if it's existed
                coh_spatialAverage_file = os.path.splitext(
                    Modified_File)[0] + '_spatialAverage.txt'
                if os.path.isfile(coh_spatialAverage_file):
                    touchCmd = 'touch ' + coh_spatialAverage_file
                    print touchCmd
                    os.system(touchCmd)

    # Plot result
    if inps.plot:
        print '\nplot modified network and save to file.'
        plotCmd = 'plot_network.py ' + inps.coherence_file + ' --coherence ' + inps.coherence_file + ' --nodisplay'
        if inps.template_file:
            plotCmd += ' --template ' + inps.template_file
        print plotCmd
        os.system(plotCmd)

    print 'Done.'
    return
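
# A self-contained sketch of the coherence-based dropping above: pairs below
# the threshold are removed unless they belong to the minimum spanning tree
# on (1 - coherence) weights, which keeps the network connected. The tiny
# 3-date network and the Kruskal/union-find code are illustrative
# assumptions, not pnet.threshold_coherence_based_mst().
date12_list = ['150101-150113', '150113-150125', '150101-150125']
coh_list    = [0.8, 0.3, 0.2]
min_coherence = 0.4

parent = {}
def find(x):
    while parent.setdefault(x, x) != x:
        x = parent[x]
    return x

# Kruskal's MST: visit pairs from highest coherence (lowest weight) down
mst = set()
for coh, date12 in sorted(zip(coh_list, date12_list), reverse=True):
    d1, d2 = date12.split('-')
    r1, r2 = find(d1), find(d2)
    if r1 != r2:
        parent[r1] = r2
        mst.add(date12)

date12_to_rmv = [d for c, d in zip(coh_list, date12_list)
                 if c < min_coherence and d not in mst]
print date12_to_rmv   # ['150101-150125']; 150113-150125 kept by the MST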
Exemplo n.º 28
0
def main(argv):
    inps = cmdLineParse()

    ##### Check default input arguments
    # default output filename
    if not inps.outfile:
        inps.outfile = os.path.splitext(
            inps.timeseries_file)[0] + '_tropHgt.h5'

    # Basic info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    pix_num = length * width

    # default DEM file
    if not inps.dem_file:
        if 'X_FIRST' in atr.keys():
            inps.dem_file = ['demGeo_tight.h5', 'demGeo.h5']
        else:
            inps.dem_file = ['demRadar.h5']
    try:
        inps.dem_file = ut.get_file_list(inps.dem_file)[0]
    except:
        inps.dem_file = None
        sys.exit('ERROR: No DEM file found!')

    # default Mask file
    if not inps.mask_file:
        if 'X_FIRST' in atr.keys():
            inps.mask_file = 'geo_maskTempCoh.h5'
        else:
            inps.mask_file = 'maskTempCoh.h5'
        if not os.path.isfile(inps.mask_file):
            inps.mask_file = None
            sys.exit('ERROR: No mask file found!')

    ##### Read Mask
    print 'reading mask from file: ' + inps.mask_file
    mask = readfile.read(inps.mask_file, epoch='mask')[0].flatten(1)
    ndx = mask != 0
    msk_num = np.sum(ndx)
    print 'total            pixel number: %d' % pix_num
    print 'estimating using pixel number: %d' % msk_num

    ##### Read DEM
    print 'read DEM from file: ' + inps.dem_file
    dem = readfile.read(inps.dem_file, epoch='height')[0]

    ref_y = int(atr['ref_y'])
    ref_x = int(atr['ref_x'])
    dem -= dem[ref_y, ref_x]

    print 'considering the incidence angle of each pixel ...'
    inc_angle = ut.incidence_angle(atr, dimension=2)
    dem *= 1.0 / np.cos(inc_angle * np.pi / 180.0)

    ##### Design matrix for elevation v.s. phase
    dem = dem.flatten(1)
    if inps.poly_order == 1:
        A = np.vstack((dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem, np.ones(pix_num))).T
    elif inps.poly_order == 2:
        A = np.vstack((dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**2, dem, np.ones(pix_num))).T
    elif inps.poly_order == 3:
        A = np.vstack((dem[ndx]**3, dem[ndx]**2, dem[ndx], np.ones(msk_num))).T
        B = np.vstack((dem**3, dem**2, dem, np.ones(pix_num))).T
    print 'polynomial order: %d' % inps.poly_order

    A_inv = np.linalg.pinv(A)

    ##### Calculate correlation coefficient
    print 'Estimating the tropospheric effect between the differences of the subsequent epochs and DEM'

    h5 = h5py.File(inps.timeseries_file)
    date_list = sorted(h5[k].keys())
    date_num = len(date_list)
    print 'number of acquisitions: ' + str(date_num)
    try:
        ref_date = atr['ref_date']
    except:
        ref_date = date_list[0]

    print '----------------------------------------------------------'
    print 'correlation of DEM with each time-series epoch:'
    corr_array = np.zeros(date_num)
    par_dict = {}
    for i in range(date_num):
        date = date_list[i]
        if date == ref_date:
            cc = 0.0
            par = np.zeros(inps.poly_order + 1)
        else:
            data = h5[k].get(date)[:].flatten(1)

            C = np.zeros((2, msk_num))
            C[0, :] = dem[ndx]
            C[1, :] = data[ndx]
            cc = np.corrcoef(C)[0, 1]

            corr_array[i] = cc
            if inps.threshold and np.abs(cc) < inps.threshold:
                par = np.zeros(inps.poly_order + 1)
            else:
                par = np.dot(A_inv, data[ndx])
        print '%s: %.2f' % (date, cc)
        par_dict[date] = par

    average_phase_height_corr = np.nansum(np.abs(corr_array)) / (date_num - 1)
    print '----------------------------------------------------------'
    print 'Average Correlation of DEM with time-series epochs: %.2f' % average_phase_height_corr

    # Correlation of DEM with Difference of subsequent epochs (Not used for now)
    corr_diff_dict = {}
    par_diff_dict = {}
    for i in range(date_num - 1):
        date1 = date_list[i]
        date2 = date_list[i + 1]
        date12 = date1 + '-' + date2

        data1 = h5[k].get(date1)[:].flatten(1)
        data2 = h5[k].get(date2)[:].flatten(1)
        data_diff = data2 - data1

        C_diff = np.zeros((2, msk_num))
        C_diff[0, :] = dem[ndx]
        C_diff[1, :] = data_diff[ndx]
        cc_diff = np.corrcoef(C_diff)[0, 1]

        corr_diff_dict[date12] = cc_diff
        par = np.dot(A_inv, data_diff[ndx])
        par_diff_dict[date12] = par

    ##### Correct and write time-series file
    print '----------------------------------------------------------'
    print 'removing the stratified tropospheric delay from each epoch'
    print 'writing >>> ' + inps.outfile
    h5out = h5py.File(inps.outfile, 'w')
    group = h5out.create_group(k)

    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        data = h5[k].get(date)[:]

        if date != ref_date:
            # evaluate the fitted polynomial on the full grid, re-reference it
            # to the reference pixel, then subtract it from the phase
            par = par_dict[date]
            trop_delay = np.reshape(np.dot(B, par), [width, length]).T
            trop_delay -= trop_delay[ref_y, ref_x]
            data -= trop_delay

        dset = group.create_dataset(date, data=data, compression='gzip')
        prog_bar.update(i + 1, suffix=date)

    for key, value in atr.iteritems():
        group.attrs[key] = value

    prog_bar.close()
    h5out.close()
    h5.close()

    print 'Done.'
    return inps.outfile
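The routine above estimates a per-epoch polynomial between phase and incidence-angle-scaled elevation and removes it. A minimal, self-contained sketch of the same least-squares idea on synthetic data; the helper name remove_phase_elevation_trend and all values are illustrative only, not the PySAR API:

import numpy as np

def remove_phase_elevation_trend(phase, dem, mask, poly_order=2):
    # fit phase = f(dem) on masked pixels, subtract the model everywhere
    h = dem.flatten()
    y = phase.flatten()
    ndx = mask.flatten() != 0
    # design matrices with powers [h**p, ..., h, 1], as in the function above
    A = np.vstack([h[ndx]**p for p in range(poly_order, -1, -1)]).T
    B = np.vstack([h**p for p in range(poly_order, -1, -1)]).T
    par = np.dot(np.linalg.pinv(A), y[ndx])       # least-squares coefficients
    trend = np.dot(B, par).reshape(phase.shape)   # model evaluated on all pixels
    return phase - trend

# synthetic check: a quadratic height-dependent signal plus noise is removed
dem   = np.random.rand(50, 60) * 2000.0
phase = 1e-8*dem**2 + 2e-5*dem + np.random.randn(50, 60)*1e-3
mask  = np.ones((50, 60))
corrected = remove_phase_elevation_trend(phase, dem, mask, poly_order=2)
print(np.std(corrected) < np.std(phase))    # expected: True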
Example #29
def modify_file_date12_list(File,
                            date12_to_rmv,
                            mark_attribute=False,
                            outFile=None):
    '''Update multi-group HDF5 file using a date12 list of interferograms to remove
    Inputs:
        File          - multi-group HDF5 file, i.e. unwrapIfgram.h5, coherence.h5
        date12_to_rmv - list of strings indicating interferograms in YYMMDD-YYMMDD format
        mark_attribute- bool, if True, change the 'drop_ifgram' attribute only;
                        otherwise, write the result to a new file
        outFile       - string, output file name
    Output:
        outFile       - string, output file name; if mark_attribute=True, outFile = File
    '''
    k = readfile.read_attribute(File)['FILE_TYPE']
    print '----------------------------------------------------------------------------'
    print 'file: ' + File

    if mark_attribute:
        print "set drop_ifgram to 'yes' for all interferograms to remove, and 'no' for all the others."
        h5 = h5py.File(File, 'r+')
        ifgram_list = sorted(h5[k].keys())
        for ifgram in ifgram_list:
            if h5[k][ifgram].attrs['DATE12'] in date12_to_rmv:
                h5[k][ifgram].attrs['drop_ifgram'] = 'yes'
            else:
                h5[k][ifgram].attrs['drop_ifgram'] = 'no'
        h5.close()
        outFile = File

    else:
        date12_orig = pnet.get_date12_list(File)
        date12_to_write = sorted(list(set(date12_orig) - set(date12_to_rmv)))
        print 'number of interferograms in file      : ' + str(
            len(date12_orig))
        print 'number of interferograms to keep/write: ' + str(
            len(date12_to_write))
        print 'list   of interferograms to keep/write: '
        print date12_to_write
        date12Num = len(date12_to_write)

        if not outFile:
            outFile = 'Modified_' + os.path.basename(File)
        print 'writing >>> ' + outFile
        h5out = h5py.File(outFile, 'w')
        gg = h5out.create_group(k)

        h5 = h5py.File(File, 'r')
        igramList = sorted(h5[k].keys())
        date12_list = ptime.list_ifgram2date12(igramList)
        prog_bar = ptime.progress_bar(maxValue=date12Num, prefix='writing: ')
        for i in range(date12Num):
            date12 = date12_to_write[i]
            idx = date12_orig.index(date12)
            igram = igramList[idx]

            data = h5[k][igram].get(igram)[:]
            group = gg.create_group(igram)
            dset = group.create_dataset(igram, data=data, compression='gzip')
            for key, value in h5[k][igram].attrs.iteritems():
                group.attrs[key] = value
            group.attrs['drop_ifgram'] = 'no'
            prog_bar.update(i + 1, suffix=date12_list[i])
        prog_bar.close()
        h5.close()
        h5out.close()
        print 'finished writing >>> ' + outFile

    return outFile
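A short usage sketch for the function above; the file name and the date12 pairs are hypothetical placeholders:

# hypothetical interferogram pairs to drop, in YYMMDD-YYMMDD format
date12_to_rmv = ['970919-971003', '980102-980307']

# option 1: keep the file, only flip the 'drop_ifgram' attributes in place
out_file = modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv,
                                   mark_attribute=True)

# option 2: write the kept interferograms to Modified_unwrapIfgram.h5
out_file = modify_file_date12_list('unwrapIfgram.h5', date12_to_rmv)
print('output file: ' + out_file)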
Example #30
def main(argv):
    plt.switch_backend('Agg')

    cbar_bin_num  = 9
    cbar_label    = 'Mean LOS velocity'
    color_map     = 'jet'
    data_alpha    = 0.7
    disp_opposite = 'no'
    disp_colorbar = 'yes'
    rewrapping    = 'no'
    fig_dpi       = 500
    #fig_size      = [6.0,9.0]
    fig_unit      = 'mm/yr'
    disp_ref      = 'yes'
    ref_size      = 5
    dispDisplacement = 'no'

    if len(sys.argv)>2:
        try:   opts, args = getopt.getopt(argv,"f:m:M:d:c:w:i:r:",['noreference','fig-size=',\
                                               'ref-size=','cbar-label=','displacement','cbar-bin-num='])
        except getopt.GetoptError:  usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt == '-f':        File = arg
            elif opt == '-m':        Vmin = float(arg)
            elif opt == '-M':        Vmax = float(arg)
            elif opt == '-d':        epoch_date    = arg
            elif opt == '-c':        color_map     = arg
            elif opt == '-i':        disp_opposite = arg
            elif opt == '-w':        rewrapping    = arg
            elif opt == '-r':        fig_dpi = int(arg)
            elif opt == '--cbar-bin-num' :   cbar_bin_num     = int(arg)
            elif opt == '--cbar-label'   :   cbar_label       = arg
            elif opt == '--displacement' :   dispDisplacement = 'yes'
            elif opt == '--fig-size'     :   fig_size = [float(i) for i in arg.split(',')][0:2]
            elif opt == '--ref-size'     :   ref_size = int(arg)
            elif opt == '--noreference'  :   disp_ref = 'no'

    elif len(sys.argv)==2:
        if argv[0]=='-h':               usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):   File = argv[0]
        else:                           usage(); sys.exit(1)
    else:                             usage(); sys.exit(1)

    #######################################################
    ###################  Prepare Data  ####################
    ## prepare: data, North, East, South, West

    ext = os.path.splitext(File)[1].lower()
    atr = readfile.read_attribute(File)
    k = atr['FILE_TYPE']
    #print '\n*************** Output to KMZ file ****************'
    print 'Input file is '+k

    if ext == '.h5':
        try:      h5file=h5py.File(File,'r')
        except:   usage() ; sys.exit(1)
        outName=File.split('.')[0]

        if k in ('interferograms','wrapped','coherence'):
            ifgramList=h5file[k].keys()
            for i in range(len(ifgramList)):
                if epoch_date in ifgramList[i]:
                    epoch_number = i
            print ifgramList[epoch_number]
            outName = ifgramList[epoch_number]
            #outName=epoch_date

            dset = h5file[k][ifgramList[epoch_number]].get(ifgramList[epoch_number])
            data = dset[0:dset.shape[0],0:dset.shape[1]]

            if k == 'wrapped':
                print 'Input interferograms are already wrapped; set rewrapping to no.'
                rewrapping = 'no'
                Vmin = -np.pi
                Vmax = np.pi

        elif 'timeseries' in k:
            epochList=h5file['timeseries'].keys()
            for i in range(len(epochList)):
                if epoch_date in epochList[i]:
                    epoch_number = i

            #### Out name
            try:    ref_date = atr['ref_date']
            except: ref_date = ut.yyyymmdd(atr['DATE'])[0]
            #ref_date=h5file['timeseries'].attrs['ref_date']
            if len(epoch_date)==8:  outName=ref_date[2:]+'-'+epoch_date[2:]
            else:                   outName=ref_date[2:]+'-'+epoch_date

            dset = h5file['timeseries'].get(epochList[epoch_number])
            data = dset[0:dset.shape[0],0:dset.shape[1]]

        ### one dataset format: velocity, mask, temporal_coherence, rmse, std, etc.
        else:
            dset = h5file[k].get(k)
            data=dset[0:dset.shape[0],0:dset.shape[1]]
            if disp_opposite in('yes','Yes','Y','y','YES'):
                data=-1*data

            try:
                xref=h5file[k].attrs['ref_x']
                yref=h5file[k].attrs['ref_y']
            except: pass

    elif ext in ['.unw','.cor','.hgt','.trans','.dem']:
        if   ext in ['.unw','.cor','.hgt','.trans']:
            a,data,atr = readfile.read_float32(File)
            outName = File
            if ext in ['.unw']:
                if dispDisplacement == 'yes':
                    print 'show displacement'
                    phase2range = -float(atr['WAVELENGTH']) / (4*np.pi)
                    data *= phase2range
                    atr['UNIT'] = 'm'
                    rewrapping = 'no'    # assignment, not comparison
                    fig_unit = 'mm'
                if rewrapping == 'yes':
                    data = rewrap(data,atr)
                    fig_unit = 'radian'
        elif ext == '.dem':
            data,atr = readfile.read_real_int16(File)
            outName = File
        if   ext in ['.hgt','.dem']:     fig_unit = 'm'
        elif ext in ['.cor','.trans']:   fig_unit = ' '
    else: sys.exit('Do not support '+ext+' file!')


    ########################################################
    if rewrapping=='yes':
        data = rewrap(data,atr)
        Vmin = -np.pi    #[-pi,pi] for wrapped interferograms
        Vmax =  np.pi
    else:
        try:     Vmin
        except:  Vmin = np.nanmin(data)
        try:     Vmax
        except:  Vmax = np.nanmax(data)

    try:
        lon_step = float(atr['X_STEP'])
        lat_step = float(atr['Y_STEP'])
        lon_unit = atr['X_UNIT']
        lat_unit = atr['Y_UNIT']
        West     = float(atr['X_FIRST'])
        North    = float(atr['Y_FIRST'])
        South    = North+lat_step*(data.shape[0]-1)
        East     = West +lon_step*(data.shape[1]-1)
        geocoord = 'yes'
        print 'Geocoded'
    except:
        print '%%%%%%%%%%'
        print 'Error:\nThe input file is not geocoded\n'
        print '%%%%%%%%%%'
        usage();sys.exit(1)


    #######################################################
    ###################  Output KMZ  ######################

    ############### Make PNG file
    print 'Making png file ...'   
    length = data.shape[0]
    width  = data.shape[1]
    try:fig_size
    except:
        fig_size_0 = 6.0           ## min figure dimension: 6.0
        ratio = float(length)/float(width)
        fig_size = [fig_size_0,fig_size_0*ratio]
    print 'figure size:  %.1f, %.1f'%(fig_size[0],fig_size[1])
    ccmap = plt.get_cmap(color_map)
    fig = plt.figure(figsize=fig_size,frameon=False)
    ax = fig.add_axes([0., 0., 1., 1.])
    ax.set_axis_off()

    aspect = width/(length*1.0)
    try:     ax.imshow(data,aspect='auto',cmap=ccmap,vmax=Vmax,vmin=Vmin)
    except:  ax.imshow(data,aspect='auto',cmap=ccmap)

    if disp_ref == 'yes':
        try:
            xref = int(atr['ref_x'])
            yref = int(atr['ref_y'])
            ax.plot(xref,yref,'ks',ms=ref_size)
            print 'show reference point'
        except: print 'Cannot find reference point info!'

    ax.set_xlim([0,width])
    ax.set_ylim([length,0])

    figName = outName + '.png'
    print 'writing '+figName
    plt.savefig(figName, pad_inches=0.0, transparent=True, dpi=fig_dpi)

    ############### Making colorbar
    pc = plt.figure(figsize=(1,8))
    axc = pc.add_subplot(111)
    if   fig_unit in ['mm','mm/yr']: v_scale = 1000
    elif fig_unit in ['cm','cm/yr']: v_scale = 100
    elif fig_unit in ['m',  'm/yr']: v_scale = 1
    else:                            v_scale = 1    # radian, dimensionless, etc.
    norm = mpl.colors.Normalize(vmin=Vmin*v_scale, vmax=Vmax*v_scale)
    clb  = mpl.colorbar.ColorbarBase(axc,cmap=ccmap,norm=norm, orientation='vertical')

    #clb.set_label(fig_unit)
    clb.set_label(cbar_label+' ['+fig_unit+']')
    clb.locator = mpl.ticker.MaxNLocator(nbins=cbar_bin_num)
    clb.update_ticks()

    pc.subplots_adjust(left=0.2,bottom=0.3,right=0.4,top=0.7)
    pc.patch.set_facecolor('white')
    pc.patch.set_alpha(0.7)
    pc.savefig('colorbar.png',bbox_inches='tight',facecolor=pc.get_facecolor(),dpi=300)

    ############## Generate KMZ file
    print 'generating kml file ...'
    try:     doc = KML.kml(KML.Folder(KML.name(atr['PROJECT_NAME'])))
    except:  doc = KML.kml(KML.Folder(KML.name('PySAR product')))
    slc = KML.GroundOverlay(KML.name(figName),KML.Icon(KML.href(figName)),\
                            KML.altitudeMode('clampToGround'),\
                            KML.LatLonBox(KML.north(str(North)),KML.south(str(South)),\
                                          KML.east( str(East)), KML.west( str(West))))
    doc.Folder.append(slc)

    #############################
    print 'adding colorscale ...'
    cb_rg = min(North-South, East-West)
    cb_N = (North+South)/2.0 + 0.5*0.5*cb_rg
    cb_W = East  + 0.1*cb_rg
    slc1 = KML.GroundOverlay(KML.name('colorbar'),KML.Icon(KML.href('colorbar.png')),\
                             KML.altitude('2000'),KML.altitudeMode('absolute'),\
                             KML.LatLonBox(KML.north(str(cb_N)),KML.south(str(cb_N-0.5*cb_rg)),\
                                           KML.west( str(cb_W)),KML.east( str(cb_W+0.14*cb_rg))))
    doc.Folder.append(slc1)

    #############################
    kmlstr = etree.tostring(doc, pretty_print=True) 
    kmlname = outName + '.kml'
    print 'writing '+kmlname
    kmlfile = open(kmlname,'w')
    kmlfile.write(kmlstr)
    kmlfile.close()

    kmzName = outName + '.kmz'
    print 'writing '+kmzName
    cmdKMZ = 'zip ' + kmzName +' '+ kmlname +' ' + figName + ' colorbar.png'
    os.system(cmdKMZ)

    cmdClean = 'rm '+kmlname;      print cmdClean;    os.system(cmdClean)
    cmdClean = 'rm '+figName;      print cmdClean;    os.system(cmdClean)
    cmdClean = 'rm colorbar.png';  print cmdClean;    os.system(cmdClean)
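The KMZ step above reduces to draping a georeferenced PNG over its lat/lon bounding box. A minimal sketch of that core, assuming pykml and lxml are installed and the PNG already exists; write_kmz_overlay is a hypothetical helper, and the KMZ is packaged with zipfile instead of shelling out to zip:

import zipfile
from lxml import etree
from pykml.factory import KML_ElementMaker as KML

def write_kmz_overlay(png_file, north, south, east, west, out_base='overlay'):
    # one GroundOverlay clamped to the ground over the given bounding box
    doc = KML.kml(KML.Folder(
        KML.name(out_base),
        KML.GroundOverlay(
            KML.name(png_file),
            KML.Icon(KML.href(png_file)),
            KML.altitudeMode('clampToGround'),
            KML.LatLonBox(KML.north(str(north)), KML.south(str(south)),
                          KML.east(str(east)),   KML.west(str(west))))))
    kml_file = out_base + '.kml'
    with open(kml_file, 'wb') as f:
        f.write(etree.tostring(doc, pretty_print=True))
    # a KMZ is just a zip archive holding the KML and its images
    with zipfile.ZipFile(out_base + '.kmz', 'w') as z:
        z.write(kml_file)
        z.write(png_file)
    return out_base + '.kmz'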