Example 1
def read_template2inps(templateFile, inps=None):
    '''Update inps with options from templateFile'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(templateFile)
    key_list = template.keys()

    key = 'pysar.reference.date'
    if key in key_list:
        inps.ref_date = template[key]

    prefix = 'pysar.residualStd.'
    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = 'maskTempCoh.h5'
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'ramp'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.ramp_type = 'quadratic'
        else:
            inps.ramp_type = 'no'

    return inps
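Note: the template files consumed by read_template2inps above (and by the other examples below) are plain-text "key = value" option files parsed by readfile.read_template. The sketch below is purely illustrative; the file name, the option values, and the availability of cmdLineParse() and readfile in the same module are assumptions, not part of the original example.

# Hypothetical template file 'pysarApp_template.txt' (contents assumed):
#   pysar.reference.date       = auto
#   pysar.residualStd.maskFile = auto
#   pysar.residualStd.ramp     = auto
inps = read_template2inps('pysarApp_template.txt')  # falls back to cmdLineParse() for defaults
print inps.mask_file, inps.ramp_type                # -> maskTempCoh.h5 quadratic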
Example 2
def read_seed_template2inps(template_file, inps=None):
    '''Read seed/reference info from template file and update input namespace'''
    if not inps:
        inps = cmdLineParse([''])
    
    template = readfile.read_template(template_file)
    templateKeyList = template.keys()
    
    if not inps.ref_y or not inps.ref_x:
        if 'pysar.seed.yx' in templateKeyList:
            inps.ref_y, inps.ref_x = [int(i) for i in template['pysar.seed.yx'].split(',')]
        elif 'pysar.reference.yx' in templateKeyList:
            try:  inps.ref_y, inps.ref_x = [int(i) for i in template['pysar.reference.yx'].split(',')]
            except:  pass
        else: print 'No y/x input from template'
    
    if not inps.ref_lat or not inps.ref_lon:
        if 'pysar.seed.lalo' in templateKeyList:
            inps.ref_lat, inps.ref_lon = [float(i) for i in template['pysar.seed.lalo'].split(',')]
        elif 'pysar.reference.lalo' in templateKeyList:
            try:  inps.ref_lat, inps.ref_lon = [float(i) for i in template['pysar.reference.lalo'].split(',')]
            except:  pass
        else: print 'No lat/lon input from template'
    
    return inps
Example 3
def read_template2inps(template_file, inps=None):
    '''Read input template file into inps.ex_date'''
    if not inps:
        inps = cmdLineParse()
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Read template option
    prefix = 'pysar.topoError.'

    key = prefix + 'polyOrder'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.poly_order = 2
        else:
            inps.poly_order = int(value)

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.ex_date = []
        else:
            inps.ex_date = value.replace(',', ' ').split()

    key = prefix + 'stepFuncDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.step_date = ptime.yyyymmdd(value)
        else:
            inps.step_date = None

    return inps
Example 4
def main(argv):

    ##### Check Inputs
    if not argv or argv[0] in ['-h','--help']:
        usage()
        sys.exit(1)
    if len(argv) < 2:  print('\nAt least 2 inputs are needed.\n'); sys.exit(1)

    ##### Read Original Attributes
    print '************ Add / Update HDF5 File Attributes *************'
    File = argv[0]
    atr  = readfile.read_attribute(File)
    print 'Input file is '+atr['PROCESSOR']+' '+atr['FILE_TYPE']+': '+File

    ##### Read New Attributes
    atr_new = dict()
    for i in range(1,len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()
    print 'The following attributes will be added/updated:'
    info.print_attributes(atr_new)

    ##### Update h5 File
    k = atr['FILE_TYPE']
    h5 = h5py.File(File,'r+')
    for key, value in atr_new.iteritems():
        h5[k].attrs[key] = value
    h5.close()
    print 'Done.'

    return
Example 5
def read_template2inps(template_file, inps=None):
    '''Read input template file into inps.ex_date'''
    if not inps:
        inps = cmdLineParse()
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Read template option
    prefix = 'pysar.velocity.'
    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.ex_date = ['exclude_date.txt']
        elif value == 'no':
            inps.ex_date = []
        else:
            inps.ex_date = value.replace(',', ' ').split()

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.min_date = ptime.yyyymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_date = ptime.yyyymmdd(value)

    return inps
Example 6
def main(argv):
    parser = build_parser()
    parseArgs = parser.parse_args()

    username = parseArgs.user
    password = parseArgs.password
    host = parseArgs.host
    db = parseArgs.db
    working_dir = parseArgs.folder

    # make sure we have a final / so the below code doesn't break
    if working_dir[-1] != "/":
        working_dir += "/"

    unavco_name = parseArgs.unavco_name
    attributes_file = working_dir + "add_Attribute.txt"
    attributes = readfile.read_template(attributes_file)
    dbController = InsarDatabaseController(username, password, host, db)
    dbController.connect()

    for key in attributes:
        print "Setting attribute " + key + " to " + attributes[key]
        if key == "plotAttributes":
            dbController.add_plot_attribute(unavco_name, key, attributes[key])
        else:
            dbController.add_attribute(unavco_name, key, attributes[key])

    dbController.index_table_on("extra_attributes", "area_id", "area_id_idx")
    dbController.close()
Example 7
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    print 'read options from template file: '+os.path.basename(template_file)
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix+'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    key = prefix+'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = 'mask.h5'
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    return inps
Example 8
def main(argv):
    parser = build_parser()
    parseArgs = parser.parse_args()

    username = parseArgs.user
    password = parseArgs.password
    host = parseArgs.host
    db = parseArgs.db
    working_dir = parseArgs.folder


    # make sure we have a final / so the below code doesn't break
    if working_dir[-1] != "/":
        working_dir += "/"

    unavco_name = parseArgs.unavco_name
    attributes_file = working_dir + "add_Attribute.txt"
    attributes = readfile.read_template(attributes_file)
    dbController = InsarDatabaseController(username, password, host, db)    
    dbController.connect()

    for key in attributes:
        print "Setting attribute " + key + " to " + attributes[key]
        if key == "plotAttributes":
            dbController.add_plot_attribute(unavco_name, key, attributes[key])
        else:
            dbController.add_attribute(unavco_name, key, attributes[key])

    dbController.index_table_on("extra_attributes", "area_id", "area_id_idx")
    dbController.close()
Example 9
def read_template2inps(template_file, inps):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.timeseriesInv.'

    key = prefix + 'residualNorm'
    if key in key_list and template[key] in ['L1']:
        inps.resid_norm = 'L1'
    else:
        inps.resid_norm = 'L2'

    key = prefix + 'coherenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.coherence_file = 'coherence.h5'
        elif value in ['no']:
            inps.coherence_file = None
        else:
            inps.coherence_file = value

    key = prefix + 'minCoherence'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.min_coherence = 0.2
        else:
            inps.min_coherence = float(value)

    key = prefix + 'maxCoherence'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.max_coherence = 0.85
        else:
            inps.max_coherence = float(value)

    key = prefix + 'weightFunc'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.weight_function = 'no'
        elif value.startswith('norm'):
            inps.weight_function = 'normal'
        elif value.startswith('lin'):
            inps.weight_function = 'linear'
        elif value.startswith('var'):
            inps.weight_function = 'variance'
        else:
            print 'Un-recognized input for %s = %s' % (key, value)
            sys.exit(-1)

    return inps
Example 10
def update_inps_from_template(inps, template_file):
    '''Update inps.ex_date with input template file'''    
    tmpl = readfile.read_template(template_file)
    if 'pysar.network.dropDate' in tmpl.keys():
        dropDateList = tmpl['pysar.network.dropDate'].replace(',',' ').split()
        if dropDateList:
            inps.ex_date += list(set(dropDateList) - set(inps.ex_date))
    return inps
Example 11
def read_template2inps(templateFile, inps=None):
    '''Update inps with pysar.residualRms.* option from templateFile'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(templateFile)
    key_list = template.keys()

    prefix = 'pysar.residualRms.'

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = 'maskTempCoh.h5'
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'ramp'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.ramp_type = 'quadratic'
        else:
            inps.ramp_type = value

    key = prefix + 'threshold'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.min_rms = 0.02
        else:
            inps.min_rms = float(value)

    key = prefix + 'saveRefDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'yes']:
            inps.save_reference_date = True
        else:
            inps.save_reference_date = False

    key = prefix + 'saveExcludeDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'yes']:
            inps.save_exclude_date = True
        else:
            inps.save_exclude_date = False

    return inps
Example 12
def main(argv):

    ##### Check Inputs
    if not argv or argv[0] in ['-h', '--help']:
        usage()
        sys.exit(1)
    if len(argv) < 2 or not argv[1]:
        raise Exception('\nAt least 2 inputs are needed.\n')

    ##### Read Original Attributes
    #print '************ Add / Update HDF5 File Attributes *************'
    File = argv[0]
    atr = readfile.read_attribute(File)
    print 'Input file is ' + atr['PROCESSOR'] + ' ' + atr[
        'FILE_TYPE'] + ': ' + File

    ##### Read New Attributes
    atr_new = dict()
    for i in range(1, len(argv)):
        if os.path.isfile(argv[i]):
            atr_tmp = readfile.read_template(argv[i])
            atr_new.update(atr_tmp)
        else:
            atr_tmp = argv[i].split('=')
            atr_new[atr_tmp[0].strip()] = atr_tmp[1].strip()
    print "The following attributes will be added/updated, or removed if new value is 'None':"
    info.print_attributes(atr_new)

    ext = os.path.splitext(File)[1]
    ##### Update h5 File
    if ext in ['.h5', '.he5']:
        File = ut.add_attribute(File, atr_new)
    else:
        if not ut.update_attribute_or_not(atr_new, atr):
            print 'All updated (removed) attributes already exist (do not exist) and have the same value, skip update.'
        else:
            for key, value in atr_new.iteritems():
                # delete the item if new value is None
                if value == 'None':
                    try:
                        atr.pop(key)
                    except:
                        pass
                else:
                    atr[key] = value
            if atr['PROCESSOR'] == 'roipac':
                print 'writing >>> ' + File + '.rsc'
                writefile.write_roipac_rsc(atr, File + '.rsc')

    return File
Example 13
def read_seed_template2inps(template_file, inps=None):
    '''Read seed/reference info from template file and update input namespace'''
    if not inps:
        inps = cmdLineParse([''])

    template = readfile.read_template(template_file)
    key_list = template.keys()

    prefix = 'pysar.reference.'

    key = prefix + 'yx'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.ref_y, inps.ref_x = [int(i) for i in value.split(',')]

    key = prefix + 'lalo'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.ref_lat, inps.ref_lon = [float(i) for i in value.split(',')]

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'coherenceFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.coherence_file = 'averageSpatialCoherence.h5'
        else:
            inps.coherence_file = value

    key = prefix + 'minCoherence'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.min_coherence = 0.85
        else:
            inps.min_coherence = float(value)

    return inps
Example 14
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    print 'read options from template file: ' + os.path.basename(inps.template_file)
    template = readfile.read_template(inps.template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.unwrapError.'

    key = prefix + 'method'
    if key in key_list:
        value = template[key]
        if value in ['bridging', 'phase_closure']:
            inps.method = value
        elif value in ['auto', 'no']:
            inps.method = None
        else:
            print 'Unrecognized input for %s: %s' % (key, value)

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.mask_file = value

    key = prefix + 'yx'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            yx = value.replace(';', ' ').replace(',', ' ').split()
            yx = [int(i) for i in yx]
            inps.y = yx[0::2]
            inps.x = yx[1::2]

    key = prefix + 'ramp'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.ramp_type = 'plane'
        elif value in ['plane', 'quadratic']:
            inps.ramp_type = value
        else:
            print 'Unrecognized input for %s: %s' % (key, value)

    return inps
Example 15
def update_inps_with_template(inps, template_file):
    template_dict = readfile.read_template(inps.template_file)
    keyList = template_dict.keys()

    if not inps.max_temp_baseline and 'pysar.network.maxTempBaseline' in keyList:
        inps.max_temp_baseline = float(
            template_dict['pysar.network.maxTempBaseline'])
    if not inps.max_perp_baseline and 'pysar.network.maxPerpBaseline' in keyList:
        inps.max_perp_baseline = float(
            template_dict['pysar.network.maxPerpBaseline'])

    if not inps.drop_date and 'pysar.network.dropDate' in keyList:
        inps.drop_date = [
            i for i in template_dict['pysar.network.dropDate'].replace(
                ',', ' ').split()
        ]
    if not inps.drop_ifg_index and 'pysar.network.dropIfgramIndex' in keyList:
        inps.drop_ifg_index = [
            i for i in template_dict['pysar.network.dropIfgramIndex'].replace(
                ',', ' ').split()
        ]

    if not inps.reference_file and 'pysar.network.reference' in keyList:
        inps.reference_file = template_dict['pysar.network.reference']

    # Coherence-Based
    if 'pysar.network.coherenceBase' in keyList:
        if not inps.coherence_file and template_dict[
                'pysar.network.coherenceBase'].lower() in ['yes', 'y', 'auto']:
            # Search coherence file from input files
            k_list = [
                readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file
            ]
            try:
                cohFileIdx = k_list.index('coherence')
            except:
                sys.exit(
                    "ERROR: No coherence found in input files, cannot use coherence-based approach without it."
                )
            inps.coherence_file = inps.file[cohFileIdx]

            # Search mask file
            if not inps.mask_file and os.path.isfile('Mask.h5'):
                inps.mask_file = 'Mask.h5'

    return inps
Example 16
def read_template2inps(template_file, inps):
    '''Read input template options into Namespace inps'''
    print 'read input option from template file: '+template_file
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.geocode.'

    key = prefix+'resolution'
    if key in key_list:
        value = template[key]
        if value not in ['auto']:
            inps.lalo_step = value

    return inps
Example 17
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    print 'read options from template file: ' + os.path.basename(template_file)
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = value

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'minCoherence'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.coh_thres = 0.7
        else:
            inps.coh_thres = float(value)

    return inps
Example 18
def read_subset_template2box(templateFile):
    '''Read pysar.subset.lalo/yx option from template file into box type
    Return None if not specified.
    '''
    tmpl = readfile.read_template(templateFile)
    try:
        sub = [i.strip() for i in tmpl['pysar.subset.lalo'].split(',')]
        sub_lat = sorted([float(i.strip()) for i in sub[0].split(':')])
        sub_lon = sorted([float(i.strip()) for i in sub[1].split(':')])
        geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
    except:
        geo_box = None
    try:
        sub = [i.strip() for i in tmpl['pysar.subset.yx'].split(',')]
        sub_y = sorted([int(i.strip()) for i in sub[0].split(':')])
        sub_x = sorted([int(i.strip()) for i in sub[1].split(':')])
        pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])
    except:
        pix_box = None
    return pix_box, geo_box
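A short usage sketch for read_subset_template2box above, assuming a template that contains the two subset options (the numbers are illustrative only):

# Hypothetical template file 'pysarApp_template.txt' (contents assumed):
#   pysar.subset.lalo = 31.5:32.5, 130.0:131.0
#   pysar.subset.yx   = 300:800, 1000:3500
pix_box, geo_box = read_subset_template2box('pysarApp_template.txt')
# pix_box -> (1000, 300, 3500, 800)       i.e. (x0, y0, x1, y1)
# geo_box -> (130.0, 32.5, 131.0, 31.5)   i.e. (W, N, E, S)
# Either value is None if the corresponding option is missing or malformed.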
Example 19
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    print 'read options from template file: ' + os.path.basename(template_file)
    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.save.hdfEos5.'

    key = prefix + 'update'
    if key in key_list and template[key] == 'yes':
        inps.update = True

    key = prefix + 'subset'
    if key in key_list and template[key] == 'yes':
        inps.subset = True

    return inps
Example 20
def main(argv):
    inps = cmdLineParse()
    # output filename
    ext = os.path.splitext(inps.ifgram_file)[1]
    if not inps.outfile:
        inps.outfile = os.path.splitext(inps.ifgram_file)[0] + '_unwCor' + ext

    # read template file
    if inps.template_file:
        template = readfile.read_template(inps.template_file)
        key = 'pysar.unwrapError.yx'
        if key in template.keys():
            print 'read ' + key + ' option from template file: ' + inps.template_file
            yx = template[key].replace(';', ' ').replace(',', ' ').split()
            yx = [int(i) for i in yx]
            inps.y = yx[0::2]
            inps.x = yx[1::2]

    # Method
    if inps.y and inps.x:
        inps.method = 'bridging'
    else:
        inps.method = 'phase_closure'
    print 'unwrapping error correction using method: ' + inps.method

    #####
    if inps.method == 'phase_closure':
        inps.outfile = unwrap_error_correction_phase_closure(
            inps.ifgram_file, inps.mask_file, inps.outfile)

    elif inps.method == 'bridging':
        inps.outfile = unwrap_error_correction_bridging(inps.ifgram_file, inps.mask_file, inps.y, inps.x,\
                                                        inps.ramp_type, inps.outfile)[0]

    print 'Done.'
    return inps.outfile
Example 21
def main(argv):
    try:
        templateFile = argv[1]
    except:
        Usage()
        sys.exit(1)

    from pysar._pysar_utilities import check_variable_name

    templateContents = readfile.read_template(templateFile)
    projectName = os.path.basename(templateFile).partition(".")[0]

    try:
        processProjectDir = argv[2]
        tssarProjectDir = argv[3]
    except:
        if os.getenv("PARENTDIR"):
            processProjectDir = os.getenv("SCRATCHDIR") + "/" + projectName + "/PROCESS"
            tssarProjectDir = os.getenv("SCRATCHDIR") + "/" + projectName + "/TSSAR"
        else:
            processProjectDir = os.getenv("PROCESSDIR") + "/" + projectName
            tssarProjectDir = os.getenv("TSSARDIR") + "/" + projectName
    print "\n*************** Loading Data into PySAR ****************"
    print "PROCESS directory: " + processProjectDir
    print "TSSAR   directory: " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir):
        os.mkdir(tssarProjectDir)

    ########### Use defaults if paths not given in template file #########
    import h5py
    import numpy as np

    optionName = {}
    optionName["interferograms"] = "pysar.inputFiles"
    optionName["coherence"] = "pysar.corFiles"
    optionName["wrapped"] = "pysar.wrappedFiles"
    optionName["geomap"] = "pysar.geomap"
    optionName["demGeo"] = "pysar.dem.geoCoord"
    optionName["demRdr"] = "pysar.dem.radarCoord"

    try:
        igramPath = templateContents["pysar.inputFiles"]
        igramPath = check_variable_name(igramPath)
    except:
        igramPath = processProjectDir + "/DONE/IFGRAM*/filt_*.unw"
    print "Path pattern for unwrapped interferogram: " + igramPath
    # except: igramPath = os.getenv('SCRATCHDIR') + '/' + projectName + '/PROCESS/DONE/IFGRAM*/filt_*.unw'

    try:
        corPath = templateContents["pysar.corFiles"]
        corPath = check_variable_name(corPath)
    except:
        corPath = processProjectDir + "/DONE/IFGRAM*/filt_*rlks.cor"
    print "Path pattern for coherence:               " + corPath

    try:
        wrapPath = templateContents["pysar.wrappedFiles"]
        wrapPath = check_variable_name(wrapPath)
    except:
        wrapPath = processProjectDir + "/DONE/IFGRAM*/filt_*rlks.int"
    print "Path pattern for wrapped interferogram:   " + wrapPath

    # try:    demRdrPath = templateContents['pysar.dem.radarCoord'];  demRdrPath = check_variable_name(demRdrPath)
    # except:
    #  demRdrList=glob.glob(demRdrPath)

    ###########################################################################
    ######################### Unwrapped Interferograms ########################

    try:
        if os.path.isfile(tssarProjectDir + "/LoadedData.h5"):
            print "\nLoadedData.h5" + "  already exists."
            sys.exit(1)
        igramList = glob.glob(igramPath)
        igramList = sorted(igramList)
        k = "interferograms"
        check_number(k, optionName[k], igramList)  # number check
        print "loading interferograms ..."
        igramList, mode_width, mode_length = check_size(k, igramList)  # size check
        igramList = sorted(igramList)

        h5file = tssarProjectDir + "/LoadedData.h5"
        f = h5py.File(h5file, "w")
        gg = f.create_group("interferograms")
        MaskZero = np.ones([int(mode_length), int(mode_width)])
        for igram in igramList:
            if not os.path.basename(igram) in f:
                print "Adding " + igram
                group = gg.create_group(os.path.basename(igram))
                amp, unw, unwrsc = readfile.read_float32(igram)

                MaskZero *= amp

                dset = group.create_dataset(os.path.basename(igram), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(igram) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "radian"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(igram)

        Mask = np.ones([int(mode_length), int(mode_width)])
        Mask[MaskZero == 0] = 0
        # gm = f.create_group('mask')
        # dset = gm.create_dataset('mask', data=Mask, compression='gzip')
        f.close()

        ############## Mask file ###############
        print "writing to Mask.h5\n"
        # Mask=np.ones([int(mode_length),int(mode_width)])
        # Mask[MaskZero==0]=0
        h5file = tssarProjectDir + "/Mask.h5"
        h5mask = h5py.File(h5file, "w")
        group = h5mask.create_group("mask")
        dset = group.create_dataset(os.path.basename("mask"), data=Mask, compression="gzip")
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        h5mask.close()

    except:
        print "No unwrapped interferogram is loaded.\n"

    ########################################################################
    ############################# Coherence ################################
    try:
        if os.path.isfile(tssarProjectDir + "/Coherence.h5"):
            print "\nCoherence.h5" + "  already exists."
            sys.exit(1)
        corList = glob.glob(corPath)
        corList = sorted(corList)
        k = "coherence"
        check_number(k, optionName[k], corList)  # number check
        print "loading coherence files ..."
        corList, mode_width, mode_length = check_size(k, corList)  # size check
        corList = sorted(corList)

        h5file = tssarProjectDir + "/Coherence.h5"
        fcor = h5py.File(h5file, "w")
        gg = fcor.create_group("coherence")
        meanCoherence = np.zeros([int(mode_length), int(mode_width)])
        for cor in corList:
            if not os.path.basename(cor) in fcor:
                print "Adding " + cor
                group = gg.create_group(os.path.basename(cor))
                amp, unw, unwrsc = readfile.read_float32(cor)

                meanCoherence += unw
                dset = group.create_dataset(os.path.basename(cor), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(cor) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "1"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(cor)
        # fcor.close()

        ########### mean coherence file ###############
        meanCoherence = meanCoherence / (len(corList))
        print "writing meanCoherence group to the coherence h5 file"
        gc = fcor.create_group("meanCoherence")
        dset = gc.create_dataset("meanCoherence", data=meanCoherence, compression="gzip")

        print "writing average_spatial_coherence.h5\n"
        h5file_CorMean = tssarProjectDir + "/average_spatial_coherence.h5"
        fcor_mean = h5py.File(h5file_CorMean, "w")
        group = fcor_mean.create_group("mask")
        dset = group.create_dataset(os.path.basename("mask"), data=meanCoherence, compression="gzip")
        for key, value in unwrsc.iteritems():
            group.attrs[key] = value
        fcor_mean.close()

        fcor.close()

    except:
        print "No correlation file is loaded.\n"

    ##############################################################################
    ########################## Wrapped Interferograms ############################

    try:
        if os.path.isfile(tssarProjectDir + "/Wrapped.h5"):
            print "\nWrapped.h5" + "  already exists."
            sys.exit(1)
        wrapList = glob.glob(wrapPath)
        wrapList = sorted(wrapList)
        k = "wrapped"
        check_number(k, optionName[k], wrapList)  # number check
        print "loading wrapped phase ..."
        wrapList, mode_width, mode_length = check_size(k, wrapList)  # size check
        wrapList = sorted(wrapList)

        h5file = tssarProjectDir + "/Wrapped.h5"
        fw = h5py.File(h5file, "w")
        gg = fw.create_group("wrapped")
        for wrap in wrapList:
            if not os.path.basename(wrap) in fw:
                print "Adding " + wrap
                group = gg.create_group(os.path.basename(wrap))
                amp, unw, unwrsc = readfile.read_complex_float32(wrap)

                dset = group.create_dataset(os.path.basename(wrap), data=unw, compression="gzip")
                for key, value in unwrsc.iteritems():
                    group.attrs[key] = value

                d1, d2 = unwrsc["DATE12"].split("-")
                baseline_file = os.path.dirname(wrap) + "/" + d1 + "_" + d2 + "_baseline.rsc"
                baseline = readfile.read_roipac_rsc(baseline_file)
                for key, value in baseline.iteritems():
                    group.attrs[key] = value
                group.attrs["PROJECT_NAME"] = projectName
                group.attrs["UNIT"] = "radian"
            else:
                print os.path.basename(h5file) + " already contains " + os.path.basename(wrap)
        fw.close()
        print "Writed " + str(len(wrapList)) + " wrapped interferograms to " + h5file + "\n"

    except:
        print "No wrapped interferogram is loaded.\n"

    ##############################################################################
    ################################# geomap.trans ###############################

    try:
        geomapPath = tssarProjectDir + "/geomap*.trans"
        geomapList = glob.glob(geomapPath)
        if len(geomapList) > 0:
            print "\ngeomap*.trans" + "  already exists."
            sys.exit(1)

        geomapPath = templateContents["pysar.geomap"]
        geomapPath = check_variable_name(geomapPath)
        geomapList = glob.glob(geomapPath)

        cpCmd = "cp " + geomapList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + geomapList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no geomap file is loaded.\n"
        # print "*********************************\n"

    ##############################################################################
    ##################################  DEM  #####################################

    try:
        demRdrPath = tssarProjectDir + "/radar*.hgt"
        demRdrList = glob.glob(demRdrPath)
        if len(demRdrList) > 0:
            print "\nradar*.hgt" + "  already exists."
            sys.exit(1)

        demRdrPath = templateContents["pysar.dem.radarCoord"]
        demRdrPath = check_variable_name(demRdrPath)
        demRdrList = glob.glob(demRdrPath)

        cpCmd = "cp " + demRdrList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + demRdrList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no DEM (radar coordinate) file is loaded.\n"
        # print "*********************************"

    try:
        demGeoPath = tssarProjectDir + "/*.dem"
        demGeoList = glob.glob(demGeoPath)
        if len(demGeoList) > 0:
            print "\n*.dem" + "  already exists."
            sys.exit(1)

        demGeoPath = templateContents["pysar.dem.geoCoord"]
        demGeoPath = check_variable_name(demGeoPath)
        demGeoList = glob.glob(demGeoPath)

        cpCmd = "cp " + demGeoList[0] + " " + tssarProjectDir
        print cpCmd
        os.system(cpCmd)
        cpCmd = "cp " + demGeoList[0] + ".rsc " + tssarProjectDir
        print cpCmd + "\n"
        os.system(cpCmd)
    except:
        # print "*********************************"
        print "no DEM (geo coordinate) file is loaded.\n"
Example 22
def main(argv):

    global outName
    parallel = 'no'

    ############### Check Inputs ###############
    if len(sys.argv)>3:
        try:
            opts, args = getopt.getopt(argv,'f:l:L:o:t:x:y:r:',['lat=','lon=','row=','col=',\
                                            'parallel','outfill=','outfill-nan','outfill-zero'])
        except getopt.GetoptError:
            print 'Error while getting args'
            Usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt == '-f':   File         = arg.split(',')
            elif opt == '-o':   outName      = arg
            elif opt == '-t':   templateFile = arg
            elif opt == '-r':   refFile      = arg
            elif opt in ['-x','--col']  :   sub_x   = [int(i)   for i in arg.split(':')];    sub_x.sort()
            elif opt in ['-y','--row']  :   sub_y   = [int(i)   for i in arg.split(':')];    sub_y.sort()
            elif opt in ['-l','--lat']  :   sub_lat = [float(i) for i in arg.split(':')];  sub_lat.sort()
            elif opt in ['-L','--lon']  :   sub_lon = [float(i) for i in arg.split(':')];  sub_lon.sort()
            elif opt in '--parallel'    :   parallel = 'yes'
            elif opt in '--outfill'     :   out_fill = float(arg)
            elif opt in '--outfill-nan' :   out_fill = np.nan
            elif opt in '--outfill-zero':   out_fill = 0.0

    elif len(sys.argv)==3:
        File         = argv[0].split(',')
        templateFile = argv[1]
    elif len(sys.argv)==2:
        if argv[0] in ['-h','--help']:  Usage(); sys.exit()
        else: print '\nERROR: A minimum of 3 inputs is needed.\n'; Usage(); sys.exit()
    else: Usage(); sys.exit(1)

    ##### Check Input file Info
    print '\n**************** Subset *********************'
    fileList = ut.get_file_list(File)
    print 'number of files: '+str(len(fileList))
    print fileList
    atr = readfile.read_attributes(fileList[0])

    if len(fileList) == 1 and parallel == 'yes':
        print 'parallel is disabled for one input file.'
        parallel = 'no'

    ################## Subset Setting ###########
    try:
        atr['X_FIRST']
        print 'geo coordinate'
    except:
        print 'radar coordinate'
    ## Read Subset Inputs
    try:
        templateFile
        template = readfile.read_template(templateFile)
    except: pass

    try:
        refFile
        atr_ref = readfile.read_attributes(refFile)
        box_ref = geo_box(atr_ref)
        lat_ref = [box_ref[3],box_ref[1]]
        lon_ref = [box_ref[0],box_ref[2]]
    except: pass

    try:
        sub_lat
        sub_lon
    except:
        try:
            sub_lat = lat_ref
            sub_lon = lon_ref
        except:
            try:
                sub = template['pysar.subset.lalo'].split(',')
                sub_lat = [float(i) for i in sub[0].split(':')];  sub_lat.sort()
                sub_lon = [float(i) for i in sub[1].split(':')];  sub_lon.sort()
            except: pass; #print 'No pysar.subset.lalo option found in template file!'

    try:
        sub_y
        sub_x
    except:
        try:
            sub = template['pysar.subset.yx'].split(',')
            sub_y = [int(i) for i in sub[0].split(':')];  sub_y.sort()
            sub_x = [int(i) for i in sub[1].split(':')];  sub_x.sort()
        except: pass; #print 'No pysar.subset.yx option found in template file!'

    ## Check Subset Inputs Existed or not
    try:     sub_y
    except:
        try: sub_x
        except:
            try: sub_lat
            except:
                try: sub_lon
                except: print 'ERROR: no subset is set.'; Usage(); sys.exit(1)

    ##### Subset range radar to geo
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    print 'input file length: '+str(length)
    print 'input file width : '+str(width)

    try: sub_y = coord_geo2radar(sub_lat,atr,'latitude')
    except:
        try:    sub_y
        except: sub_y = [0,length]
    try: sub_x = coord_geo2radar(sub_lon,atr,'longitude')
    except:
        try:    sub_x
        except: sub_x = [0,width]

    ##### Check subset range
    try:
        out_fill
    except:
        sub_y,sub_x = check_subset_range(sub_y,sub_x,atr)
        out_fill = np.nan
        if sub_y[1]-sub_y[0] == length and sub_x[1]-sub_x[0] == width:
            print 'Input subset range == data size, no need to subset.'
            sys.exit(0)

    ################### Subset #######################
    if parallel == 'no':
        for file in fileList:
            print '-------------------------------------------'
            print 'subsetting : '+file
            try:    subset_file(file,sub_x,sub_y,out_fill,outName)
            except: subset_file(file,sub_x,sub_y,out_fill)

    else:
        print '-------------------------'
        print 'parallel subsetting ...'
        print '-------------------------'
        from joblib import Parallel, delayed
        import multiprocessing
        num_cores = multiprocessing.cpu_count()
        Parallel(n_jobs=num_cores)(delayed(subset_file)(file,sub_x,sub_y,out_fill) for file in fileList)

    print 'Done.'
Example 23
def main(argv):
    start = time.time()
    inps = cmdLineParse()

    #########################################
    # Initiation
    #########################################
    print LOGO
    # Read template
    inps.project_name = os.path.splitext(os.path.basename(
        inps.template_file))[0]
    print 'Project name: ' + inps.project_name
    inps.template_file = os.path.abspath(inps.template_file)
    template = readfile.read_template(inps.template_file)
    for key in template.keys():
        if template[key].lower() == 'default': template[key] = 'auto'
        if template[key].lower() in ['off', 'false']: template[key] = 'no'
        if template[key].lower() in ['on', 'true']: template[key] = 'yes'
    if 'pysar.deramp' in template.keys():
        template['pysar.deramp'] = template['pysar.deramp'].lower().replace(
            '-', '_')
    if 'pysar.troposphericDelay.method' in template.keys():
        template['pysar.troposphericDelay.method'] = template[
            'pysar.troposphericDelay.method'].lower().replace('-', '_')

    # work directory
    if not inps.work_dir:
        if pysar.miami_path and 'SCRATCHDIR' in os.environ:
            inps.work_dir = os.getenv(
                'SCRATCHDIR') + '/' + inps.project_name + "/PYSAR"
            print 'Use file/dir structure in University of Miami.'+\
                  '(To turn it off, change miami_path value to False in pysar/__init__.py)'
        else:
            inps.work_dir = os.getcwd()
    else:
        inps.work_dir = os.path.abspath(inps.work_dir)

    if not os.path.isdir(inps.work_dir): os.mkdir(inps.work_dir)
    os.chdir(inps.work_dir)
    print "Go to work directory: " + inps.work_dir

    #########################################
    # Loading Data
    #########################################
    print '\n*************** Load Data ****************'
    loadCmd = 'load_data.py ' + inps.template_file + ' --dir ' + inps.work_dir
    print loadCmd
    os.system(loadCmd)

    print '--------------------------------------------'
    ## Find initial files name/path - required files
    # 1. Unwrapped interferograms
    inps.ifgram_file = 'unwrapIfgram.h5'
    try:
        inps.ifgram_file = glob.glob(inps.work_dir + '/' + inps.ifgram_file)[0]
    except:
        inps.ifgram_file = None
    if inps.ifgram_file: print 'Unwrapped interferograms: ' + inps.ifgram_file
    else: sys.exit('\nERROR: No interferograms file found!\n')

    # 2. Mask
    inps.mask_file = 'Mask.h5'
    try:
        inps.mask_file = glob.glob(inps.work_dir + '/' + inps.mask_file)[0]
    except:
        inps.mask_file = None
    if not inps.mask_file:
        print 'No mask file found. Creating one using non-zero pixels in file: ' + inps.ifgram_file
        inps.mask_file = ut.nonzero_mask(inps.ifgram_file, inps.mask_file)
    print 'Mask: ' + inps.mask_file

    ## Find initial files name/path - recommended files (None if not found)
    # 3. Spatial coherence for each interferograms
    inps.coherence_file = 'coherence.h5'
    try:
        inps.coherence_file = glob.glob(inps.work_dir + '/' +
                                        inps.coherence_file)[0]
    except:
        inps.coherence_file = None
    if inps.coherence_file: print 'Coherences: ' + inps.coherence_file
    else:
Example 24
def main(argv):

    ########################## Check Inputs ################################################
    ## Default value
    Masking   = 'no'
    save_mask = 'no'
  
    if len(sys.argv) > 4:
        try: opts, args = getopt.getopt(argv,'h:f:m:o:s:t:y:',['help','save-mask'])
        except getopt.GetoptError:  print 'Error while getting args!\n';  Usage(); sys.exit(1)
  
        for opt,arg in opts:
            if   opt in ['-h','--help']:    Usage(); sys.exit()
            elif opt in '-f':    File     = arg
            elif opt in '-m':    maskFile = arg
            elif opt in '-o':    outName  = arg
            elif opt in '-s':    surfType = arg.lower()
            elif opt in '-t':    templateFile = arg
            elif opt in '-y':    ysub = [int(i) for i in arg.split(',')]
            elif opt in '--save-mask'  :    save_mask = 'yes'
  
    elif len(sys.argv) in [3,4]:
        File          = argv[0]
        surfType      = argv[1].lower()
        try: maskFile = argv[2]
        except: pass
    else: Usage(); sys.exit(1)
  
    print '\n*************** Phase Ramp Removal ***********************'
    ## Multiple Surfaces
    try:
        ysub
        if not len(ysub)%2 == 0:
            print 'ERROR: -y input has to have even length!'
            sys.exit(1)
        surfNum = len(ysub)/2
    except:
        surfNum = 1
    print 'phase ramp number: '+str(surfNum)
  
    ## Template File
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass

    try:        surfType
    except:
        try:    surfType = templateContents['pysar.orbitError.method']
        except: surfType = 'plane'; print 'No ramp type input, use plane as default'
    print 'phase ramp type  : '+surfType
  
    ## Input File(s)
    fileList = glob.glob(File)
    fileList = sorted(fileList)
    print 'input file(s): '+str(len(fileList))
    print fileList
  
    atr = readfile.read_attributes(fileList[0])
    length = int(atr['FILE_LENGTH'])
    width  = int(atr['WIDTH'])

    ## Output File(s)
    if   len(fileList) >  1:    outName = ''
    elif len(fileList) == 0:    print 'ERROR: Cannot find input file(s)!';  sys.exit(1)
    else:    ## Customized output name only works for single file input
Example 25
def main(argv):

    method = 'triangular_consistency'  ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'

    ##### Check Inputs
    if len(sys.argv) > 2:
        try:
            opts, args = getopt.getopt(argv, 'h:f:m:x:y:o:t:',
                                       ['ramp=', 'no-ramp-save'])
        except getopt.GetoptError:
            print 'Error while getting args'
            usage()
            sys.exit(1)

        for opt, arg in opts:
            if opt in ['-h', '--help']:
                usage()
                sys.exit()
            elif opt in '-f':
                File = arg
            elif opt in '-m':
                maskFile = arg
            elif opt in '-o':
                outName = arg
            elif opt in '-x':
                x = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-y':
                y = [int(i) for i in arg.split(',')]
                method = 'bonding_point'
            elif opt in '-t':
                templateFile = arg
            elif opt in '--ramp':
                ramp_type = arg.lower()
            elif opt in '--no-ramp-save':
                save_rampCor = 'no'

    elif len(sys.argv) == 2:
        if argv[0] in ['-h', '--help']:
            usage()
            sys.exit()
        elif os.path.isfile(argv[0]):
            File = argv[0]
            maskFile = argv[1]
        else:
            print 'Input file does not exist: ' + argv[0]
            sys.exit(1)

    else:
        usage()
        sys.exit(1)

    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except:
        pass

    try:
        yx = [
            int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')
        ]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except:
        pass

    ##### Read Mask File
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:
        maskFile
    except:
        try:
            maskFile = templateContents['pysar.mask.file']
        except:
            if os.path.isfile('Modified_Mask.h5'):
                maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):
                maskFile = 'Mask.h5'
            else:
                print 'No mask found!'
                sys.exit(1)
    try:
        Mask, Matr = readfile.read(maskFile)
        print 'mask: ' + maskFile
    except:
        print 'Can not open mask file: ' + maskFile
        sys.exit(1)

    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:
        outName
    except:
        outName = File.split('.')[0] + '_unwCor' + ext

    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'

        h5file = h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls, Triangles, C = ut.get_triangles(h5file)
        A, B = ut.design_matrix(h5file)
        ligram, lv = np.shape(B)
        lcurls = np.shape(curls)[0]
        print 'Number of all triangles: ' + str(lcurls)
        print 'Number of interferograms: ' + str(ligram)
        #print curls

        curlfile = 'curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile, h5file, Triangles, curls)

        thr = 0.50
        curls = np.array(curls)
        n1 = curls[:, 0]
        n2 = curls[:, 1]
        n3 = curls[:, 2]

        numPixels = sy * sx
        print 'reading interferograms...'
        data = np.zeros((ligram, numPixels), np.float32)
        for ni in range(ligram):
            dset = h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            data[ni] = d.flatten(1)

        print np.shape(data)
        print 'reading curls ...'
        h5curl = h5py.File(curlfile)
        curlList = h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls, numPixels), np.float32)
        for ni in range(lcurls):
            dset = h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0], 0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi = np.pi
        EstUnwrap = np.zeros((ligram, numPixels), np.float32)

        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]

        Mask = Mask.flatten(1)

        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])

            if Mask[ni] == 1:
                dU = data[:, ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:, ni])
                #print unwCurl

                ind = np.abs(unwCurl) >= thr
                N1 = n1[ind]
                N2 = n2[ind]
                N3 = n3[ind]
                indC = np.abs(unwCurl) < thr
                Nc1 = n1[indC]
                Nc2 = n2[indC]
                Nc3 = n3[indC]

                N = np.hstack([N1, N2, N3])
                UniN = np.unique(N)
                Nc = np.hstack([Nc1, Nc2, Nc3])
                UniNc = np.unique(Nc)

                inter = list(set(UniNc) & set(UniN))  # intersection
                UniNc = list(UniNc)
                for x in inter:
                    UniNc.remove(x)

                D = np.zeros([len(UniNc), ligram])
                for i in range(len(UniNc)):
                    D[i, UniNc[i]] = 1

                AAA = np.vstack([-2 * pi * C, D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B])
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])

                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA)
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'

                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi

                ##########
                # with Tikhonov regularization:
                AAAA = np.vstack([AAA, 0.25 * np.eye(ligram)])
                LLL = list(np.dot(C, dU)) + list(np.zeros(
                    np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind = np.isnan(AAAA)
                M1 = pinv(AAAA)
                M = np.dot(M1, LLL)
                EstUnwrap[:, ni] = np.round(M[0:ligram]) * 2.0 * np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])

            else:
                EstUnwrap[:, ni] = np.zeros([ligram])
                if not np.remainder(ni, 10000):
                    print 'Processing point: %7d of %7d ' % (ni, numPixels)

        ##### Output
        dataCor = data + EstUnwrap
        unwCorFile = File.replace('.h5', '') + '_unwCor.h5'
        print 'writing >>> ' + unwCorFile
        h5unwCor = h5py.File(unwCorFile, 'w')
        gg = h5unwCor.create_group('interferograms')
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i],
                                        data=np.reshape(
                                            dataCor[i, :], [sx, sy]).T,
                                        compression='gzip')
            for key, value in h5file['interferograms'][
                    ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value

        try:
            MASK = h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except:
            pass

        h5unwCor.close()
        h5file.close()
        h5curl.close()

    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'

        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x), 2) != 0:
                print 'Wrong number of bridge points input: ' + str(
                    len(x)) + ' for x, ' + str(len(y)) + ' for y'
                usage()
                sys.exit(1)
        except:
            print 'Error in reading bridge points info!'
            usage()
            sys.exit(1)
        for i in range(0, len(x)):
            if Mask[y[i], x[i]] == 0:
                print '\nERROR: Connecting point (' + str(y[i]) + ',' + str(
                    x[i]) + ') is out of masked area! Select them again!\n'
                sys.exit(1)

        print 'Number of bonding point pairs: ' + str(len(x) / 2)
        print 'Bonding points coordinates:\nx: ' + str(x) + '\ny: ' + str(y)

        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx = ''
            n_bridge = len(x) / 2
            for i in range(n_bridge):
                pair_yx = str(y[2 * i]) + ',' + str(x[2 * i]) + ',' + str(
                    y[2 * i + 1]) + ',' + str(x[2 * i + 1])
                if not i == n_bridge - 1:
                    point_yx += pair_yx + ','
                    line_yx += pair_yx + ';'
                else:
                    point_yx += pair_yx
                    line_yx += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except:
                pass

        ##### Ramp Info
        ramp_mask = Mask == 1
        print 'estimate phase ramp during the correction'
        print 'ramp type: ' + ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(
                ext)[0] + '_' + ramp_type + ext

        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:
                h5file = h5py.File(File, 'r')
            except:
                print 'ERROR: Cannot open input file: ' + File
                sys.exit(1)
            k = h5file.keys()
            if 'interferograms' in k:
                k[0] = 'interferograms'
                print 'Input file is ' + k[0]
            else:
                print 'Input file - ' + File + ' - is not interferograms.'
                usage()
                sys.exit(1)
            igramList = sorted(h5file[k[0]].keys())

            #### Write
            h5out = h5py.File(outName, 'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> ' + outName

            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp, 'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> ' + outName_ramp

            ##### Loop
            print 'Number of interferograms: ' + str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]

                data_ramp, ramp = rm.remove_data_surface(
                    data, ramp_mask, ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp, Mask, x, y)
                dataCor = data_rampCor - ramp

                group = gg.create_group(igram)
                dset = group.create_dataset(igram,
                                            data=dataCor,
                                            compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key] = value

                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram,
                                                     data=data_rampCor,
                                                     compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key] = value

            try:
                mask = h5file['mask'].get('mask')
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask',
                                         data=mask[0:mask.shape[0],
                                                   0:mask.shape[1]],
                                         compression='gzip')
            except:
                print 'no mask group found.'

            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is ' + ext
            a, data, atr = readfile.read_float32(File)

            data_ramp, ramp = rm.remove_data_surface(data, ramp_mask,
                                                     ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp, Mask, x, y)
            dataCor = data_rampCor - ramp

            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor, atr, outName_ramp)

        else:
            print 'Unsupported file type: ' + ext
            usage()
            sys.exit(1)
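A minimal 1-D sketch of the bonding-point idea used in the branch above, assuming a synthetic phase profile with a spurious 2*pi jump between two regions: the integer-cycle offset between a pair of bonding points is estimated and removed from the second region (the real phase_bonding works on 2-D masked regions and user-picked point pairs; names and sizes here are illustrative only).

import numpy as np

# 1-D toy: two connected regions separated by a spurious 2*pi jump
phase = np.r_[np.linspace(0.0, 1.0, 50), np.linspace(1.0, 2.0, 50) + 2*np.pi]
p_ref, p_adj = 49, 50                            # bonding points on either side of the jump

n_cycles = np.round((phase[p_adj] - phase[p_ref]) / (2*np.pi))
phase_cor = phase.copy()
phase_cor[p_adj:] -= n_cycles * 2*np.pi          # shift the second region by integer cycles
print(n_cycles)                                   # -> 1.0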
Esempio n. 26
0
def load_data_from_template(inps):
    '''Load dataset for PySAR time series using input template'''
    ##------------------------------------ Read Input Path -------------------------------------##
    # Initial value
    inps.unw = None
    inps.cor = None
    #inps.int = None
    inps.lut = None
    inps.dem_radar = None
    inps.dem_geo = None

    # 1.1 Read template contents (support multiple template files input)
    inps.template_file = [os.path.abspath(i) for i in inps.template_file]
    # Move default template file pysarApp_template.txt to the end of list, so that it has highest priority
    default_template_file = [i for i in inps.template_file if 'pysarApp' in i]
    if default_template_file:
        inps.template_file.remove(default_template_file[0])
        inps.template_file.append(default_template_file[0])
    template = dict()
    # Read file by file
    for File in inps.template_file:
        temp_dict = readfile.read_template(File)
        for key, value in temp_dict.iteritems():
            temp_dict[key] = ut.check_variable_name(value)
        template.update(temp_dict)
    keyList = template.keys()

    # Project Name
    if not inps.project_name:
        inps.template_filename = [
            os.path.basename(i) for i in inps.template_file
        ]
        try:
            inps.template_filename.remove('pysarApp_template.txt')
        except:
            pass
        if inps.template_filename:
            inps.project_name = os.path.splitext(inps.template_filename[0])[0]

    for key in [
            'processor', 'processing_software', 'unavco.processing_software',
            'pysar.insarProcessor'
    ]:
        if key in keyList:
            value = template[key]
            if value == 'auto':
                inps.insarProcessor = 'roipac'
            else:
                inps.insarProcessor = value

    print '--------------------------------------------'
    print 'InSAR processing software: ' + inps.insarProcessor
    if 'pysar.unwrapFiles' in keyList: inps.unw = template['pysar.unwrapFiles']
    if 'pysar.corFiles' in keyList: inps.cor = template['pysar.corFiles']
    if 'pysar.lookupFile' in keyList: inps.lut = template['pysar.lookupFile']
    if 'pysar.demFile.radarCoord' in keyList:
        inps.dem_radar = template['pysar.demFile.radarCoord']
    if 'pysar.demFile.geoCoord' in keyList:
        inps.dem_geo = template['pysar.demFile.geoCoord']

    # Check existed single dataset files
    inps_tmp = argparse.Namespace()
    inps_tmp = ut.check_loaded_dataset(inps.timeseries_dir,
                                       inps_tmp,
                                       print_msg=False)
    if (not inps.lut or inps.lut == 'auto') and inps_tmp.lookup_file:
        inps.lut = inps_tmp.lookup_file
    if (not inps.dem_radar
            or inps.dem_radar == 'auto') and inps_tmp.dem_radar_file:
        inps.dem_radar = inps_tmp.dem_radar_file
    if (not inps.dem_geo or inps.dem_geo == 'auto') and inps_tmp.dem_geo_file:
        inps.dem_geo = inps_tmp.dem_geo_file

    # 1.2 Auto Setting for Geodesy Lab - University of Miami
    if pysar.miami_path and 'SCRATCHDIR' in os.environ and inps.project_name:
        inps = auto_path_miami(inps, template)

    # 1.3 get snap_connect.byt path if .unw is input
    if inps.unw:
        inps.snap_connect = inps.unw.split('.unw')[0] + '_snap_connect.byt'
    else:
        inps.snap_connect = None

    # PYSAR directory
    if not inps.timeseries_dir:
        inps.timeseries_dir = os.getcwd()
    if not os.path.isdir(inps.timeseries_dir):
        os.makedirs(inps.timeseries_dir)
    #print "PySAR working directory: "+inps.timeseries_dir

    # TEMPLATE file directory (to support relative path input)
    inps.template_dir = os.path.dirname(inps.template_file[-1])
    os.chdir(inps.template_dir)
    print 'Go to TEMPLATE directory: ' + inps.template_dir
    print 'unwrapped interferograms to load: ' + str(inps.unw)
    #print 'wrapped   interferograms to load: '+str(inps.int)
    print 'spatial coherence  files to load: ' + str(inps.cor)
    print 'lookup table        file to load: ' + str(inps.lut)
    print 'DEM file in radar  coord to load: ' + str(inps.dem_radar)
    print 'DEM file in geo    coord to load: ' + str(inps.dem_geo)

    ##------------------------------------ Loading into HDF5 ---------------------------------------##
    # required - unwrapped interferograms
    inps.ifgram_file = load_file(inps.unw,
                                 vars(inps),
                                 file_type='interferograms')
    inps.coherence_file = load_file(inps.cor,
                                    vars(inps),
                                    file_type='coherence')
    #inps.wrap_ifgram_file = load_file(inps.int, vars(inps), file_type='wrapped')
    if inps.snap_connect:
        inps.snap_connect_file = load_file(inps.snap_connect, vars(inps))

    # optional but recommend files - single_dataset file
    inps.lookup_file = load_file(inps.lut, vars(inps), file_type='geometry')
    inps.dem_radar_file = load_file(inps.dem_radar,
                                    vars(inps),
                                    file_type='dem')
    inps.dem_geo_file = load_file(inps.dem_geo, vars(inps), file_type='dem')

    os.chdir(inps.timeseries_dir)
    print 'Go back to PYSAR directory: ' + inps.timeseries_dir
    return inps
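For reference, a hypothetical result of readfile.read_template() covering the keys consumed by the function above; all paths and values below are placeholders, not part of the original project.

template = {
    'pysar.insarProcessor'    : 'roipac',
    'pysar.unwrapFiles'       : '/data/PROJECT/DONE/IFG*/filt_*rlks.unw',
    'pysar.corFiles'          : '/data/PROJECT/DONE/IFG*/filt_*rlks.cor',
    'pysar.lookupFile'        : '/data/PROJECT/GEO/geomap_4rlks.trans',
    'pysar.demFile.radarCoord': '/data/PROJECT/DONE/radar_4rlks.hgt',
    'pysar.demFile.geoCoord'  : '/data/PROJECT/DEM/project.dem',
}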
Esempio n. 27
0
def read_template2inps(template_file, inps):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.networkInversion.'

    key = prefix+'residualNorm'
    if key in key_list and template[key] in ['L1']:
        inps.resid_norm = 'L1'
    else:
        inps.resid_norm = 'L2'

    key = prefix+'coherenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.coherence_file = 'coherence.h5'
        elif value in ['no']:
            inps.coherence_file = None
        else:
            inps.coherence_file = value

    key = prefix+'weightFunc'
    if key in key_list:
        value = template[key]
        if value in ['auto','no']:
            inps.weight_function = 'no'
        elif value.startswith(('lin','coh','cor')):
            inps.weight_function = 'linear'
        elif value.startswith('var'):
            inps.weight_function = 'variance'
        elif value.startswith(('fim','fisher')):
            inps.weight_function = 'fim'
        else:
            print 'Unrecognized input for %s = %s' % (key, value)
            sys.exit(-1)

    key = prefix+'waterMaskFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            maskFile = None
            #atr = readfile.read_attribute(inps.ifgram_file)
            #if 'Y_FIRST' in atr.keys():
            #    maskFile = 'geometryGeo.h5'
            #else:
            #    maskFile = 'geometryRadar.h5'
        else:
            maskFile = value
            try:
                data = readfile.read(maskFile, epoch='mask')[0]
                inps.water_mask_file = maskFile
            except:
                print 'Cannot find mask dataset in file: %s' % (maskFile)
                print 'Ignore this input water mask file option and continue.'

    return inps
Esempio n. 28
0
def main(argv):

    if len(sys.argv)>2:
        try:   opts, args = getopt.getopt(argv,"f:E:m:M:h:o:t:")
        except getopt.GetoptError:   Usage() ; sys.exit(1)
    
        for opt,arg in opts:
            if   opt == '-f':    timeSeriesFile   = arg
            elif opt == '-E':    datesNot2include = arg.replace(' ','').split(',')
            elif opt == '-m':    minDate          = arg
            elif opt == '-M':    maxDate          = arg
            elif opt == '-o':    outName          = arg
            elif opt == '-t':    templateFile     = arg
  
    elif len(sys.argv)==2:
        if   argv[0]=='-h':  Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):   timeSeriesFile = argv[0]
        else:  Usage(); sys.exit(1)
    else:  Usage(); sys.exit(1)    
  
    ##### Read excluded date list Input
    try:  datesNot2include
    except:
        try:
            templateFile
            templateContents = readfile.read_template(templateFile)
            datesNot2include = templateContents['pysar.drop.date'].replace(' ','').split(',')
        except: pass

    ##############################################################
    print '\n********** Inversion: Time Series to Velocity ***********'
    atr = readfile.read_attributes(timeSeriesFile)
    k = atr['FILE_TYPE']
    print 'input file: '+k
    if not k == 'timeseries':  print 'Input file is not timeseries!'; sys.exit(1)
    print "Loading time series file: " + timeSeriesFile
    h5timeseries = h5py.File(timeSeriesFile)
    dateList1 = h5timeseries[k].keys()
    dateList1 = sorted(dateList1)
  
    ##############################################################
    print '--------------------------------------------'
    print 'Dates from input file: '+str(len(dateList1))
    print dateList1
  
    try:
        datesNot2include
        if os.path.isfile(datesNot2include[0]):
            try:  datesNot2include = ptime.read_date_list(datesNot2include[0])
            except:  print 'Cannot read date list file: '+datesNot2include[0]
        print '--------------------------------------------'
        print 'Date excluded: '+str(len(datesNot2include))
        print datesNot2include
    except:
        datesNot2include=[]
  
    try:
        minDate
        minDateyy=yyyymmdd2years(minDate)
        print 'minimum date: '+minDate
        for date in dateList1:
            yy=yyyymmdd2years(date)
            if yy < minDateyy:
                print '  remove date: '+date
                datesNot2include.append(date)
    except: pass
  
    try:
        maxDate
        maxDateyy=yyyymmdd2years(maxDate) 
        print 'maximum date: '+maxDate
        for date in dateList1:
            yy=yyyymmdd2years(date)
            if yy > maxDateyy:
                print '  remove date: '+date
                datesNot2include.append(date)
    except: pass

    try:
        dateList=[]
        for date in dateList1:
            if date not in datesNot2include:
                dateList.append(date)
    except:  pass

    print '--------------------------------------------'
    if len(dateList) == len(dateList1):
        print 'using all dates to calculate the velocity'
    else:
        print 'Dates used to estimate the velocity: '+str(len(dateList))
        print dateList
    print '--------------------------------------------'

    ##############################################################
    dateIndex={}
    for ni in range(len(dateList)):
        dateIndex[dateList[ni]]=ni
    tbase=[]
    d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5])
    
    for ni in range(len(dateList)):
        d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5])
        diff = d2-d1
        tbase.append(diff.days)
  
    dates=[]
    for ni in range(len(dateList)):
        d = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5])
        dates.append(d)
  
    ###########################################
    print 'Calculating Velocity'
  
    datevector=[]
    for i in range(len(dates)):
        datevector.append(np.float(dates[i].year) + np.float(dates[i].month-1)/12 + np.float(dates[i].day-1)/365)
  
    B=np.ones([len(datevector),2])
    B[:,0]=datevector
    #B1 = np.linalg.pinv(B)
    B1 = np.dot(np.linalg.inv(np.dot(B.T,B)),B.T)
    B1 = np.array(B1,np.float32)

    #########################################
    width  = int(atr['WIDTH'])
    length = int(atr['FILE_LENGTH'])
    lt     = len(dateList)
    timeseries = np.zeros((lt,length,width),np.float32)
    for date in dateList:
        timeseries[dateIndex[date]] = h5timeseries[k].get(date)
  
    numpixels=length*width
    
    Data=np.zeros([lt,numpixels])
    for i in range(lt):
        Data[i,:]=np.reshape(timeseries[i],[1,numpixels])
  
    x=np.dot(B1,Data)
    velocity=np.reshape(x[0,:],[length,width])

    #####################################################
    print 'Calculating rmse'
    Data_linear=np.dot(B,x)
    rmse=np.reshape(np.sqrt((np.sum((Data_linear-Data)**2,0))/lt),[length,width])
    # se=np.reshape((np.sum(np.abs(Data_linear-Data),0)/lt),[length,width])
    # rmse=np.reshape((np.sum((Data_linear-Data)**2,0))/lt,[length,width])
    ######################################################
    print 'Calculating the standard deviation of the estimated velocities'
    residual=Data_linear-Data
    s1=np.sqrt(np.sum(residual**2,0)/(lt-2))
    s2=np.sqrt(np.sum((datevector-np.mean(datevector))**2))
    se=np.reshape(s1/s2,[length,width])
    ######################################################
     
    # SSt=np.sum((Data-np.mean(Data,0))**2,0)
    # SSres=np.sum(residual**2,0)
    # SS_REG=SSt-SSres
    # Rsquared=np.reshape(SS_REG/SSt,[length,width])
    ######################################################  
    # covariance of the velocities
  
    ##### Output File Name
    try:    outName
    except:
        if not datesNot2include == []: outName = 'velocity_ex.h5'
        else:                          outName = 'velocity.h5'
    outName_rmse='rmse_'+outName
    outName_se='std_'+outName
    outName_Rsquared='R2_'+outName
  
    #####################################
    print '--------------------------------------'
    print 'writing to '+outName
    h5velocity = h5py.File(outName,'w')
    group=h5velocity.create_group('velocity')
    dset = group.create_dataset('velocity', data=velocity, compression='gzip')
    group.attrs['date1'] = datevector[0]
    group.attrs['date2'] = datevector[lt-1]
    
    for key , value in atr.iteritems():
        group.attrs[key]=value
    h5velocity.close()  
  
    #####################################
    print 'writing to '+outName_rmse
    h5file = outName_rmse
    h5rmse = h5py.File(h5file,'w')
    group=h5rmse.create_group('rmse')
    dset = group.create_dataset(os.path.basename('rmse'), data=rmse, compression='gzip')
    group.attrs['date1'] = datevector[0]
    group.attrs['date2'] = datevector[lt-1]
  
    for key , value in atr.iteritems():
        group.attrs[key]=value  

    #####################################
    print 'writing to '+outName_se
    h5se = h5py.File(outName_se,'w')
    group=h5se.create_group('rmse')
    dset = group.create_dataset('rmse', data=se, compression='gzip')
    group.attrs['date1'] = datevector[0]
    group.attrs['date2'] = datevector[lt-1]
  
    for key , value in atr.iteritems():
        group.attrs[key]=value
  
    # print 'writing to '+outName_Rsquared
    # h5rsquared = h5py.File(outName_Rsquared,'w')
    # group=h5rsquared.create_group('rmse')
    # dset = group.create_dataset('rmse', data=Rsquared, compression='gzip')
    # group.attrs['date1'] = datevector[0]
    # group.attrs['date2'] = datevector[lt-1]
  
  
    # h5rsquared.close()
    h5se.close()
    h5rmse.close()
    h5timeseries.close()
    print 'Done.'
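A self-contained sketch of the velocity estimation performed above, assuming a small synthetic displacement matrix (dates x pixels) and decimal-year acquisition times: the same two-column design matrix [t, 1] is solved by least squares, the slope is the velocity, and the RMSE and slope standard error follow the same formulas.

import numpy as np

t = np.array([2015.0, 2015.5, 2016.0, 2016.5, 2017.0])          # decimal years
d = np.outer(t - t[0], [0.01, -0.02])                           # 5 dates x 2 pixels [m]
d += 0.001 * np.random.randn(*d.shape)                          # add noise

B = np.ones((len(t), 2))
B[:, 0] = t                                                     # design matrix [t, 1]
x = np.linalg.lstsq(B, d, rcond=None)[0]
velocity = x[0]                                                 # slope, m/yr per pixel

residual = np.dot(B, x) - d
rmse = np.sqrt(np.sum(residual**2, axis=0) / len(t))
se = np.sqrt(np.sum(residual**2, axis=0) / (len(t) - 2)) / np.sqrt(np.sum((t - t.mean())**2))
print(velocity, rmse, se)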
Esempio n. 29
0
def main(argv):

  lineWidth   = 2
  fontSize    = 12
  markerColor = 'orange'
  markerSize  = 16
  networkDisplay = 'no'

  if len(sys.argv)>2:

    try:
      opts, args = getopt.getopt(argv,"h:f:C:s:w:m:c:t:b:d:l:n:N:T:")
    except getopt.GetoptError:
      Usage() ; sys.exit(1)

    for opt,arg in opts:
      if opt in ("-h","--help"):
        Usage();  sys.exit()
      elif opt == '-f':        igramsFile     = arg
      elif opt == '-C':        corFile        = arg
      elif opt == '-s':        fontSize       = int(arg)
      elif opt == '-w':        lineWidth      = int(arg)
      elif opt == '-m':        markerSize     = int(arg)
      elif opt == '-c':        markerColor    = arg
      elif opt == '-t':        temp_thr       = float(arg)
      elif opt == '-b':        base_thr       = float(arg)
      elif opt == '-d':        dates2Rmv      = arg
      elif opt == '-l':        ifgrams_to_rmv = arg
      elif opt == '-n':        networkDisplay = arg
      elif opt == '-N':        ifgrams_Number_to_rmv = arg.split()
      elif opt == '-T':        templateFile   = arg

    try:  igramsFile
    except:  Usage() ; sys.exit(1)

  elif len(sys.argv)==2:
    igramsFile = argv[0]
    networkDisplay = 'yes'
  else:   Usage() ; sys.exit(1)

  ## display network for modification, if no other limit is set
  try:
    temp_thr
    base_thr
    dates2Rmv
    ifgrams_to_rmv
    ifgrams_Number_to_rmv
    networkDisplay = 'yes'
  except: pass

###########################################################
  h5file = h5py.File(igramsFile)
  if h5file.keys()[0] != 'interferograms':
      print 'Input file should be interferograms'
      Usage() ; sys.exit(1)
  ifgramList=h5file['interferograms'].keys()

  try:     ifgrams_to_rmv
  except:  ifgrams_to_rmv=[]

###########################################################

  #####  T - templateFile, pysar.dropIfgIndex
  try:
    templateFile
    import pysar._readfile as readfile
    template = readfile.read_template(templateFile)
    drop_ifg_index = template['pysar.dropIfgIndex'].split(',')
    print 'drop interferogram index:'
    print drop_ifg_index
    try:    ifgrams_Number_to_rmv
    except: ifgrams_Number_to_rmv = []
    for index in drop_ifg_index:
       index_temp = [int(i) for i in index.split(':')];    index_temp.sort()
       if   len(index_temp)==2:
           for j in range(index_temp[0],index_temp[1]+1):  ifgrams_Number_to_rmv.append(str(j))
       elif len(index_temp)==1:                            ifgrams_Number_to_rmv.append(index)
       else: print 'Unrecognized input: '+index
  except: pass

  #####  N - interferogram number list
  try:
    for i in ifgrams_Number_to_rmv:
       print i+'    '+ifgramList[int(i)]
       ifgrams_to_rmv.append(ifgramList[int(i)])
  except: pass

  #####  b - perpendicular baseline limit
  try:
    base_thr
    print 'interferograms with spatial baseline longer than '+str(base_thr)+' m will be removed'
    for ifgram in  ifgramList:
       Baseline = (float(h5file['interferograms'][ifgram].attrs['P_BASELINE_BOTTOM_HDR'])+\
                   float(h5file['interferograms'][ifgram].attrs['P_BASELINE_TOP_HDR']))/2
       if abs(Baseline) > base_thr:
         if not ifgram in ifgrams_to_rmv:   ifgrams_to_rmv.append(ifgram)
  except:    print 'No Spatial Baseline threshold applied'

  ##### d - dates to remove
  try:
    dates2Rmv
    print 'interferograms with any of the following dates will be removed: '+dates2Rmv
    for ifgram in  ifgramList:
      date1,date2 = h5file['interferograms'][ifgram].attrs['DATE12'].split('-')
      if (date1 in dates2Rmv) or (date2 in dates2Rmv):
         if not ifgram in ifgrams_to_rmv:   ifgrams_to_rmv.append(ifgram)
  except:   print 'No specific dates selected to remove'

  ##### t - temporal baseline limit
  tbase,dateList,dateDict,dateList1=ut.date_list(h5file)
  try:
    temp_thr
    print 'Applying temporal baseline threshold of '+str(temp_thr)+' days'
    for ifgram in  ifgramList:
       date1,date2 = h5file['interferograms'][ifgram].attrs['DATE12'].split('-')      
       ind1 = dateList1.index(date1)
       ind2 = dateList1.index(date2)
       dt=tbase[ind2]-tbase[ind1]
       if dt>temp_thr:
          if not ifgram in ifgrams_to_rmv:
            ifgrams_to_rmv.append(ifgram)
  except:
    print 'No Temporal Baseline threshold applied'

############################################################
############################################################
  if networkDisplay=='yes':
  
    tbase,dateList,dateDict,dateList1=ut.date_list(h5file)
    dateIndex={}
    for ni in range(len(dateList)):
      dateIndex[dateList[ni]]=ni
    tbase=[]
    d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5])

    for ni in range(len(dateList)):
      d2 = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5])
      diff = d2-d1
      tbase.append(diff.days)

    dates=[]
    for ni in range(len(dateList)):
      d = datetime.datetime(*time.strptime(dateList[ni],"%Y%m%d")[0:5])
      dates.append(d)

    datevector=[]
    for i in range(len(dates)):
      datevector.append(np.float(dates[i].year) + np.float(dates[i].month-1)/12 + np.float(dates[i].day-1)/365)
    datevector2=[round(i,2) for i in datevector]

##################################################  
    Bp = ut.Baseline_timeseries(igramsFile)
#############################################################
 
    ifgramList = h5file['interferograms'].keys()
    igram_pairs=np.zeros([len(ifgramList),2],np.int)
    i=0
    for ifgram in  ifgramList:
      date1,date2 = h5file['interferograms'][ifgram].attrs['DATE12'].split('-')
      igram_pairs[i][0]=dateList1.index(date1)
      igram_pairs[i][1]=dateList1.index(date2)
      i=i+1

############################################################
    import matplotlib.pyplot as plt
    fig1 = plt.figure(1)
    ax1=fig1.add_subplot(111)

    ax1.cla()
    # ax1.plot(dates,Bp, 'o',ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    print tbase
    ax1.plot(tbase,Bp, 'o',ms=markerSize, lw=lineWidth, alpha=0.7, mfc=markerColor)
    for ni in range(len(ifgramList)):
      ax1.plot(array([tbase[igram_pairs[ni][0]],tbase[igram_pairs[ni][1]]]),\
               array([Bp[igram_pairs[ni][0]],Bp[igram_pairs[ni][1]]]),'k',lw=4) 
    # ax1.fmt_xdata = DateFormatter('%Y-%m-%d %H:%M:%S')
    ax1.set_ylabel('Bperp [m]',fontsize=fontSize)
    ax1.set_xlabel('Time [years]',fontsize=fontSize)
    ts=datevector[0]+0.2
    te=datevector[-1]+0.2
    ys=int(ts)
    ye=int(te)
    ms=int((ts-ys)*12)
    me=int((te-ye)*12)
    if ms>12:       ys =ys+1;       ms=1
    if me>12:       ye =ye+1;       me=1
    if ms<1:        ys =ys-1;       ms=12
    if me<1:        ye =ye-1;       me=12

    dss=datetime.datetime(ys,ms,1,0,0)
    dee=datetime.datetime(ye,me,1,0,0)
    ax1.set_ylim(min(Bp)-0.4*abs(min(Bp)),max(Bp)+0.4*max(Bp))

    xticklabels = getp(gca(), 'xticklabels')
    yticklabels = getp(gca(), 'yticklabels')
    setp(yticklabels, 'color', 'k', fontsize=fontSize)
    setp(xticklabels, 'color', 'k', fontsize=fontSize)

##########################################  
    x=[]
    y=[]
    Master_index_torremove=[]
    Slave_index_torremove=[]
    a_tbase=array(tbase)
    a_Bp=array(Bp)
    def onclick(event):
      if event.button==1:
        print 'click'
        xClick = event.xdata
        yClick = event.ydata
        idx=nearest_neighbor(xClick,yClick, a_tbase, a_Bp)       
        xr = a_tbase[idx]
        yr = a_Bp[idx]
        ix=tbase.index(xr)+1
        print ix
        x.append(xr)
        y.append(yr)
        if mod(len(x),2)==0:
           Master_index_torremove.append(tbase.index(xr))
           ax1.plot([x[len(x)-1],x[len(x)-2]],[y[len(x)-1],y[len(x)-2]],'r',lw=4)
        else:
           Slave_index_torremove.append(tbase.index(xr))
      plt.show()
    cid = fig1.canvas.mpl_connect('button_press_event', onclick)


    plt.show()
    print Master_index_torremove
    print Slave_index_torremove

    if len(Master_index_torremove) == len(Slave_index_torremove):
       R=np.vstack((Master_index_torremove,Slave_index_torremove))
    else:
       R=np.vstack((Master_index_torremove[:-1],Slave_index_torremove))

    R.sort(0)
    print R
    print dateList1
    numIgrams_rmv=np.shape(R)[1]
    for ifgram in  ifgramList:
       date1,date2 = h5file['interferograms'][ifgram].attrs['DATE12'].split('-')
       for i in range(numIgrams_rmv):
           if dateList1[R[0][i]]==date1 and dateList1[R[1][i]]==date2:
               ifgrams_to_rmv.append(ifgram)

  else:
    print 'No network display.'
############################################################
############################################################

  print 'The list of interferograms to remove:' 
  print ifgrams_to_rmv
  igramsFile_modified='Modified_'+igramsFile
  h5filem = h5py.File(igramsFile_modified,'w')
  gg = h5filem.create_group('interferograms')
  ifgram=ifgramList[0]
  unw = h5file['interferograms'][ifgram].get(ifgram)
  MaskZero=np.ones([unw.shape[0],unw.shape[1]])

  print 'writing the modified interferogram file ...'
  for ifgram in  ifgramList:
     if not ifgram in ifgrams_to_rmv:
        print ifgram
        unwSet = h5file['interferograms'][ifgram].get(ifgram)
        unw = unwSet[0:unwSet.shape[0],0:unwSet.shape[1]]        
        MaskZero=unw*MaskZero
        group = gg.create_group(ifgram)
        dset = group.create_dataset(ifgram, data=unw, compression='gzip')
        for key, value in h5file['interferograms'][ifgram].attrs.iteritems():
           group.attrs[key] = value

  Mask=np.ones([unwSet.shape[0],unwSet.shape[1]])
  Mask[MaskZero==0]=0

  # updating Coherence file
  # convert ifgrams_to_rmv to cor_to_rmv
  date12_to_rmv=[]
  for igram in ifgrams_to_rmv:
     date12_to_rmv.append(igram.split('-sim')[0].split('filt_')[-1])

  try:
     corFile
     h5fileCor=h5py.File(corFile)
     corList=h5fileCor['coherence'].keys()

     corFile_modified='Modified_'+corFile
     h5fileCorm=h5py.File(corFile_modified,'w')
     gc = h5fileCorm.create_group('coherence')
     print 'writing the modified coherence file ...'
     for cor in corList:
        date12=cor.split('-sim')[0].split('filt_')[-1]
        if not date12 in date12_to_rmv:
           print cor
           unwSet = h5fileCor['coherence'][cor].get(cor)
           unw = unwSet[0:unwSet.shape[0],0:unwSet.shape[1]]
           group = gc.create_group(cor)
           dset = group.create_dataset(cor, data=unw, compression='gzip')
           for key, value in h5fileCor['coherence'][cor].attrs.iteritems():
              group.attrs[key] = value  
  except:
     print 'No coherence file to be updated.'

########################################################################

  print 'writing Modified_Mask.h5'
  
  h5mask = h5py.File('Modified_Mask.h5','w')
  group=h5mask.create_group('mask')
  dset = group.create_dataset(os.path.basename('mask'), data=Mask, compression='gzip')
  h5mask.close()      

  gm = h5filem.create_group('mask')
  dset = gm.create_dataset('mask', data=Mask, compression='gzip')

  h5file.close()
  h5filem.close()
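A minimal sketch of the perpendicular-baseline filter applied above, using a plain dictionary of synthetic mean baselines in place of the HDF5 attributes (pair names and values are illustrative only).

base_thr = 400.0                                   # [m]
bperp = {'100102-100405': 120.0,
         '100102-100708': 650.0,
         '100405-100708': 380.0}                   # hypothetical mean P_BASELINE per pair

ifgrams_to_rmv = [pair for pair, b in bperp.items() if abs(b) > base_thr]
print(ifgrams_to_rmv)                               # -> ['100102-100708']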
Esempio n. 30
0
def main(argv):
    inps = cmdLineParse()
    #print '\n*************** Loading ROI_PAC Data into PySAR ****************'
    inps.project_name = os.path.splitext(os.path.basename(inps.template_file))[0]
    print 'project: '+inps.project_name
    
    ##### 1. Read file path
    # Priority: command line input > template > auto setting
    # Read template contents into inps Namespace
    inps.template_file = os.path.abspath(inps.template_file)
    template_dict = readfile.read_template(inps.template_file)
    for key, value in template_dict.iteritems():
        template_dict[key] = ut.check_variable_name(value)
    keyList = template_dict.keys()
    
    if not inps.unw and 'pysar.unwrapFiles'     in keyList:   inps.unw = template_dict['pysar.unwrapFiles']
    if not inps.cor and 'pysar.corFiles'        in keyList:   inps.cor = template_dict['pysar.corFiles']
    if not inps.int and 'pysar.wrapFiles'       in keyList:   inps.int = template_dict['pysar.wrapFiles']
    if not inps.geomap    and 'pysar.geomap'    in keyList:   inps.geomap    = template_dict['pysar.geomap']
    if not inps.dem_radar and 'pysar.dem.radarCoord' in keyList:   inps.dem_radar = template_dict['pysar.dem.radarCoord']
    if not inps.dem_geo   and 'pysar.dem.geoCoord'   in keyList:   inps.dem_geo   = template_dict['pysar.dem.geoCoord']

    # Auto Setting for Geodesy Lab - University of Miami 
    if pysar.miami_path and 'SCRATCHDIR' in os.environ:
        inps = auto_path_miami(inps, template_dict)

    # TIMESERIES directory for PySAR
    if not inps.timeseries_dir:
        inps.timeseries_dir = os.getcwd()
    if not os.path.isdir(inps.timeseries_dir):
        os.mkdir(inps.timeseries_dir)
    print "PySAR working directory: "+inps.timeseries_dir
    
    # TEMPLATE file directory (to support relative path input)
    inps.template_dir = os.path.dirname(inps.template_file)
    os.chdir(inps.template_dir)
    print 'Go to TEMPLATE directory: '+inps.template_dir

    # Get all file list
    inps.snap_connect = []
    if inps.unw:
        print 'unwrapped interferograms: '+str(inps.unw)
        inps.snap_connect = inps.unw.split('.unw')[0]+'_snap_connect.byt'
        inps.snap_connect = sorted(glob.glob(inps.snap_connect))
        inps.unw = sorted(glob.glob(inps.unw))
    if inps.int:
        print 'wrapped   interferograms: '+str(inps.int)
        inps.int = sorted(glob.glob(inps.int))
    if inps.cor:
        print 'coherence files: '+str(inps.cor)
        inps.cor = sorted(glob.glob(inps.cor))
    
    try:    inps.geomap = glob.glob(inps.geomap)[0]
    except: inps.geomap = None
    try:    inps.dem_radar = glob.glob(inps.dem_radar)[-1]
    except: inps.dem_radar = None
    try:    inps.dem_geo = glob.glob(inps.dem_geo)[0]
    except: inps.dem_geo = None
    print 'geomap file: '+str(inps.geomap)
    print 'DEM file in radar coord: '+str(inps.dem_radar)
    print 'DEM file in geo   coord: '+str(inps.dem_geo)

    ##### 2. Load data into hdf5 file
    inps.ifgram_file     = inps.timeseries_dir+'/unwrapIfgram.h5'
    inps.coherence_file  = inps.timeseries_dir+'/coherence.h5'
    inps.wrapIfgram_file = inps.timeseries_dir+'/wrapIfgram.h5'
    inps.snap_connect_file = inps.timeseries_dir+'/snaphuConnectComponent.h5'
    inps.mask_file = inps.timeseries_dir+'/Mask.h5'
    inps.spatial_coherence_file = inps.timeseries_dir+'/average_spatial_coherence.h5'
    
    # 2.1 multi_group_hdf5_file
    # Unwrapped Interferograms
    if inps.unw:
        unwList = load_roipac2multi_group_h5('interferograms', inps.unw, inps.ifgram_file, vars(inps))[1]
        # Update mask only when update unwrapIfgram.h5
        if unwList:
            print 'Generate mask from amplitude of interferograms'
            roipac_nonzero_mask(inps.unw, inps.mask_file)
    elif os.path.isfile(inps.ifgram_file):
        print os.path.basename(inps.ifgram_file)+' already exists, no need to re-load.'
    else:
        sys.exit('ERROR: Cannot load/find unwrapped interferograms!')

    # Optional
    if inps.snap_connect:
        load_roipac2multi_group_h5('snaphu_connect_component', inps.snap_connect, inps.snap_connect_file, vars(inps))

    # Coherence
    if inps.cor:
        cohFile,corList = load_roipac2multi_group_h5('coherence', inps.cor, inps.coherence_file, vars(inps))
        if corList:
            meanCohCmd = 'temporal_average.py '+cohFile+' '+inps.spatial_coherence_file
            print meanCohCmd
            os.system(meanCohCmd)
    elif os.path.isfile(inps.coherence_file):
        print os.path.basename(inps.coherence_file)+' already exists, no need to re-load.'
    else:
        print 'WARNING: Cannot load/find coherence.'

    # Wrapped Interferograms
    if inps.int:
        load_roipac2multi_group_h5('wrapped', inps.int, inps.wrapIfgram_file, vars(inps))
    elif os.path.isfile(inps.wrapIfgram_file):
        print os.path.basename(inps.wrapIfgram_file)+' already exists, no need to re-load.'
    else:
        print "WARNING: Cannot load/find wrapped interferograms. It's okay, continue without it ..."

    # 2.2 single dataset file
    if inps.geomap:
        copy_roipac_file(inps.geomap, inps.timeseries_dir)

    if inps.dem_radar:
        copy_roipac_file(inps.dem_radar, inps.timeseries_dir)

    if inps.dem_geo:
        copy_roipac_file(inps.dem_geo, inps.timeseries_dir)
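A minimal h5py sketch of the multi-group layout the loader above produces (one root group per file type, one subgroup per interferogram, a dataset named after the subgroup, plus attributes and a 'mask' group); the file name, pair name and sizes below are placeholders.

import numpy as np
import h5py

pair = 'filt_100102-100405-sim_HDR_4rlks_c10.unw'   # hypothetical pair name
data = np.zeros((100, 200), dtype=np.float32)       # placeholder unwrapped phase

with h5py.File('unwrapIfgram_demo.h5', 'w') as f:
    gg = f.create_group('interferograms')
    group = gg.create_group(pair)
    group.create_dataset(pair, data=data, compression='gzip')
    group.attrs['WIDTH'] = '200'
    group.attrs['FILE_LENGTH'] = '100'
    gm = f.create_group('mask')
    gm.create_dataset('mask', data=np.ones_like(data), compression='gzip')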
Esempio n. 31
0
def main(argv):

    method    = 'triangular_consistency'    ## or 'bonding_point'
    ramp_type = 'plane'
    save_rampCor = 'yes'
    plot_bonding_points = 'yes'
  
    ##### Check Inputs
    if len(sys.argv)>2:
        try: opts, args = getopt.getopt(argv,'h:f:m:x:y:o:t:',['ramp=','no-ramp-save'])
        except getopt.GetoptError:  print 'Error while getting args';  Usage(); sys.exit(1)
  
        for opt,arg in opts:
            if   opt in ['-h','--help']:    Usage(); sys.exit()
            elif opt in '-f':    File     = arg
            elif opt in '-m':    maskFile = arg
            elif opt in '-o':    outName  = arg
            elif opt in '-x':    x = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-y':    y = [int(i) for i in arg.split(',')];    method = 'bonding_point'
            elif opt in '-t':    templateFile = arg
            elif opt in '--ramp'         :  ramp_type    = arg.lower()
            elif opt in '--no-ramp-save' :  save_rampCor = 'no'
  
    elif len(sys.argv)==2:
        if argv[0] in ['-h','--help']:    Usage();  sys.exit()
        elif os.path.isfile(argv[0]):     File = argv[0];  maskFile = argv[1]
        else:    print 'Input file does not exist: '+argv[0];  sys.exit(1)
  
    else:  Usage(); sys.exit(1)
  
    ##### Check template file
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass
  
    try:
        yx = [int(i) for i in templateContents['pysar.unwrapError.yx'].split(',')]
        x = yx[1::2]
        y = yx[0::2]
        method = 'bonding_point'
    except: pass

    ##### Read Mask File 
    ## Priority:
    ## Input mask file > pysar.mask.file > existing Modified_Mask.h5 > existing Mask.h5
    try:       maskFile
    except:
        try:    maskFile = templateContents['pysar.mask.file']
        except:
            if   os.path.isfile('Modified_Mask.h5'):  maskFile = 'Modified_Mask.h5'
            elif os.path.isfile('Mask.h5'):           maskFile = 'Mask.h5'
            else: print 'No mask found!'; sys.exit(1)
    try:    Mask,Matr = readfile.read(maskFile);   print 'mask: '+maskFile
    except: print 'Cannot open mask file: '+maskFile; sys.exit(1)
  
    ##### Output file name
    ext = os.path.splitext(File)[1]
    try:    outName
    except: outName = File.split('.')[0]+'_unwCor'+ext
  
    print '\n**************** Unwrapping Error Correction ******************'

    ####################  Triangular Consistency (Phase Closure)  ####################
    if method == 'triangular_consistency':
        print 'Phase unwrapping error correction using Triangular Consistency / Phase Closure'
  
        h5file=h5py.File(File)
        ifgramList = h5file['interferograms'].keys()
        sx = int(h5file['interferograms'][ifgramList[0]].attrs['WIDTH'])
        sy = int(h5file['interferograms'][ifgramList[0]].attrs['FILE_LENGTH'])
        curls,Triangles,C=ut.get_triangles(h5file)
        A,B = ut.design_matrix(h5file)   
        ligram,lv=np.shape(B)
        lcurls=np.shape(curls)[0]
        print 'Number of all triangles: '+  str(lcurls)
        print 'Number of interferograms: '+ str(ligram)
        #print curls
  
        curlfile='curls.h5'
        if not os.path.isfile(curlfile):
            ut.generate_curls(curlfile,h5file,Triangles,curls)
         
        thr=0.50
        curls=np.array(curls);   n1=curls[:,0];   n2=curls[:,1];   n3=curls[:,2]
  
        numPixels=sy*sx
        print 'reading interferograms...'   
        data = np.zeros((ligram,numPixels),np.float32)
        for ni in range(ligram):
            dset=h5file['interferograms'][ifgramList[ni]].get(ifgramList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            data[ni] = d.flatten(1)   
  
        print np.shape(data)
        print 'reading curls ...' 
        h5curl=h5py.File(curlfile)
        curlList=h5curl['interferograms'].keys()
        curlData = np.zeros((lcurls,numPixels),np.float32)
        for ni in range(lcurls):
            dset=h5curl['interferograms'][curlList[ni]].get(curlList[ni])
            d = dset[0:dset.shape[0],0:dset.shape[1]]
            curlData[ni] = d.flatten(1)
        pi=np.pi
        EstUnwrap=np.zeros((ligram,numPixels),np.float32)
  
        #try:
        #    maskFile=argv[1]
        #    h5Mask=h5py.File(maskFile)
        #    dset = h5Mask['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        #except:
        #    dset = h5file['mask'].get('mask')
        #    Mask=dset[0:dset.shape[0],0:dset.shape[1]]
        
        Mask=Mask.flatten(1)

        from scipy.linalg import pinv as pinv
        for ni in range(numPixels):
            #dU = np.zeros([ligram,1])
            #print np.shape(dU)
            #print np.shape(data[:,ni])
  
            if Mask[ni]==1:
                dU = data[:,ni]
                #nan_ndx = dataPoint == 0.
                unwCurl = np.array(curlData[:,ni])
                #print unwCurl
  
                ind  = np.abs(unwCurl)>=thr;      N1 =n1[ind];      N2 =n2[ind];      N3 =n3[ind]
                indC = np.abs(unwCurl)< thr;      Nc1=n1[indC];     Nc2=n2[indC];     Nc3=n3[indC]
  
                N =np.hstack([N1, N2, N3]);       UniN =np.unique(N)
                Nc=np.hstack([Nc1,Nc2,Nc3]);      UniNc=np.unique(Nc)
  
                inter=list(set(UniNc) & set(UniN)) # intersection
                UniNc= list(UniNc)
                for x in inter:
                    UniNc.remove(x)
  
                D=np.zeros([len(UniNc),ligram])
                for i in range(len(UniNc)):
                    D[i,UniNc[i]]=1
  
                AAA=np.vstack([-2*pi*C,D])
                #AAA1=np.hstack([AAA,np.zeros([AAA.shape[0],lv])])
                #AAA2=np.hstack([-2*pi*np.eye(ligram),B]) 
                #AAAA=np.vstack([AAA1,AAA2])
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
  
                #print '************************'
                #print np.linalg.matrix_rank(C)
                #print np.linalg.matrix_rank(AAA) 
                #print np.linalg.matrix_rank(AAAA)
                #print '************************'
  
                #LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0]))# + list(dU)
                #ind=np.isnan(AAA)
                #M1=pinv(AAA)      
                #M=np.dot(M1,LLL)
                #EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
  
                ##########
                # with Tikhonov regularization:
                AAAA=np.vstack([AAA,0.25*np.eye(ligram)])
                LLL=list(np.dot(C,dU)) + list(np.zeros(np.shape(UniNc)[0])) + list(np.zeros(ligram))
                ind=np.isnan(AAAA)
                M1=pinv(AAAA)
                M=np.dot(M1,LLL)
                EstUnwrap[:,ni]=np.round(M[0:ligram])*2.0*np.pi
                #print M[0:ligram]
                #print np.round(M[0:ligram])
  
            else:
                EstUnwrap[:,ni]=np.zeros([ligram])
                if not np.remainder(ni,10000): print 'Processing point: %7d of %7d ' % (ni,numPixels)

        ##### Output
        dataCor = data+EstUnwrap
        unwCorFile=File.replace('.h5','')+'_unwCor.h5';  print 'writing >>> '+unwCorFile
        h5unwCor=h5py.File(unwCorFile,'w') 
        gg = h5unwCor.create_group('interferograms') 
        for i in range(ligram):
            group = gg.create_group(ifgramList[i])
            dset = group.create_dataset(ifgramList[i], data=np.reshape(dataCor[i,:],[sx,sy]).T, compression='gzip')
            for key, value in h5file['interferograms'][ifgramList[i]].attrs.iteritems():
                group.attrs[key] = value
  
        try:
            MASK=h5file['mask'].get('mask')
            gm = h5unwCor.create_group('mask')
            dset = gm.create_dataset('mask', data=MASK, compression='gzip')
        except: pass
  
        h5unwCor.close()
        h5file.close()
        h5curl.close() 


    ####################  Bonding Points (Spatial Continuity)  ####################
    elif method == 'bonding_point':
        print 'Phase unwrapping error correction using Bonding Points / Spatial Continuity'
  
        ##### Read Bridge Points Info
        try:
            x
            y
            if len(x) != len(y) or np.mod(len(x),2) != 0:
                print 'Wrong number of bridge points input: '+str(len(x))+' for x, '+str(len(y))+' for y'
                Usage();  sys.exit(1)
        except: print 'Error in reading bridge points info!';  Usage();  sys.exit(1)
        for i in range(0,len(x)):
            if Mask[y[i],x[i]] == 0:
                print '\nERROR: Connecting point ('+str(y[i])+','+str(x[i])+') is out of masked area! Select them again!\n'
                sys.exit(1)
  
        print 'Number of bonding point pairs: '+str(len(x)/2)
        print 'Bonding points coordinates:\nx: '+str(x)+'\ny: '+str(y)
  
        ## Plot Connecting Pair of Points
        if plot_bonding_points == 'yes':
            point_yx = ''
            line_yx  = ''
            n_bridge = len(x)/2
            for i in range(n_bridge):
                pair_yx = str(y[2*i])+','+str(x[2*i])+','+str(y[2*i+1])+','+str(x[2*i+1])
                if not i == n_bridge-1:
                    point_yx += pair_yx+','
                    line_yx  += pair_yx+';'
                else:
                    point_yx += pair_yx
                    line_yx  += pair_yx

            try:
                plot_cmd = 'view.py --point-yx="'+point_yx+'" --line-yx="'+line_yx+\
                           '" --nodisplay -o bonding_points.png -f '+maskFile
                print plot_cmd
                os.system(plot_cmd)
            except: pass


        ##### Ramp Info
        ramp_mask = Mask==1
        print 'estimate phase ramp during the correction'
        print 'ramp type: '+ramp_type
        if save_rampCor == 'yes':
            outName_ramp = os.path.basename(outName).split(ext)[0]+'_'+ramp_type+ext
  
        ########## PySAR ##########
        if ext == '.h5':
            ##### Read
            try:     h5file=h5py.File(File,'r')
            except:  print 'ERROR: Cannot open input file: '+File; sys.exit(1)
            k=h5file.keys()
            if 'interferograms' in k: k[0] = 'interferograms';  print 'Input file is '+k[0]
            else: print 'Input file - '+File+' - is not interferograms.';  Usage();  sys.exit(1)
            igramList = h5file[k[0]].keys()
            igramList = sorted(igramList)
  
            #### Write
            h5out = h5py.File(outName,'w')
            gg = h5out.create_group(k[0])
            print 'writing >>> '+outName
  
            if save_rampCor == 'yes':
                h5out_ramp = h5py.File(outName_ramp,'w')
                gg_ramp = h5out_ramp.create_group(k[0])
                print 'writing >>> '+outName_ramp
  
            ##### Loop
            print 'Number of interferograms: '+str(len(igramList))
            for igram in igramList:
                print igram
                data = h5file[k[0]][igram].get(igram)[:]
  
                data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
                #ramp = data_ramp - data
                data_rampCor = phase_bonding(data_ramp,Mask,x,y)
                dataCor = data_rampCor - ramp
  
                group = gg.create_group(igram)
                dset = group.create_dataset(igram, data=dataCor, compression='gzip')
                for key, value in h5file[k[0]][igram].attrs.iteritems():
                    group.attrs[key]=value
  
                if save_rampCor == 'yes':
                    group_ramp = gg_ramp.create_group(igram)
                    dset = group_ramp.create_dataset(igram, data=data_rampCor, compression='gzip')
                    for key, value in h5file[k[0]][igram].attrs.iteritems():
                        group_ramp.attrs[key]=value
  
            try:
                mask = h5file['mask'].get('mask');
                gm = h5out.create_group('mask')
                dset = gm.create_dataset('mask', data=mask[0:mask.shape[0],0:mask.shape[1]], compression='gzip')
            except: print 'no mask group found.'
  
            h5file.close()
            h5out.close()
            if save_rampCor == 'yes':
                h5out_ramp.close()

        ########## ROI_PAC ##########
        elif ext == '.unw':
            print 'Input file is '+ext
            a,data,atr = readfile.read_float32(File);
  
            data_ramp,ramp = rm.remove_data_surface(data,ramp_mask,ramp_type)
            #ramp = data_ramp - data
            data_rampCor = phase_bonding(data_ramp,Mask,x,y)
            dataCor = data_rampCor - ramp
  
            writefile.write(dataCor, atr, outName)
            if save_rampCor == 'yes':
                writefile.write(data_rampCor,atr,outName_ramp)
  
        else: print 'Unsupported file type: '+ext;  Usage();  sys.exit(1)
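A minimal sketch of the closure (curl) test that drives the triangular_consistency branch above: for a loop of three interferograms the consistent sum should be near zero, so a residual close to a multiple of 2*pi flags an unwrapping error (synthetic single-pixel values, illustrative only).

import numpy as np

C = np.array([[1., 1., -1.]])                # one loop: ifg12 + ifg23 - ifg13
phi = np.array([1.0, 2.0, 3.0 + 2*np.pi])    # 2*pi unwrapping error injected in ifg13

closure = np.dot(C, phi)                     # ~ -2*pi instead of ~ 0
n_cycles = np.round(closure / (2*np.pi))     # integer cycle residual of the loop
print(closure, n_cycles)                      # -> [-6.283...] [-1.]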
Esempio n. 32
0
def main(argv):

    global method_default
    ##### Referencing methods
    method_default = 'max_coherence'
    #method = 'manual'
    #method = 'max_coherence'        ## Use phase on point with max coherence [default]
    #method = 'global_average'       ## Use Nan Mean of phase on all pixels
    #method = 'random'
    #maskFile = 'Mask.h5'

    global SeedingDone
    
    ############################## Check Inputs ##############################
    if len(sys.argv) > 2:
        try:  opts, args = getopt.getopt(argv,"h:c:f:m:y:x:l:L:t:o:r:",\
                                         ['manual','max-coherence','global-average','random'])
        except getopt.GetoptError:  Usage() ; sys.exit(1)

        for opt,arg in opts:
            if   opt in ("-h","--help"):   Usage();  sys.exit()
            elif opt == '-f':        File     = arg
            elif opt == '-m':        maskFile = arg
            elif opt == '-c':        corFile  = arg
            elif opt == '-o':        outFile  = arg

            elif opt == '-y':        ry       = int(arg)
            elif opt == '-x':        rx       = int(arg)
            elif opt == '-l':        rlat     = float(arg)
            elif opt == '-L':        rlon     = float(arg)
            elif opt == '-r':        refFile  = arg
            elif opt == '-t':        templateFile = arg

            elif opt == '--global-average' :  method = 'global_average'
            elif opt == '--manual'         :  method = 'manual'
            elif opt == '--max-coherence'  :  method = 'max_coherence'
            elif opt == '--random'         :  method = 'random'

    elif len(sys.argv)==2:
        if   argv[0]=='-h':            Usage(); sys.exit(1)
        elif os.path.isfile(argv[0]):  File = argv[0]
        else:  print 'Input file does not exist: '+argv[0];  sys.exit(1)
    elif len(sys.argv)<2:             Usage(); sys.exit(1)

    ##### Input File Info
    try:
        File
        atr = readfile.read_attributes(File)
        k = atr['FILE_TYPE']
        length = int(atr['FILE_LENGTH'])
        width  = int(atr['WIDTH'])
    except:  Usage() ; sys.exit(1)
    ext = os.path.splitext(File)[1].lower()

    try:    outFile
    except: outFile = 'Seeded_'+File
  
    ############################## Reference Point Input ####################
    try:
        refFile
        atr_ref = readfile.read_attributes(refFile)
    except: pass
  
    try:
        templateFile
        templateContents = readfile.read_template(templateFile)
    except: pass

    ### Priority
    ## lat/lon > y/x
    ## Direct Input > Reference File > Template File
    try:
        rlat
        rlon
    except:
        try:
            rlat = float(atr_ref['ref_lat'])
            rlon = float(atr_ref['ref_lon'])
        except:
            try: rlat,rlon = [float(i) for i in templateContents['pysar.seed.lalo'].split(',')]
            except: pass

    try:
        ry
        rx
    except:
        try:
            ry = int(atr_ref['ref_y'])
            rx = int(atr_ref['ref_x'])
        except:
            try: ry,rx       = [int(i)   for i in templateContents['pysar.seed.yx'].split(',')]
            except: pass

    ##### Check lalo / YX
    print '\n************** Reference Point ******************'
    try:
        rlat
        rlon
        y = sub.coord_geo2radar(rlat,atr,'lat')
        x = sub.coord_geo2radar(rlon,atr,'lon')
        0<= x <= width
        0<= y <= length
        rx = x
        ry = y
        print 'Reference point: lat = %.4f,   lon = %.4f'%(rlat,rlon)
        print '                 y   = %d,     x   = %d'%(ry,rx)
    except:
        print 'Skip input lat/lon reference point.'
        print 'Continue with the y/x reference point.'


    ######################### a. Read Mask File #########################
    ## Priority: Input mask file > pysar.mask.file 
    try:     maskFile
    except:
        try: maskFile = templateContents['pysar.mask.file']
        except:  print 'No mask found!';
    try:
        M,Matr = readfile.read(maskFile);
        print 'mask: '+maskFile
    except:
        print '---------------------------------------------------------'
        print 'WARNING: No mask, use the whole area as mask'
        print '---------------------------------------------------------'
        M = np.ones((length,width))

    ## Message
    try:
        rx
        ry
        0<= rx <= width
        0<= ry <= length
        if M[ry,rx] == 0:
            print 'Input point has 0 value in mask.'
    except: pass

    ######################### b. Stack ##################################
    stackFile = os.path.basename(File).split(ext)[0] + '_stack.h5'
    stack_file_exist = 'no'
    try:
        os.path.isfile(stackFile)
        stack,atrStack = readfile.read(stackFile)
        if width == int(atrStack['WIDTH']) and length == int(atrStack['FILE_LENGTH']):
            stack_file_exist = 'yes'
            print 'read stack from file: '+stackFile
    except: pass

    if stack_file_exist == 'no':
        print 'calculating stack of input file ...'
        stack = ut.stacking(File)
        atrStack = atr.copy()
        atrStack['FILE_TYPE'] = 'mask'
        writefile.write(stack,atrStack,stackFile)

    ## Message
    try:
        rx
        ry
        if stack[ry,rx] == 0:
            print 'Input point has nan value in data.'
    except: pass

    stack[M==0] = 0
    if np.nansum(M) == 0.0:
        print '\n*****************************************************'
        print   'ERROR:'
        print   'There is no pixel that has valid phase value in all datasets.' 
        print   'Check the file!'
        print   'Seeding failed'
        sys.exit(1)

    ######################### Check Method ##############################
    try:
        not stack[ry,rx] == 0
        method = 'input_coord'
    except:
        try:    method
        except: method = method_default
        print 'Skip input y/x reference point.'
        print 'Continue with '+method

    #h5file = h5py.File(File)

    ######################### Seeding ###################################
    ##### Sub-function
    def seed_method(method,File,stack,outFile,corFile=''):
        SeedingDone = 'no'
        next_method = method_default
        M = stack != 0

        if   method == 'manual':
            SeedingDone = seed_manual(File,stack,outFile)
            if SeedingDone == 'no':
                next_method = method_default
                print_warning(next_method)

        elif method == 'max_coherence':
            try:    SeedingDone = seed_max_coherence(File,M,outFile,corFile)
            except: SeedingDone = seed_max_coherence(File,M,outFile)
            if SeedingDone == 'no':
                next_method = 'random'
                print_warning(next_method)

        elif method == 'random':
            y,x = random_selection(stack)
            seed_xy(File,x,y,outFile)
            SeedingDone = 'yes'

        elif method == 'global_average':
            print '\n---------------------------------------------------------'
            print 'Automatically Seeding using Global Spatial Average Value '
            print '---------------------------------------------------------'
            print 'Calculating the global spatial average value for each epoch'+\
                  ' of all valid pixels ...'
            box = (0,0,width,length)
            meanList = ut.spatial_mean(File,M,box)
            seed_file(File,outFile,meanList,'','')
            SeedingDone = 'yes'

        return SeedingDone, next_method

    ##### Seeding
    SeedingDone = 'no'

    if method == 'input_coord':
        seed_xy(File,rx,ry,outFile)
        SeedingDone = 'yes'

    else:
        i = 0
        while SeedingDone == 'no' and i < 5:
            try:    SeedingDone,method = seed_method(method,File,stack,outFile,corFile)
            except: SeedingDone,method = seed_method(method,File,stack,outFile)
            i += 1
        if i >= 5:
            print 'ERROR: Seeding failed after '+str(i)+' attempts ...'
            sys.exit(1)
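A minimal sketch of what seeding at a pixel (ry, rx) does to a single interferogram: the phase at the reference pixel is subtracted from the whole image so that pixel becomes zero (synthetic array; the real seed_xy loops over every epoch in the HDF5 file and records ref_x/ref_y in the attributes).

import numpy as np

data = np.random.randn(100, 200).astype(np.float32)   # one unwrapped interferogram
ry, rx = 40, 120                                       # chosen reference pixel

data_seeded = data - data[ry, rx]
print(data_seeded[ry, rx])                              # -> 0.0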
Esempio n. 33
0
def read_template2inps(templateFile, inps=None):
    '''Read network options from template file into Namespace variable inps'''
    template_dict = readfile.read_template(templateFile)
    if not template_dict:
        print 'Empty template: '+templateFile
        return None
    keyList = template_dict.keys()

    if not inps:
        inps = cmdLineParse([''])

    # Read network option regardless of prefix
    for key in keyList:
        if 'selectPairs.'    in key:   template_dict[key.split('selectPairs.')[1]]    = template_dict[key]
        if 'pysar.network.'  in key:   template_dict[key.split('pysar.network.')[1]]  = template_dict[key]
        if 'select.network.' in key:   template_dict[key.split('select.network.')[1]] = template_dict[key]
    keyList = template_dict.keys()
    for key, value in template_dict.iteritems():
        if value.lower() in ['off','false','n']:  template_dict[key] = 'no'
        if value.lower() in ['on', 'true', 'y']:  template_dict[key] = 'yes'

    # Update inps value if not already set
    if not inps.method:
        if   'selectMethod' in keyList:  inps.method = template_dict['selectMethod']
        elif 'method'       in keyList:  inps.method = template_dict['method']
        else: inps.method = 'all'

    if not inps.perp_base_max:
        if 'perpBaseMax'  in keyList:  inps.perp_base_max = float(template_dict['perpBaseMax'])
        else: inps.perp_base_max = 500.0

    if not inps.temp_base_max:
        if 'lengthDayMax'   in keyList:  inps.temp_base_max = float(template_dict['lengthDayMax'])
        elif 'tempBaseMax'  in keyList:  inps.temp_base_max = float(template_dict['tempBaseMax'])
        else: inps.temp_base_max = 1800.0

    if not inps.temp_base_min:
        if 'lengthDayMin'   in keyList:  inps.temp_base_min = float(template_dict['lengthDayMin'])
        elif 'tempBaseMin'  in keyList:  inps.temp_base_min = float(template_dict['tempBaseMin'])
        else: inps.temp_base_min = 0.0

    if 'seasonal'     in keyList and template_dict['seasonal'].lower()     == 'no': inps.keep_seasonal = False
    if 'keepSeasonal' in keyList and template_dict['keepSeasonal'].lower() == 'no': inps.keep_seasonal = False

    if not inps.dop_overlap_min:
        if 'DopOverlapThresh'   in keyList:  inps.dop_overlap_min = float(template_dict['DopOverlapThresh'])
        elif 'dopOverlapThresh' in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapThresh'])
        elif 'dopOverlapMin'    in keyList:  inps.dop_overlap_min = float(template_dict['dopOverlapMin'])
        else: inps.dop_overlap_min = 15.0

    if not inps.reference_file and 'referenceFile' in keyList:  inps.reference_file = template_dict['referenceFile']
    if not inps.increment_num:
        if 'incrementNum'  in keyList:  inps.increment_num  = int(template_dict['incrementNum'])
        else: inps.increment_num = 3

    if not inps.temp_perp_list:
        if 'dayPerpList'    in keyList:  inps.temp_perp_list = template_dict['dayPerpList']
        elif 'tempPerpList' in keyList:  inps.temp_perp_list = template_dict['tempPerpList']
        else: inps.temp_perp_list = '16,1600;32,800;48,600;64,200'
        inps.temp_perp_list = [[float(j) for j in i.split(',')] for i in inps.temp_perp_list.split(';')]
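        # e.g. '16,1600;32,800' is parsed into [[16.0, 1600.0], [32.0, 800.0]]
        # (presumably pairs of max temporal baseline and max perpendicular baseline)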

    if not inps.exclude_date and 'excludeDate' in keyList:
        ex_date_list = [i for i in template_dict['excludeDate'].split(',')]
        inps.exclude_date = ptime.yymmdd(ex_date_list)

    if not inps.start_date and 'startDate' in keyList:
        inps.start_date = ptime.yyyymmdd(template_dict['startDate'])
    if not inps.end_date and 'endDate' in keyList:
        inps.end_date = ptime.yyyymmdd(template_dict['endDate'])

    if not inps.m_date and 'masterDate' in keyList:
        inps.m_date = ptime.yymmdd(template_dict['masterDate'])

    return inps
Esempio n. 34
0
def main(argv):
    start = time.time()

    try:
        templateFile = argv[1]
    except:
        Usage()
        sys.exit(1)

    ###########  Path  ############
    projectName = os.path.basename(templateFile).partition('.')[0]
    try:
        tssarProjectDir = os.getenv('TSSARDIR') + '/' + projectName
    except:
        tssarProjectDir = os.getenv(
            'SCRATCHDIR'
        ) + '/' + projectName + "/TSSAR"  # FA 7/2015: adopted for new directory structure
    print "\nprojectName: ", projectName
    print "QQ " + tssarProjectDir
    if not os.path.isdir(tssarProjectDir): os.mkdir(tssarProjectDir)
    os.chdir(tssarProjectDir)

    ##########  Initial File Name  ########
    import h5py
    import pysar._pysar_utilities as ut
    import pysar._readfile as readfile
    template = readfile.read_template(templateFile)

    igramFile = 'LoadedData.h5'
    if os.path.isfile('Modified_' + igramFile):
        igramFile = 'Modified_' + igramFile
    corFile = 'Coherence.h5'
    if os.path.isfile('Modified_' + corFile): corFile = 'Modified_' + corFile
    maskFile = 'Mask.h5'
    if os.path.isfile('Modified_' + maskFile):
        maskFile = 'Modified_' + maskFile

    #########################################
    # Loading Data
    #########################################
    print '\n**********  Loading Data  *****************************'
    if os.path.isfile(igramFile):
        print igramFile + ' already exists.'
    else:
        loadCmd = 'load_data.py ' + templateFile
        print loadCmd
        os.system(loadCmd)
        #copyDemCmd='copy_dem_trans.py ' + templateFile
        #print copyDemCmd
        #os.system(copyDemCmd)

    if not os.path.isfile(igramFile):
        sys.exit('\nERROR: No interferograms file found!\n')

    ##########  Initial File Name - 2  ####
    try:
        demGeoFile = find_filename(template, 'pysar.dem.geoCoord')
    except:
        print '\nWARNING:\n    No geo coded DEM found! Might be a problem in tropospheric delay / orbital error correction!\n'
    try:
        demRdrFile = find_filename(template, 'pysar.dem.radarCoord')
    except:
        print '\nWARNING:\n    No radar coded DEM found! Will be a problem in tropospheric delay / orbital error correction!\n'
    try:
        geomapFile = find_filename(template, 'pysar.geomap')
    except:
        print '\nWARNING:\n    No geomap*.trans file found! Will be a problem in geocoding!\n'

    #########################################
    # Check the subset (Optional)
    #########################################
    print '\n**********  Subsetting  *******************************'

    if 'pysar.subset.yx' in template.keys():
        print 'subsetting data with y/x input...'
        subset = template['pysar.subset.yx'].split(',')

        print 'calculating bounding box in geo coordinate.'
        import numpy as np
        ysub = [float(i) for i in subset[0].split(':')]
        ysub.sort()
        xsub = [float(i) for i in subset[1].split(':')]
        xsub.sort()
        x = np.array([xsub[0], xsub[1], xsub[0], xsub[1]])
        y = np.array([ysub[0], ysub[0], ysub[1], ysub[1]])
        lat, lon, lat_res, lon_res = ut.radar2glob(x, y, igramFile, 1)
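        # pad the geo bounding box by roughly ten pixels of the coarser geo resolution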
        buf = 10 * (np.max([lat_res, lon_res]))
        latsub = str(np.min(lat) - buf) + ':' + str(np.max(lat) + buf)
        lonsub = str(np.min(lon) - buf) + ':' + str(np.max(lon) + buf)
        print '    subset in y - ' + subset[
            0] + '\n    subset in x - ' + subset[1]
        print '    subset in lat - ' + latsub + '\n    subset in lon - ' + lonsub

        ## subset radar coded file
        igramFile = check_subset(igramFile, subset, option='yx')
        maskFile = check_subset(maskFile, subset, option='yx')
        try:
            demRdrFile = check_subset(demRdrFile, subset, option='yx')
        except:
            pass
        ## subset geo coded file
        try:
            demGeoFile = check_subset(demGeoFile, [latsub, lonsub],
                                      option='lalo')
        except:
            pass
        try:
            geomapFile = check_subset(geomapFile, [latsub, lonsub],
                                      option='lalo')
        except:
            pass

    elif 'pysar.subset.lalo' in template.keys():
        print 'subsetting data with lat/lon input...'
        subset = template['pysar.subset.lalo'].split(',')

        print 'calculate bounding box in radar coordinate.'
        import numpy as np
        latsub = [float(i) for i in subset[0].split(':')]
        latsub.sort()
        lonsub = [float(i) for i in subset[1].split(':')]
        lonsub.sort()
        lat = np.array([latsub[0], latsub[0], latsub[1], latsub[1]])
        lon = np.array([lonsub[0], lonsub[1], lonsub[0], lonsub[1]])
        x, y, x_res, y_res = ut.glob2radar(lat, lon)
        buf = 10 * (np.max([x_res, y_res]))
        xsub = str(np.min(x) - buf) + ':' + str(np.max(x) + buf)
        ysub = str(np.min(y) - buf) + ':' + str(np.max(y) + buf)
        print '    subset in lat - ' + subset[
            0] + '\n    subset in lon - ' + subset[1]
        print '    subset in y - ' + ysub + '\n    subset in x - ' + xsub

        ## subset geo coded files
        try:
            demGeoFile = check_subset(demGeoFile, subset, option='lalo')
        except:
            pass
        try:
            geomapFile = check_subset(geomapFile, subset, option='lalo')
        except:
            pass
        ## subset radar coded files
        igramFile = check_subset(igramFile, [ysub, xsub], option='yx')
        maskFile = check_subset(maskFile, [ysub, xsub], option='yx')
        try:
            demRdrFile = check_subset(demRdrFile, [ysub, xsub], option='yx')
        except:
            pass

    else:
        print 'No Subset selected. Processing the whole area'

    #try:
    #  subset= template['pysar.subset.yx'].split(',')
    #  print 'subset in y - '+str(subset[0])
    #  print 'subset in x - '+str(subset[1])
    #  igramFile  = check_subset(igramFile, subset)
    #  corFile    = check_subset(corFile,   subset)
    #  maskFile   = check_subset(maskFile,  subset)
    #  demRdrFile = check_subset(demRdrFile,subset)
    #  demGeoFile = check_subset(demGeoFile,subset)
    #  #geomapFile = check_subset(geomapFile,subset)
    #except:  print   'No Subset selected. Processing the whole area'

    #########################################
    # Referencing Interferograms
    #########################################
    print '\n**********  Referencing Interferograms  ***************'
    if os.path.isfile('Seeded_' + igramFile):
        igramFile = 'Seeded_' + igramFile
        print igramFile + ' already exists.'
    else:
        print 'referencing all interferograms to the same pixel.'
        if 'pysar.seed.lalo' in template.keys():
            print 'Checking lat/lon reference point'
            lat = template['pysar.seed.lalo'].split(',')[0]
            lon = template['pysar.seed.lalo'].split(',')[1]
            seedCmd = 'seed_data.py -f ' + igramFile + ' -l ' + lat + ' -L ' + lon
        elif 'pysar.seed.yx' in template.keys():
            print 'Checking y/x reference point'
            y = template['pysar.seed.yx'].split(',')[0]
            x = template['pysar.seed.yx'].split(',')[1]
            seedCmd = 'seed_data.py -f ' + igramFile + ' -y ' + y + ' -x ' + x
        else:
            seedCmd = 'seed_data.py -f ' + igramFile

        igramFile = 'Seeded_' + igramFile
        print seedCmd
        os.system(seedCmd)

############################################
# Unwrapping Error Correction (Optional)
#    based on the consistency of triplets
#    of interferograms
############################################
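# In essence (background note, not part of the original script): for any triplet of
# interferograms (i,j), (j,k), (i,k) the unwrapped phases should satisfy
#     phi_ij + phi_jk - phi_ik ~= 0,
# and residuals of integer multiples of 2*pi indicate unwrapping errors, which is
# what unwrap_error.py is expected to exploit.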
    print '\n**********  Unwrapping Error Correction  **************'
    outName = igramFile.split('.')[0] + '_unwCor.h5'
    try:
        template['pysar.unwrapError']
        if template['pysar.unwrapError'] in ('Y', 'y', 'yes', 'Yes', 'YES'):
            if os.path.isfile(outName):
                igramFile = outName
                print igramFile + ' exists.'
            else:
                print 'This might take a while depending on the size of your data set!'
                unwCmd = 'unwrap_error.py ' + igramFile
                os.system(unwCmd)
                igramFile = outName
        else:
            print 'No unwrapping error correction.'
    except:
        print 'No unwrapping error correction.'

    #########################################
    # Inversion of Interferograms
    ########################################
    print '\n**********  Time Series Inversion  ********************'
    if os.path.isfile('timeseries.h5'):
        print 'timeseries.h5 already exists, inversion is not needed.'
    else:
        invertCmd = 'igram_inversion.py ' + igramFile
        print invertCmd
        os.system(invertCmd)
    timeseriesFile = 'timeseries.h5'

    ##############################################
    # Temporal Coherence:
    #   A parameter to evaluate the consistency
    #   of timeseries with the interferograms
    ##############################################
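    # Roughly speaking (not from the original script), temporal coherence per pixel is
    #     gamma = | sum_i exp(j * (phi_i - phi_hat_i)) | / N
    # where phi_i is the observed interferogram phase, phi_hat_i the phase reconstructed
    # from the inverted time series, and N the number of interferograms; values near 1
    # mean the time series is consistent with the interferograms.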
    print '\n**********  Generate Temporal Coherence file  *********'
    if os.path.isfile('temporal_coherence.h5'):
        print 'temporal_coherence.h5 already exists.'
    else:
        tempcohCmd = 'temporal_coherence.py ' + igramFile + ' ' + timeseriesFile
        print tempcohCmd
        os.system(tempcohCmd)
    tempCohFile = 'temporal_coherence.h5'

    ##############################################
    # Update Mask based on temporal coherence (Optional)
    ##############################################
    print '\n**********  Update Mask based on Temporal Coherence  **'
    outName = maskFile.split('.')[0] + '_tempCoh.h5'
    try:
        template['pysar.mask']
        if template['pysar.mask'] in ('Y', 'yes', 'Yes', 'YES', 'y'):
            if os.path.isfile(outName):
                maskFile = outName
                print maskFile + ' already exists.'
            else:
                try:
                    cohT = template['pysar.mask.threshold']
                except:
                    cohT = '0.7'
                maskCmd = 'generate_mask.py -f ' + tempCohFile + ' -m ' + cohT + ' -M 1.0 -o ' + outName
                print maskCmd
                os.system(maskCmd)
                maskFile = outName
        else:
            print 'No mask update from temporal coherence'
    except:
        print 'No mask update from temporal coherence'

    ##############################################
    # Incident Angle
    ##############################################
    print '\n**********  Generate Incident Angle file  *************'
    if os.path.isfile('incidence_angle.h5'):
        print 'incidence_angle.h5 already exists.'
    else:
        inciCmd = 'incidence_angle.py -f ' + timeseriesFile
        print inciCmd
        os.system(inciCmd)

##############################################
# LOD (Local Oscillator Drift) Correction
#   when Satellite is Envisat and
#   Coordinate system is radar
##############################################
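# Background note (not from the original script): the Envisat ASAR local oscillator
# drift introduces an apparent range-dependent phase ramp that grows with acquisition
# time; lod.py is expected to remove it, which is only meaningful in radar coordinates.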
    print '\n**********  Local Oscillator Drift correction  ********'
    rdr_or_geo = ut.radar_or_geo(timeseriesFile)
    outName = timeseriesFile.split('.')[0] + '_LODcor.h5'
    if os.path.isfile(outName):
        timeseriesFile = outName
        print timeseriesFile + ' already exists.'
    else:
        h5file = h5py.File(timeseriesFile, 'r')
        platform = h5file['timeseries'].attrs['PLATFORM']
        if platform == 'ENVISAT':
            if rdr_or_geo == 'radar':
                LODcmd = 'lod.py ' + timeseriesFile
                print LODcmd
                os.system(LODcmd)
                timeseriesFile = outName
            else:
                print 'Cannot correct LOD for geocoded data.'
        else:
            print 'No need of LOD correction for ' + platform
        h5file.close()


##############################################
# Tropospheric Delay Correction (Optional)
##############################################
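# Summary of the two branches handled below (no additional behavior): 'height-correlation'
# fits and removes a polynomial relation between phase and DEM elevation, while 'pyaps'
# computes the delay from a numerical weather model and projects it to the radar line of
# sight using the incidence angle file.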
    print '\n**********  Tropospheric Correction  ******************'
    try:
        if (template['pysar.troposphericDelay'] in ('Y','y','yes','Yes','YES')) and\
            template['pysar.orbitError.method'] in ['BaseTropCor','basetropcor','base_trop_cor']:
            print '''
   +++++++++++++++++++++++++++++++++++++++++++++++++++
   WARNING:
       Orbital error correction method is BaseTropCor.
       Tropospheric correction has already been applied simultaneously with the
       baseline error correction, so it cannot be applied again.
       To apply the tropospheric correction separately from the baseline error
       correction, choose another option for orbital error correction.
   +++++++++++++++++++++++++++++++++++++++++++++++++++
        '''
            template['pysar.troposphericDelay'] = 'no'
    except:
        print 'Checking the tropospheric delay correction ...'

    if template['pysar.troposphericDelay'] in ('Y', 'y', 'yes', 'Yes', 'YES'):
        if rdr_or_geo == 'radar': demFile = demRdrFile
        elif rdr_or_geo == 'geo': demFile = demGeoFile

        if not os.path.isfile(demFile):
            print '++++++++++++++++++++++++++++++++++++++++++++++'
            print 'ERROR:'
            print '    DEM file was not found!'
            print '    Continue without tropospheric correction ...'
            print '++++++++++++++++++++++++++++++++++++++++++++++'
        else:
            if template['pysar.troposphericDelay.method'] in ['height-correlation','height_correlation',\
                                                              'Height-Correlation','Height_Correlation']:
                print 'tropospheric delay correction with height-correlation approach'
                outName = timeseriesFile.split('.')[0] + '_tropCor.h5'
                if os.path.isfile(outName):
                    timeseriesFile = outName
                    print timeseriesFile + ' already exists.'
                else:
                    try:
                        poly_order = template[
                            'pysar.troposphericDelay.polyOrder']
                    except:
                        poly_order = '1'
                        print 'Default polynomial order for tropospheric correction = 1'
                    cmdTrop = 'tropcor_phase_elevation.py' + ' -f ' + timeseriesFile + ' -d ' + demFile + ' -p ' + str(
                        poly_order)
                    print cmdTrop
                    os.system(cmdTrop)
                    timeseriesFile = outName

            elif template['pysar.troposphericDelay.method'] in [
                    'pyaps', 'PyAPS', 'PYAPS'
            ]:
                print 'Atmospheric correction using Numerical Weather Models (using PyAPS software)'
                print 'reading DEM, source of NWM and acquisition time from template file'
                source_of_NWM = template[
                    'pysar.troposphericDelay.weatherModel']
                print 'Numerical Weather Model: ' + source_of_NWM
                outName = timeseriesFile.split(
                    '.')[0] + '_' + source_of_NWM + '.h5'
                if os.path.isfile(outName):
                    timeseriesFile = outName
                    print timeseriesFile + ' already exists.'
                else:
                    acquisition_time = template['pysar.acquisitionTime']
                    print 'acquisition time: ' + acquisition_time
                    cmdTrop = 'tropcor_pyaps.py -f '+timeseriesFile+' -d '+demFile+' -s '+source_of_NWM+\
                                              ' -h '+acquisition_time+' -i incidence_angle.h5'
                    print cmdTrop
                    os.system(cmdTrop)
                    #subprocess.Popen(cmdTrop).wait()
                    timeseriesFile = outName
            else:
                print 'ERROR: Unrecognized atmospheric correction method: ' + template[
                    'pysar.troposphericDelay.method']
    else:
        print 'No atmospheric delay correction.'

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
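    # Hedged background: a DEM height error dz maps into interferometric phase roughly as
    #     dphi = (4*pi / lambda) * (B_perp / (R * sin(theta))) * dz,
    # i.e. it scales with the perpendicular baseline, which is presumably why dem_error.py
    # needs the interferogram (baseline) information in addition to the time series.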
    print '\n**********  Topographic (DEM) Error correction  *******'
    outName = timeseriesFile.split('.')[0] + '_demCor.h5'
    try:
        template['pysar.topoError']
        if template['pysar.topoError'] in ('Y', 'yes', 'Yes', 'YES', 'y'):
            if os.path.isfile(outName):
                timeseriesFile = outName
                print timeseriesFile + ' already exists.'
            else:
                print 'Correcting topographic residuals'
                topoCmd = 'dem_error.py ' + timeseriesFile + ' ' + igramFile
                print topoCmd
                os.system(topoCmd)
                timeseriesFile = outName
        else:
            print 'No correction for topographic residuals.'
    except:
        print 'No correction for topographic residuals.'

    ##############################################
    # Orbit Correction (Optional)
    ##############################################
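    # Summary of the options handled below (no additional behavior): 'plane*' and
    # 'quadratic*' remove a best-fit linear/quadratic ramp from each epoch;
    # 'baselineCor' and 'BaseTropCor' estimate baseline errors (the latter jointly with a
    # height-correlated tropospheric term) and require radar-coordinate data.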
    print '\n**********  Orbital Correction  ***********************'
    try:
        template['pysar.orbitError']
        if template['pysar.orbitError'] in ('Y', 'yes', 'Yes', 'YES', 'y'):
            try:
                orbit_error_method = template['pysar.orbitError.method']
                print 'orbit error correction method : ' + orbit_error_method
                outName = timeseriesFile.split(
                    '.')[0] + '_' + orbit_error_method + '.h5'
                if os.path.isfile(outName):
                    timeseriesFile = outName
                    print timeseriesFile + ' already exists.'

                else:
                    if orbit_error_method in [    'plane',     'plane_range',     'plane_azimuth',\
                                              'quadratic', 'quadratic_range', 'quadratic_azimuth']:
                        orbitCmd = 'remove_plane.py ' + timeseriesFile + ' ' + orbit_error_method  #+ ' Mask.h5'
                        print orbitCmd
                        os.system(orbitCmd)
                        timeseriesFile = outName

                    elif orbit_error_method in ['baselineCor', 'BaselineCor']:
                        try:
                            h5file = h5py.File(timeseriesFile, 'r')
                            daz = float(h5file['timeseries'].
                                        attrs['AZIMUTH_PIXEL_SIZE'])
                            orbitCmd = 'baseline_error.py ' + timeseriesFile  #+ ' Mask.h5'
                            print orbitCmd
                            os.system(orbitCmd)
                            timeseriesFile = outName
                        except:
                            print 'WARNING!'
                            print 'Skipping orbital error correction.'
                            print 'baselineCor method can only be applied in radar coordinate'

                    elif orbit_error_method in [
                            'BaseTropCor', 'basetropcor', 'base_trop_cor'
                    ]:
                        try:
                            h5file = h5py.File(timeseriesFile, 'r')
                            daz = float(h5file['timeseries'].
                                        attrs['AZIMUTH_PIXEL_SIZE'])
                            print 'Joint estimation of Baseline error and tropospheric delay [height-correlation approach]'
                            if rdr_or_geo == 'radar': demFile = demRdrFile
                            elif rdr_or_geo == 'geo': demFile = demGeoFile
                            try:
                                poly_order = template['pysar.trop.poly_order']
                            except:
                                poly_order = 1
                                print 'Default polynomial order for tropospheric correction = 1'
                            orbitCmd = 'baseline_trop.py ' + timeseriesFile + ' ' + demFile + ' ' + str(
                                poly_order) + ' range_and_azimuth'
                            print orbitCmd
                            os.system(orbitCmd)
                            timeseriesFile = outName
                        except:
                            print 'WARNING!'
                            print 'Skipping orbital error correction.'
                            print 'baselineCor method can only be applied in radar coordinate'

                    else:
                        print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++'
                        print 'WARNING!'
                        print 'Orbital error correction method was not recognized!'
                        print 'Possible options are:'
                        print '    quadratic, plane, quadratic_range, quadratic_azimuth'
                        print '    plane_range, plane_azimuth, baselineCor, BaseTropCor'
                        print 'Continue without orbital errors correction...'
                        print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++'
            except:
                print 'No orbital errors correction.'
        else:
            print 'No orbital errors correction.'
    except:
        print 'No orbital errors correction.'

    #############################################
    # Velocity and rmse maps
    #############################################
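    # In essence (hedged summary): timeseries2velocity.py fits each pixel's displacement
    # history with a straight line, d(t) ~= v*t + c, by least squares; the slope v is the
    # mean velocity and its standard error gives the accompanying uncertainty map.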
    print '\n**********  Velocity estimation  **********************'
    if os.path.isfile('velocity.h5'):
        print 'velocity.h5 file already exists.'
    else:
        velCmd = 'timeseries2velocity.py ' + timeseriesFile
        print velCmd
        os.system(velCmd)
    velocityFile = 'velocity.h5'

    ############################################
    # Geocoding (Optional)
    ############################################
    print '\n**********  Geocoding  ********************************'
    try:
        template['pysar.geocode']
        if template['pysar.geocode'] in ('Y', 'y', 'yes', 'Yes', 'YES'):
            #geoTsFile      = check_geocode(timeseriesFile,geomapFile)
            geoVelocityFile = check_geocode(velocityFile, geomapFile)
            geoMaskFile = check_geocode(maskFile, geomapFile)
        else:
            print 'No geocoding applied'
    except:
        print 'No geocoding applied'

    #############################################
    # Masking (Optional)
    #############################################
    print '\n**********  Masking Velocity  *************************'
    velocityFile = check_mask(velocityFile, maskFile)
    try:
        geoVelocityFile = check_mask(geoVelocityFile, geoMaskFile)
    except:
        pass

    #############################################
    #                PySAR v1.0                 #
    #############################################
    s = time.time() - start
    m, s = divmod(s, 60)
    h, m = divmod(m, 60)
    print '\nTime used: %02d hours %02d mins %02d secs' % (h, m, s)
    print '\n###############################################'
    print 'End of PySAR processing!'
    print '################################################\n'
Esempio n. 36
0
def read_template2inps(template_file, inps=None):
    '''Read input template options into Namespace inps'''
    if not inps:
        inps = cmdLineParse()

    template = readfile.read_template(inps.template_file)
    key_list = template.keys()

    # Coherence-based network modification
    prefix = 'pysar.network.'

    key = prefix + 'coherenceBased'
    if key in key_list and template[key] in ['auto', 'yes']:
        inps.coherence_based = True

    key = prefix + 'keepMinSpanTree'
    if key in key_list and template[key] in ['no']:
        inps.keep_mst = False

    key = prefix + 'coherenceFile'
    if key in key_list:
        if template[key] == 'auto':
            inps.coherence_file = 'coherence.h5'
        else:
            inps.coherence_file = template[key]

    # find coherence file from input files if inps.coherence_file does not exist.
    if inps.coherence_based and not os.path.isfile(inps.coherence_file):
        k_list = [readfile.read_attribute(f)['FILE_TYPE'] for f in inps.file]
        try:
            coh_file_idx = k_list.index('coherence')
            inps.coherence_file = inps.file[coh_file_idx]
        except ValueError:
            print 'No coherence file found! Cannot use the coherence-based method without it.'

    key = prefix + 'minCoherence'
    if key in key_list:
        if template[key] == 'auto':
            inps.min_coherence = 0.7
        else:
            inps.min_coherence = float(template[key])

    key = prefix + 'maskFile'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            try:
                inps.mask_file = ut.get_file_list(['maskLand.h5',
                                                   'mask.h5'])[0]
            except:
                inps.mask_file = None
        elif value == 'no':
            inps.mask_file = None
        else:
            inps.mask_file = value

    key = prefix + 'maskAoi.yx'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_pix_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_y = sorted([int(i.strip()) for i in tmp[0].split(':')])
            sub_x = sorted([int(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_pix_box = (sub_x[0], sub_y[0], sub_x[1], sub_y[1])
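            # e.g. '200:800, 150:600' would give aoi_pix_box = (150, 200, 600, 800)
            # in (x0, y0, x1, y1) order (illustrative values, not from any template).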

    key = prefix + 'maskAoi.lalo'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.aoi_geo_box = None
        else:
            tmp = [i.strip() for i in value.split(',')]
            sub_lat = sorted([float(i.strip()) for i in tmp[0].split(':')])
            sub_lon = sorted([float(i.strip()) for i in tmp[1].split(':')])
            inps.aoi_geo_box = (sub_lon[0], sub_lat[1], sub_lon[1], sub_lat[0])
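            # i.e. a (west, north, east, south) geo box built from the sorted lat/lon ranges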
            # Check lookup file
            if not inps.lookup_file:
                print 'Warning: no lookup table file found! Cannot use ' + key + ' option without it.'
                print 'Skip this option.'
                inps.aoi_pix_box = None

    ## Network Modification based on thresholds
    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_temp_baseline = float(value)

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.max_perp_baseline = float(value)

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_date = [i for i in value.replace(',', ' ').split()]

    key = prefix + 'excludeIfgIndex'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.exclude_ifg_index = [
                i for i in value.replace(',', ' ').split()
            ]

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.start_date = ptime.yymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value not in ['auto', 'no']:
            inps.end_date = ptime.yymmdd(value)

    return inps
Esempio n. 37
0
def read_template2inps(templateFile, inps=None):
    '''Read network options from template file into Namespace variable inps'''
    if not inps:
        inps = cmdLineParse()

    ##Read template file
    template = readfile.read_template(templateFile)
    key_list = template.keys()
    if not template:
        print 'Empty template: ' + templateFile
        return None
    prefix = 'select.network.'

    ##Extra keys
    #extra_key_list = ['masterDate','startDate','endDate']
    #for extra_key in extra_key_list:
    #    if extra_key in key_list:
    #        template[prefix+extra_key] = template[extra_key]

    #Check option prefix
    for i in ['selectPairs.']:
        if any(i in key for key in key_list):
            print '\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
            print 'WARNING: unsupported option prefix detected: selectPairs.'
            print "         Use select.network. instead"
            print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n'

    if all(prefix not in key for key in key_list):
        print '\n+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++'
        print 'ERROR: no valid input option detected in template file!'
        print 'Check the template below for supported options:'
        print '+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n'
        print TEMPLATE
        sys.exit(-1)

    ##Read template dict into inps namespace
    key = prefix + 'method'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.method = 'all'
        else:
            inps.method = value

    key = prefix + 'referenceFile'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.reference_file = None
        else:
            inps.reference_file = value

    key = prefix + 'perpBaseMax'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.perp_base_max = 500.0
        elif value == 'no':
            inps.perp_base_max = 1e5
        else:
            inps.perp_base_max = float(value)

    key = prefix + 'tempBaseMax'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.temp_base_max = 1800.0
        elif value == 'no':
            inps.temp_base_max = 3.65e5
        else:
            inps.temp_base_max = float(value)

    key = prefix + 'tempBaseMin'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.temp_base_min = 0.0
        else:
            inps.temp_base_min = float(value)

    key = prefix + 'keepSeasonal'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.keep_seasonal = False
        else:
            inps.keep_seasonal = True

    key = prefix + 'dopOverlapMin'
    if key in key_list:
        value = template[key]
        if value == 'auto':
            inps.dop_overlap_min = 15.0
        elif value == 'no':
            inps.dop_overlap_min = 0.0
        else:
            inps.dop_overlap_min = float(value)

    key = 'PLATFORM'
    if key in key_list and not inps.sensor:
        inps.sensor = template[key]

    key = 'COH_COLOR_JUMP'
    if key in key_list:
        inps.coh_thres = float(template[key])

    key = prefix + 'masterDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.m_date = None
        else:
            inps.m_date = ptime.yymmdd(value)

    key = prefix + 'startDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.start_date = None
        else:
            inps.start_date = ptime.yyyymmdd(value)

    key = prefix + 'endDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.end_date = None
        else:
            inps.end_date = ptime.yyyymmdd(value)

    key = prefix + 'excludeDate'
    if key in key_list:
        value = template[key]
        if value in ['auto', 'no']:
            inps.exclude_date = []
        else:
            inps.exclude_date = ptime.yyyymmdd([i for i in value.split(',')])

    key = prefix + 'incrementNum'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.increment_num = 3
        else:
            inps.increment_num = int(value)

    key = prefix + 'tempPerpList'
    if key in key_list:
        value = template[key]
        if value in ['auto']:
            inps.temp_perp_list = '16,1600;32,800;48,600;64,200'
        else:
            inps.temp_perp_list = value
    if isinstance(inps.temp_perp_list, basestring):
        inps.temp_perp_list = [[float(j) for j in i.split(',')]
                               for i in inps.temp_perp_list.split(';')]

    return inps