Example #1
def save_hdfeos5(inps, customTemplate=None):
    if not inps.geocoded:
        warnings.warn('Dataset is in radar coordinates, skip writing to HDF-EOS5 format.')
    else:
        # Add attributes from custom template to timeseries file
        if customTemplate is not None:
            ut.add_attribute(inps.timeseriesFile, customTemplate)

        # Save to HDF-EOS5 format
        print('--------------------------------------------')
        hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                      ' -t {e}').format(t=inps.timeseriesFile,
                                        c=inps.tempCohFile,
                                        m=inps.maskFile,
                                        g=inps.geomFile,
                                        e=inps.templateFile)
        print(hdfeos5Cmd)
        atr = readfile.read_attribute(inps.timeseriesFile)
        SAT = sensor.get_unavco_mission_name(atr)
        try:
            inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
        except IndexError:
            inps.hdfeos5File = None
        if ut.run_or_skip(out_file=inps.hdfeos5File, in_file=[inps.timeseriesFile,
                                                              inps.tempCohFile,
                                                              inps.maskFile,
                                                              inps.geomFile]) == 'run':
            status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while generating HDF-EOS5 time-series file.\n')
    return
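
Every example on this page revolves around ut.get_file_list from PySAR's utility module. As a rough mental model only, a minimal glob-based stand-in might look like the sketch below; the sorting and abspath handling are assumptions for illustration, not PySAR's verified behavior.

import glob
import os

def get_file_list(file_pattern, abspath=False):
    """Minimal stand-in: expand a glob pattern (or a list of patterns)
    into a sorted list of existing file paths."""
    patterns = [file_pattern] if isinstance(file_pattern, str) else file_pattern
    file_list = []
    for pattern in patterns:
        file_list += sorted(glob.glob(pattern))
    if abspath:
        file_list = [os.path.abspath(i) for i in file_list]
    return file_list

# get_file_list('*.he5') returns [] when nothing matches, which is why the
# examples guard the [0] lookup with try/except IndexError.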
Example #2
def extract_metadata4lookup_table(fname):
    """Read/extract attribute for .UTM_TO_RDC file from Gamma to ROI_PAC
    For example, it read input file, sim_150911-150922.UTM_TO_RDC, 
    find its associated par file, sim_150911-150922.utm.dem.par, read it, and
    convert to ROI_PAC style and write it to an rsc file, sim_150911-150922.UTM_TO_RDC.rsc"""

    # Check for an existing .rsc file
    rsc_file_list = ut.get_file_list(fname+'.rsc')
    if rsc_file_list:
        rsc_file = rsc_file_list[0]
        return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]
    atr['Y_UNIT'] = 'degrees'
    atr['X_UNIT'] = 'degrees'

    par_file = os.path.splitext(fname)[0]+'.utm.dem.par'

    par_dict = readfile.read_gamma_par(par_file)
    atr.update(par_dict)

    # Write to .rsc file
    rsc_file = fname+'.rsc'
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except Exception:
        atr_orig = dict()
    if not set(atr.items()).issubset(set(atr_orig.items())):
        atr_out = {**atr_orig, **atr}
        print('writing >>> '+os.path.basename(rsc_file))
        writefile.write_roipac_rsc(atr_out, out_file=rsc_file)
    return rsc_file
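
A hedged usage sketch for the function above, reusing the file names from its docstring (hypothetical data layout; the .UTM_TO_RDC file and its .utm.dem.par companion must sit in the working directory):

rsc_file = extract_metadata4lookup_table('sim_150911-150922.UTM_TO_RDC')
print(rsc_file)  # sim_150911-150922.UTM_TO_RDC.rsc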
Example #3
def prepare_metadata(inps):
    inps.file = ut.get_file_list(inps.file, abspath=True)

    # check outfile and parallel option
    if inps.parallel:
        (num_cores,
         inps.parallel,
         Parallel,
         delayed) = ut.check_parallel(len(inps.file), print_msg=False)

    # multiple datasets files
    ext = os.path.splitext(inps.file[0])[1]
    if ext in ['.unw', '.cor', '.int']:
        if len(inps.file) == 1:
            extract_metadata4interferogram(inps.file[0])
        elif inps.parallel:
            Parallel(n_jobs=num_cores)(delayed(extract_metadata4interferogram)(file)
                                       for file in inps.file)
        else:
            for fname in inps.file:
                extract_metadata4interferogram(fname)

    # Single dataset files
    elif inps.file[0].endswith('.utm.dem'):
        for fname in inps.file:
            atr_file = extract_metadata4dem_geo(fname)
    elif inps.file[0].endswith(('.rdc.dem', '.hgt_sim')):
        for fname in inps.file:
            atr_file = extract_metadata4dem_radar(fname)
    elif ext in ['.UTM_TO_RDC']:
        for fname in inps.file:
            atr_file = extract_metadata4lookup_table(fname)
    return
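
The Parallel(n_jobs=...)(delayed(func)(arg) for arg in ...) call above is the standard joblib idiom, which ut.check_parallel presumably imports and hands back. A self-contained sketch of that idiom, independent of PySAR:

from joblib import Parallel, delayed

def square(x):
    return x * x

# Run square() over the inputs on 4 worker processes;
# results come back in input order.
results = Parallel(n_jobs=4)(delayed(square)(i) for i in range(10))
print(results)  # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]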
Example #4
def _check_inps(inps):
    inps.file = ut.get_file_list(inps.file)
    if not inps.file:
        raise Exception('ERROR: no input file found!')
    elif len(inps.file) > 1:
        inps.outfile = None

    atr = readfile.read_attribute(inps.file[0])
    if 'Y_FIRST' in atr.keys() and inps.radar2geo:
        print('input file is already geocoded')
        print('to resample geocoded files into radar coordinates, use --geo2radar option')
        print('exit without doing anything.')
        sys.exit(0)
    elif 'Y_FIRST' not in atr.keys() and not inps.radar2geo:
        print('input file is already in radar coordinates, exit without doing anything')
        sys.exit(0)

    inps.lookupFile = ut.get_lookup_file(inps.lookupFile)
    if not inps.lookupFile:
        raise FileNotFoundError('No lookup table found! Cannot geocode without it.')

    if inps.SNWE:
        inps.SNWE = tuple(inps.SNWE)

    inps.laloStep = [inps.latStep, inps.lonStep]
    if None in inps.laloStep:
        inps.laloStep = None

    return inps
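
_check_inps expects an argparse-style namespace; a minimal invocation sketch follows. The attribute names are inferred from the function body and the input file is hypothetical, so treat both as assumptions.

import argparse

inps = argparse.Namespace(
    file='timeseries.h5',        # glob pattern or list of input files
    radar2geo=True,              # resampling direction
    lookupFile=None,             # let ut.get_lookup_file() search for one
    SNWE=None,                   # optional (south, north, west, east) box
    latStep=None, lonStep=None,  # output pixel size in degrees
    outfile='geo_timeseries.h5',
)
inps = _check_inps(inps)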
Example #5
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    inps.file = ut.get_file_list(inps.file)
    if inps.outfile or not inps.disp_fig:
        inps.save_fig = True
    return inps
Example #6
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)
    inps.file = ut.get_file_list(inps.file)

    if inps.ysub and len(inps.ysub) % 2 != 0:
        raise Exception('ERROR: -y input has to have even length!')
    return inps
Example #7
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)
    inps.file = ut.get_file_list(inps.file)

    if len(inps.file) > 1 and inps.outfile:
        inps.outfile = None
        print('more than one input file found; disabling custom output filename.')
    return inps
Example #8
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    inps.file = ut.get_file_list(inps.file)[0]
    inps = read_reference_input(inps)

    if inps.go_reference:
        reference_file(inps)
    print('Done.')
    return
Example #9
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    inps.file = ut.get_file_list(inps.file)
    #print('number of input files: ({})\n{}'.format(len(inps.file), inps.file))
    if len(inps.file) > 1:
        inps.outfile = None

    return inps
Example #10
def main(iargs=None):
    inps = cmd_line_parse(iargs)
    inps.file = ut.get_file_list(inps.file)
    print('number of input files: ({})\n{}'.format(len(inps.file), inps.file))

    inps = read_aux_subset2inps(inps)

    subset_file_list(inps.file, inps)

    print('Done.')
    return
Example #11
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)

    inps.file = ut.get_file_list(inps.file, abspath=True)
    inps.file_ext = os.path.splitext(inps.file[0])[1]

    # check input file extension
    ext_list = ['.unw', '.cor', '.int', '.dem', '.hgt_sim', '.UTM_TO_RDC']
    if inps.file_ext not in ext_list:
        msg = 'unsupported input file extension: {}'.format(inps.file_ext)
        msg += '\nsupported file extensions: {}'.format(ext_list)
        raise ValueError(msg)
    return inps
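
With raise ValueError(msg) actually carrying the message (the original raised a bare ValueError()), an unsupported extension now fails loudly. A hypothetical invocation, assuming sim_150911-150922.slc exists on disk so the extension check is what trips:

try:
    inps = cmd_line_parse(['sim_150911-150922.slc'])
except ValueError as e:
    print(e)
    # unsupported input file extension: .slc
    # supported file extensions: ['.unw', '.cor', '.int', '.dem', '.hgt_sim', '.UTM_TO_RDC']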
Example #12
def copy_aux_file(inps):
    # for Univ of Miami
    fileList = ['PROCESS/unavco_attributes.txt',
                'PROCESS/bl_list.txt',
                'SLC/summary*slc.jpg']
    try:
        projectDir = os.path.join(os.getenv('SCRATCHDIR'), inps.projectName)
        fileList = ut.get_file_list([os.path.join(projectDir, i) for i in fileList],
                                    abspath=True)
        for file in fileList:
            if ut.run_or_skip(out_file=os.path.basename(file),
                              in_file=file,
                              check_readable=False) == 'run':
                shutil.copy2(file, inps.workDir)
                print('copy {} to work directory'.format(os.path.basename(file)))
    except Exception:
        # auxiliary files are optional; ignore a missing project dir or files
        pass
    return inps
Example #13
def check_exist_grib_file(gfile_list, print_msg=True):
    """Check input list of grib files, and return the existing ones with right size."""
    gfile_exist = ut.get_file_list(gfile_list)
    if gfile_exist:
        file_sizes = [
            os.path.getsize(i) for i in gfile_exist
            if os.path.getsize(i) > 10e6
        ]
        if file_sizes:
            comm_size = ut.most_common(file_sizes)
            if print_msg:
                print('common file size: {} bytes'.format(comm_size))
                print('number of existing grib files   : {}'.format(len(gfile_exist)))

            gfile_corrupt = []
            for gfile in gfile_exist:
                if os.path.getsize(gfile) < comm_size * 0.9:
                    gfile_corrupt.append(gfile)
        else:
            gfile_corrupt = gfile_exist

        if gfile_corrupt:
            if print_msg:
                print('-' * 78)
                print('corrupted grib files detected! Deleting them for re-download...')
                print('number of corrupted grib files  : {}'.format(len(gfile_corrupt)))
            for i in gfile_corrupt:
                rmCmd = 'rm ' + i
                print(rmCmd)
                os.system(rmCmd)
                gfile_exist.remove(i)
            if print_msg:
                print('-' * 78)
    return gfile_exist
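
ut.most_common presumably returns the modal value of a sequence; a stand-in built on collections.Counter (an assumption about its semantics, not PySAR's code) shows the idea. The corruption heuristic above then flags any grib file smaller than 90% of that modal size as a likely partial download.

from collections import Counter

def most_common(values):
    """Return the most frequent item in a sequence."""
    return Counter(values).most_common(1)[0][0]

sizes = [52428800, 52428800, 52428800, 1024]
print(most_common(sizes))  # 52428800 -> files below 0.9 * this are suspects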
Example #14
def set_mask():
    global mask, inps, atr

    if not inps.mask_file:
        if os.path.basename(inps.timeseries_file).startswith('geo_'):
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']

        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except IndexError:
            inps.mask_file = None

    try:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except Exception:
        mask = None
        print('No mask used.')
Example #15
def prepare_metadata(inps):
    inps.file = ut.get_file_list(inps.file, abspath=True)

    # Check input file type
    ext = os.path.splitext(inps.file[0])[1]
    if ext not in ['.unw', '.cor', '.int', '.byt']:
        return

    # check outfile and parallel option
    if inps.parallel:
        num_cores, inps.parallel, Parallel, delayed = ut.check_parallel(
            len(inps.file), print_msg=False)
    if len(inps.file) == 1:
        extract_metadata(inps.file[0])
    elif inps.parallel:
        Parallel(n_jobs=num_cores)(delayed(extract_metadata)(fname)
                                   for fname in inps.file)
    else:
        for fname in inps.file:
            extract_metadata(fname)
    return
Example #16
def extract_metadata4lookup_table(fname):
    """Read/extract attribute for .UTM_TO_RDC file from Gamma to ROI_PAC
    For example, it read input file, sim_150911-150922.UTM_TO_RDC, 
    find its associated par file, sim_150911-150922.utm.dem.par, read it, and
    convert to ROI_PAC style and write it to an rsc file, sim_150911-150922.UTM_TO_RDC.rsc"""

    # Check for an existing .rsc file
    rsc_file_list = ut.get_file_list(fname + '.rsc')
    if rsc_file_list:
        rsc_file = rsc_file_list[0]
        #print(rsc_file + ' exists, no need to re-extract.')
        return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]
    atr['Y_UNIT'] = 'degrees'
    atr['X_UNIT'] = 'degrees'

    par_file = os.path.splitext(fname)[0] + '.utm.dem.par'

    #print('read '+os.path.basename(par_file))
    par_dict = readfile.read_gamma_par(par_file)
    atr.update(par_dict)

    # Write to .rsc file
    rsc_file = fname + '.rsc'
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except Exception:
        atr_orig = dict()
    #keyList = [i for i in atr_orig.keys() if i in atr.keys()]
    if any((i not in atr_orig.keys() or atr_orig[i] != atr[i])
           for i in atr.keys()):
        print('writing >>> ' + os.path.basename(rsc_file))
        writefile.write_roipac_rsc(atr, out_file=rsc_file)
    return rsc_file
Example #17
def dload_grib_pyaps(date_list, hour, trop_model='ECMWF', weather_dir='./'):
    """Download weather re-analysis grib files using PyAPS
    Inputs:
        date_list   : list of string in YYYYMMDD format
        hour        : string in HH:MM or HH format
        trop_model  : string, name of the weather model, e.g. ECMWF, MERRA, NARR, ERA or MERRA1
        weather_dir : string, directory to save the downloaded grib files into
    Output:
        grib_file_list : list of string
    """
    print(
        '*' * 50 +
        '\nDownloading weather model data using PyAPS (Jolivet et al., 2011, GRL) ...'
    )
    # Grib data directory
    grib_dir = weather_dir + '/' + trop_model
    if not os.path.isdir(grib_dir):
        os.makedirs(grib_dir)
        print('making directory: ' + grib_dir)

    # Date list to grib file list
    grib_file_list = date_list2grib_file(date_list, hour, trop_model, grib_dir)

    # Get date list to download (skip already downloaded files)
    grib_file_existed = ut.get_file_list(grib_file_list)
    if grib_file_existed:
        grib_filesize_digit = ut.most_common(
            [len(str(os.path.getsize(i))) for i in grib_file_existed])
        grib_filesize_max2 = ut.most_common(
            [str(os.path.getsize(i))[0:2] for i in grib_file_existed])
        grib_file_corrupted = [
            i for i in grib_file_existed
            if (len(str(os.path.getsize(i))) != grib_filesize_digit
                or str(os.path.getsize(i))[0:2] != grib_filesize_max2)
        ]
        print('file size mode: %se%d bytes' %
              (grib_filesize_max2, grib_filesize_digit - 2))
        print('number of existing grib files   : %d' % len(grib_file_existed))
        if grib_file_corrupted:
            print('-' * 78)
            print('corrupted grib files detected! Deleting them for re-download...')
            print('number of corrupted grib files  : %d' % len(grib_file_corrupted))
            for i in grib_file_corrupted:
                rmCmd = 'rm ' + i
                print(rmCmd)
                os.system(rmCmd)
                grib_file_existed.remove(i)
            print('-' * 78)
    grib_file2download = sorted(
        list(set(grib_file_list) - set(grib_file_existed)))
    date_list2download = [
        str(re.findall(r'\d{8}', i)[0]) for i in grib_file2download
    ]
    print('number of grib files to download: %d' % len(date_list2download))
    print('-' * 78 + '\n')

    # Download grib file using PyAPS
    if trop_model == 'ECMWF':
        pa.ECMWFdload(date_list2download, hour, grib_dir)
    elif trop_model == 'MERRA':
        pa.MERRAdload(date_list2download, hour, grib_dir)
    elif trop_model == 'NARR':
        pa.NARRdload(date_list2download, hour, grib_dir)
    elif trop_model == 'ERA':
        pa.ERAdload(date_list2download, hour, grib_dir)
    elif trop_model == 'MERRA1':
        pa.MERRA1dload(date_list2download, hour, grib_dir)
    return grib_file_list
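
A hedged usage sketch for the downloader above; the dates and hour are illustrative, pyaps must be importable as pa, and the model-specific *dload calls perform the actual network I/O:

# Hypothetical call: fetch ECMWF grib files for two acquisition dates.
grib_files = dload_grib_pyaps(['20150911', '20150922'],
                              hour='06',
                              trop_model='ECMWF',
                              weather_dir='./WEATHER')
print(grib_files)  # expected paths, whether newly downloaded or pre-existing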
Example #18
def read_template(inps):
    print('\n**********  Read Template File  **********')
    # default template
    inps.templateFile = os.path.join(inps.workDir, 'pysarApp_template.txt')
    if not os.path.isfile(inps.templateFile):
        print('generate default template file: ' + inps.templateFile)
        with open(inps.templateFile, 'w') as f:
            f.write(TEMPLATE)
    else:
        inps.templateFile = check_obsolete_default_template(inps.templateFile)

    # custom template
    templateCustom = None
    if inps.templateFileCustom:
        # Copy custom template file to work directory
        if ut.update_file(os.path.basename(inps.templateFileCustom),
                          inps.templateFileCustom,
                          check_readable=False):
            shutil.copy2(inps.templateFileCustom, inps.workDir)
            print('copy {} to work directory'.format(
                os.path.basename(inps.templateFileCustom)))

        # Read custom template
        print('read custom template file: ' + inps.templateFileCustom)
        templateCustom = readfile.read_template(inps.templateFileCustom)
        # correct some loose type errors
        for key in templateCustom.keys():
            if templateCustom[key].lower() in ['default']:
                templateCustom[key] = 'auto'
            elif templateCustom[key].lower() in ['n', 'off', 'false']:
                templateCustom[key] = 'no'
            elif templateCustom[key].lower() in ['y', 'on', 'true']:
                templateCustom[key] = 'yes'
        for key in ['pysar.deramp', 'pysar.troposphericDelay.method']:
            if key in templateCustom.keys():
                templateCustom[key] = templateCustom[key].lower().replace(
                    '-', '_')
        if 'processor' in templateCustom.keys():
            templateCustom['pysar.load.processor'] = templateCustom[
                'processor']

        # Update default template with custom input template
        print('update default template based on input custom template')
        inps.templateFile = ut.update_template_file(inps.templateFile,
                                                    templateCustom)

    if inps.generate_template:
        raise SystemExit('Exit as planned after template file generation.')

    print('read default template file: ' + inps.templateFile)
    template = readfile.read_template(inps.templateFile)
    template = ut.check_template_auto_value(template)

    # Get existing files name: unavco_attributes.txt
    try:
        inps.unavcoMetadataFile = ut.get_file_list('unavco_attribute*txt',
                                                   abspath=True)[0]
    except IndexError:
        inps.unavcoMetadataFile = None
        print('No UNAVCO attributes file found.')

    return inps, template, templateCustom
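
The value-normalization loop above maps loose user spellings onto canonical tokens one branch at a time; the same logic can be written as a lookup table. This is an illustrative refactoring sketch, not the project's code:

# Canonical spellings for loosely-typed template values.
ALIASES = {
    'default': 'auto',
    'n': 'no', 'off': 'no', 'false': 'no',
    'y': 'yes', 'on': 'yes', 'true': 'yes',
}

def normalize_template(template):
    """Return a copy with loose boolean/auto spellings canonicalized."""
    return {key: ALIASES.get(value.lower(), value)
            for key, value in template.items()}

print(normalize_template({'pysar.geocode': 'ON'}))  # {'pysar.geocode': 'yes'}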
Example #19
def extract_metadata4interferogram(fname):
    """Read/extract attributes for PySAR from Gamma .unw, .cor and .int file
    Parameters: fname : str, Gamma interferogram filename or path,
                    e.g. /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw
    Returns:    atr : dict, Attributes dictionary
    """
    file_dir = os.path.dirname(fname)
    file_basename = os.path.basename(fname)

    rsc_file = fname+'.rsc'
    # if os.path.isfile(rsc_file):
    #    return rsc_file

    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]

    # Get info: date12, number of looks
    try:
        date12 = str(re.findall(r'\d{8}[-_]\d{8}', file_basename)[0])
    except IndexError:
        date12 = str(re.findall(r'\d{6}[-_]\d{6}', file_basename)[0])
    m_date, s_date = date12.replace('-', '_').split('_')
    atr['DATE12'] = ptime.yymmdd(m_date)+'-'+ptime.yymmdd(s_date)
    lks = os.path.splitext(file_basename.split(date12)[1])[0]

    # Read .off and .par file
    off_file = file_dir+'/*'+date12+lks+'.off'
    m_par_file = [file_dir+'/*'+m_date+lks+i for i in ['.amp.par', '.ramp.par']]
    s_par_file = [file_dir+'/*'+s_date+lks+i for i in ['.amp.par', '.ramp.par']]

    try:
        off_file = ut.get_file_list(off_file)[0]
    except IndexError:
        print('\nERROR: Cannot find .off file, it is supposed to look like: '+off_file)
    try:
        m_par_file = ut.get_file_list(m_par_file)[0]
    except IndexError:
        print('\nERROR: Cannot find master date .par file, it is supposed to look like: '+str(m_par_file))
    try:
        s_par_file = ut.get_file_list(s_par_file)[0]
    except IndexError:
        print('\nERROR: Cannot find slave date .par file, it is supposed to look like: '+str(s_par_file))

    par_dict = readfile.read_gamma_par(m_par_file)
    off_dict = readfile.read_gamma_par(off_file)
    atr.update(par_dict)
    atr.update(off_dict)

    # Perp Baseline Info
    atr = get_perp_baseline(m_par_file, s_par_file, off_file, atr)

    # LAT/LON_REF1/2/3/4
    atr = get_lalo_ref(m_par_file, atr)

    # Write to .rsc file
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except Exception:
        atr_orig = dict()
    if not set(atr.items()).issubset(set(atr_orig.items())):
        atr_out = {**atr_orig, **atr}
        print('merge %s, %s and %s into %s' % (os.path.basename(m_par_file),
                                               os.path.basename(s_par_file),
                                               os.path.basename(off_file),
                                               os.path.basename(rsc_file)))
        writefile.write_roipac_rsc(atr_out, out_file=rsc_file)

    return rsc_file
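
A usage sketch with the docstring's sample path (hypothetical data layout; the matching .off and .par companion files must exist next to the interferogram for the baseline and lat/lon attributes to be filled):

rsc_file = extract_metadata4interferogram(
    '/PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw')
# writes/refreshes /PopoSLT143TsxD/diff_filt_HDR_130118-130129_4rlks.unw.rsc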
Example #20
def main(iargs=None):
    start_time = time.time()
    inps = cmd_line_parse(iargs)
    if inps.version:
        raise SystemExit(version.version_description)

    #########################################
    # Initiation
    #########################################
    print(version.logo)

    # Project Name
    inps.projectName = None
    if inps.templateFileCustom:
        inps.templateFileCustom = os.path.abspath(inps.templateFileCustom)
        inps.projectName = os.path.splitext(
            os.path.basename(inps.templateFileCustom))[0]
        print('Project name: ' + inps.projectName)

    # Work directory
    if not inps.workDir:
        if autoPath and 'SCRATCHDIR' in os.environ and inps.projectName:
            inps.workDir = os.path.join(os.getenv('SCRATCHDIR'),
                                        inps.projectName, 'PYSAR')
        else:
            inps.workDir = os.getcwd()
    inps.workDir = os.path.abspath(inps.workDir)
    if not os.path.isdir(inps.workDir):
        os.makedirs(inps.workDir)
    os.chdir(inps.workDir)
    print("Go to work directory: " + inps.workDir)

    copy_aux_file(inps)

    inps, template, templateCustom = read_template(inps)

    #########################################
    # Loading Data
    #########################################
    print('\n**********  Load Data  **********')
    loadCmd = 'load_data.py --template {}'.format(inps.templateFile)
    if inps.projectName:
        loadCmd += ' --project {}'.format(inps.projectName)
    print(loadCmd)
    status = subprocess.Popen(loadCmd, shell=True).wait()
    os.chdir(inps.workDir)

    print('-' * 50)
    inps, atr = ut.check_loaded_dataset(inps.workDir, inps)

    # Add template options into HDF5 file metadata
    # if inps.templateFileCustom:
    #    atrCmd = 'add_attribute.py {} {}'.format(inps.stackFile, inps.templateFileCustom)
    #    print(atrCmd)
    #    status = subprocess.Popen(atrCmd, shell=True).wait()
    #ut.add_attribute(inps.stackFile, template)

    if inps.load_dataset:
        raise SystemExit('Exit as planned after loading/checking the dataset.')

    if inps.reset:
        print('Reset dataset attributes for a fresh re-run.\n' + '-' * 50)
        # Reset reference pixel
        refPointCmd = 'reference_point.py {} --reset'.format(inps.stackFile)
        print(refPointCmd)
        status = subprocess.Popen(refPointCmd, shell=True).wait()
        # Reset network modification
        networkCmd = 'modify_network.py {} --reset'.format(inps.stackFile)
        print(networkCmd)
        status = subprocess.Popen(networkCmd, shell=True).wait()

    #########################################
    # Generating Aux files
    #########################################
    print('\n**********  Generate Auxiliary Files  **********')
    # Initial mask (pixels with valid unwrapPhase or connectComponent in ALL interferograms)
    inps.maskFile = 'mask.h5'
    if ut.update_file(inps.maskFile, inps.stackFile):
        maskCmd = 'generate_mask.py {} --nonzero -o {}'.format(
            inps.stackFile, inps.maskFile)
        print(maskCmd)
        status = subprocess.Popen(maskCmd, shell=True).wait()

    # Average spatial coherence
    inps.avgSpatialCohFile = 'avgSpatialCoherence.h5'
    if ut.update_file(inps.avgSpatialCohFile, inps.stackFile):
        avgCmd = 'temporal_average.py {} --dataset coherence -o {}'.format(
            inps.stackFile, inps.avgSpatialCohFile)
        print(avgCmd)
        status = subprocess.Popen(avgCmd, shell=True).wait()

    #########################################
    # Referencing Interferograms in Space
    #########################################
    print('\n**********  Select Reference Point  **********')
    refPointCmd = 'reference_point.py {} -t {} -c {}'.format(
        inps.stackFile, inps.templateFile, inps.avgSpatialCohFile)
    print(refPointCmd)
    status = subprocess.Popen(refPointCmd, shell=True).wait()
    if status != 0:
        raise Exception('Error while finding reference pixel in space.\n')

    ############################################
    # Unwrapping Error Correction (Optional)
    #    based on the consistency of triplets
    #    of interferograms
    ############################################
    if template['pysar.unwrapError.method']:
        print('\n**********  Unwrapping Error Correction  **********')
        outName = '{}_unwCor.h5'.format(os.path.splitext(inps.stackFile)[0])
        unwCmd = 'unwrap_error.py {} --mask {} --template {}'.format(
            inps.stackFile, inps.maskFile, inps.templateFile)
        print(unwCmd)
        if ut.update_file(outName, inps.stackFile):
            print(
                'This might take a while depending on the size of your data set!'
            )
            status = subprocess.Popen(unwCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting phase unwrapping errors.\n')
        inps.stackFile = outName

    #########################################
    # Network Modification (Optional)
    #########################################
    print('\n**********  Modify Network  **********')
    networkCmd = 'modify_network.py {} -t {}'.format(inps.stackFile,
                                                     inps.templateFile)
    print(networkCmd)
    status = subprocess.Popen(networkCmd, shell=True).wait()
    if status != 0:
        raise Exception(
            'Error while modifying the network of interferograms.\n')

    # Plot network colored in spatial coherence
    print('--------------------------------------------------')
    plotCmd = 'plot_network.py {} --template {} --nodisplay'.format(
        inps.stackFile, inps.templateFile)
    print(plotCmd)
    inps.cohSpatialAvgFile = '{}_coherence_spatialAverage.txt'.format(
        os.path.splitext(os.path.basename(inps.stackFile))[0])
    if ut.update_file(
            'Network.pdf',
            check_readable=False,
            inFile=[inps.stackFile, inps.cohSpatialAvgFile,
                    inps.templateFile]):
        status = subprocess.Popen(plotCmd, shell=True).wait()

    if inps.modify_network:
        raise SystemExit('Exit as planned after network modification.')

    #########################################
    # Inversion of Interferograms
    ########################################
    print(
        '\n**********  Invert Network of Interferograms into Time-series  **********'
    )
    invCmd = 'ifgram_inversion.py {} --template {}'.format(
        inps.stackFile, inps.templateFile)
    print(invCmd)
    inps.timeseriesFile = 'timeseries.h5'
    inps.tempCohFile = 'temporalCoherence.h5'
    if ut.update_file(inps.timeseriesFile, inps.stackFile):
        status = subprocess.Popen(invCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while inverting network interferograms into timeseries')

    print('\n--------------------------------------------')
    print('Update Mask based on Temporal Coherence ...')
    inps.maskFile = 'maskTempCoh.h5'
    inps.minTempCoh = template['pysar.networkInversion.minTempCoh']
    maskCmd = 'generate_mask.py {} -m {} -o {}'.format(inps.tempCohFile,
                                                       inps.minTempCoh,
                                                       inps.maskFile)
    print(maskCmd)
    if ut.update_file(inps.maskFile, inps.tempCohFile):
        status = subprocess.Popen(maskCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while generating mask file from temporal coherence.')

    if inps.invert_network:
        raise SystemExit('Exit as planned after network inversion.')

    # check number of pixels selected in mask file for following analysis
    min_num_pixel = float(template['pysar.networkInversion.minNumPixel'])
    msk = readfile.read(inps.maskFile)[0]
    num_pixel = np.sum(msk != 0.)
    print('number of pixels selected: {}'.format(num_pixel))
    if num_pixel < min_num_pixel:
        msg = "Not enought coherent pixels selected (minimum of {}). ".format(
            int(min_num_pixel))
        msg += "Try the following:\n"
        msg += "1) Check the reference pixel and make sure it's not in areas with unwrapping errors\n"
        msg += "2) Check the network and make sure it's fully connected without subsets"
        raise RuntimeError(msg)
    del msk

    ##############################################
    # LOD (Local Oscillator Drift) Correction
    #   for Envisat data in radar coord only
    ##############################################
    if atr['PLATFORM'].lower().startswith('env'):
        print(
            '\n**********  Local Oscillator Drift Correction for Envisat  **********'
        )
        outName = os.path.splitext(inps.timeseriesFile)[0] + '_LODcor.h5'
        lodCmd = 'local_oscilator_drift.py {} {} -o {}'.format(
            inps.timeseriesFile, inps.geomFile, outName)
        print(lodCmd)
        if ut.update_file(outName, [inps.timeseriesFile, inps.geomFile]):
            status = subprocess.Popen(lodCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting Local Oscillator Drift.\n')
        inps.timeseriesFile = outName

    ##############################################
    # Tropospheric Delay Correction (Optional)
    ##############################################
    print('\n**********  Tropospheric Delay Correction  **********')
    inps.tropPolyOrder = template['pysar.troposphericDelay.polyOrder']
    inps.tropModel = template['pysar.troposphericDelay.weatherModel']
    inps.tropMethod = template['pysar.troposphericDelay.method']
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    if inps.tropMethod:
        # Check Conflict with base_trop_cor
        if template['pysar.deramp'] == 'base_trop_cor':
            msg = """
            Method Conflict: base_trop_cor is in conflict with {} option!
            base_trop_cor applies simultaneous ramp removal AND tropospheric correction.
            IGNORE base_trop_cor input and continue pysarApp.py.
            """
            warnings.warn(msg)
            template['pysar.deramp'] = False

        fbase = os.path.splitext(inps.timeseriesFile)[0]
        # Call scripts
        if inps.tropMethod == 'height_correlation':
            outName = '{}_tropHgt.h5'.format(fbase)
            print(
                'tropospheric delay correction with height-correlation approach'
            )
            tropCmd = ('tropcor_phase_elevation.py {t} -d {d} -p {p}'
                       ' -m {m} -o {o}').format(t=inps.timeseriesFile,
                                                d=inps.geomFile,
                                                p=inps.tropPolyOrder,
                                                m=inps.maskFile,
                                                o=outName)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while correcting tropospheric delay.\n')
            inps.timeseriesFile = outName

        elif inps.tropMethod == 'pyaps':
            inps.weatherDir = template['pysar.troposphericDelay.weatherDir']
            outName = '{}_{}.h5'.format(fbase, inps.tropModel)
            print(('Atmospheric correction using Weather Re-analysis dataset'
                   ' (PyAPS, Jolivet et al., 2011)'))
            print('Weather Re-analysis dataset: ' + inps.tropModel)
            tropCmd = ('tropcor_pyaps.py -f {t} --model {m} --dem {d}'
                       ' -i {i} -w {w}').format(t=inps.timeseriesFile,
                                                m=inps.tropModel,
                                                d=inps.geomFile,
                                                i=inps.geomFile,
                                                w=inps.weatherDir)
            print(tropCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                if inps.tropFile:
                    tropCmd = 'diff.py {} {} -o {}'.format(
                        inps.timeseriesFile, inps.tropFile, outName)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(
                        inps.tropFile))
                    print(tropCmd)
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    print(
                        '\nError while correcting tropospheric delay, try the following:'
                    )
                    print('1) Check the installation of PyAPS')
                    print(
                        '   http://earthdef.caltech.edu/projects/pyaps/wiki/Main'
                    )
                    print('   Try in command line: python -c "import pyaps"')
                    print(
                        '2) Use another tropospheric correction method, e.g. height-correlation'
                    )
                    print(
                        '3) or turn off the option by setting pysar.troposphericDelay.method = no.\n'
                    )
                    raise RuntimeError()
            inps.timeseriesFile = outName
        else:
            print('No atmospheric delay correction.')

    # Grab tropospheric delay file
    try:
        fileList = [
            os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))
        ]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    ##############################################
    # Topographic (DEM) Residuals Correction (Optional)
    ##############################################
    print(
        '\n**********  Topographic Residual (DEM error) Correction  **********'
    )
    outName = os.path.splitext(inps.timeseriesFile)[0] + '_demErr.h5'
    topoCmd = 'dem_error.py {} -g {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.geomFile, inps.templateFile, outName)
    print(topoCmd)
    inps.timeseriesResFile = None
    if template['pysar.topographicResidual']:
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(topoCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while correcting topographic phase residual.\n')
        inps.timeseriesFile = outName
        inps.timeseriesResFile = 'timeseriesResidual.h5'
    else:
        print('No correction for topographic residuals.')

    ##############################################
    # Timeseries Residual Standard Deviation
    ##############################################
    print('\n**********  Timeseries Residual Root Mean Square  **********')
    if inps.timeseriesResFile:
        rmsCmd = 'timeseries_rms.py {} -t {}'.format(inps.timeseriesResFile,
                                                     inps.templateFile)
        print(rmsCmd)
        status = subprocess.Popen(rmsCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while calculating RMS of time series phase residual.\n')
    else:
        print('No timeseries residual file found! Skip residual RMS analysis.')

    ##############################################
    # Reference in Time
    ##############################################
    print('\n**********  Select Reference Date  **********')
    if template['pysar.reference.date']:
        outName = '{}_refDate.h5'.format(
            os.path.splitext(inps.timeseriesFile)[0])
        refCmd = 'reference_date.py {} -t {} -o {}'.format(
            inps.timeseriesFile, inps.templateFile, outName)
        print(refCmd)
        if ut.update_file(outName, inps.timeseriesFile):
            status = subprocess.Popen(refCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while changing reference date.\n')
        inps.timeseriesFile = outName
    else:
        print('No reference change in time.')

    ##############################################
    # Phase Ramp Correction (Optional)
    ##############################################
    print('\n**********  Remove Phase Ramp  **********')
    inps.derampMaskFile = template['pysar.deramp.maskFile']
    inps.derampMethod = template['pysar.deramp']
    if inps.derampMethod:
        print('Phase Ramp Removal method: {}'.format(inps.derampMethod))
        if inps.geocoded and inps.derampMethod in [
                'baseline_cor', 'base_trop_cor'
        ]:
            warnings.warn(
                ('dataset is in geo coordinates,'
                 ' cannot apply {} method').format(inps.derampMethod))
            print('skip deramping and continue.')

        # Get executable command and output name
        derampCmd = None
        fbase = os.path.splitext(inps.timeseriesFile)[0]
        if inps.derampMethod in [
                'plane', 'quadratic', 'plane_range', 'quadratic_range',
                'plane_azimuth', 'quadratic_azimuth'
        ]:
            outName = '{}_{}.h5'.format(fbase, inps.derampMethod)
            derampCmd = 'remove_ramp.py {} -s {} -m {} -o {}'.format(
                inps.timeseriesFile, inps.derampMethod, inps.derampMaskFile,
                outName)

        elif inps.derampMethod == 'baseline_cor':
            outName = '{}_baselineCor.h5'.format(fbase)
            derampCmd = 'baseline_error.py {} {}'.format(
                inps.timeseriesFile, inps.maskFile)

        elif inps.derampMethod in [
                'base_trop_cor', 'basetropcor', 'baselinetropcor'
        ]:
            print('Joint estimation of Baseline error and tropospheric delay')
            print('\t[height-correlation approach]')
            outName = '{}_baseTropCor.h5'.format(fbase)
            derampCmd = ('baseline_trop.py {t} {d} {p}'
                         ' range_and_azimuth {m}').format(
                             t=inps.timeseriesFile,
                             d=inps.geomFile,
                             p=inps.tropPolyOrder,
                             m=inps.maskFile)
        else:
            warnings.warn('Unrecognized phase ramp method: {}'.format(
                template['pysar.deramp']))

        # Execute command
        if derampCmd:
            print(derampCmd)
            if ut.update_file(outName, inps.timeseriesFile):
                status = subprocess.Popen(derampCmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while removing phase ramp for time-series.\n')
            inps.timeseriesFile = outName
    else:
        print('No phase ramp removal.')

    #############################################
    # Velocity and rmse maps
    #############################################
    print('\n**********  Estimate Velocity  **********')
    inps.velFile = 'velocity.h5'
    velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
        inps.timeseriesFile, inps.templateFile, inps.velFile)
    print(velCmd)
    if ut.update_file(inps.velFile, [inps.timeseriesFile, inps.templateFile]):
        status = subprocess.Popen(velCmd, shell=True).wait()
        if status != 0:
            raise Exception(
                'Error while estimating linear velocity from time-series.\n')

    # Velocity from Tropospheric delay
    if inps.tropFile:
        suffix = os.path.splitext(os.path.basename(inps.tropFile))[0].title()
        inps.tropVelFile = '{}{}.h5'.format(
            os.path.splitext(inps.velFile)[0], suffix)
        velCmd = 'timeseries2velocity.py {} -t {} -o {}'.format(
            inps.tropFile, inps.templateFile, inps.tropVelFile)
        print(velCmd)
        if ut.update_file(inps.tropVelFile,
                          [inps.tropFile, inps.templateFile]):
            status = subprocess.Popen(velCmd, shell=True).wait()

    ############################################
    # Post-processing
    # Geocoding --> Masking --> KMZ & HDF-EOS5
    ############################################
    print('\n**********  Post-processing  **********')
    if template['pysar.save.hdfEos5'] is True and template[
            'pysar.geocode'] is False:
        print('Turn ON pysar.geocode to be able to save to HDF-EOS5 format.')
        template['pysar.geocode'] = True

    # Geocoding
    if not inps.geocoded:
        if template['pysar.geocode'] is True:
            print('\n--------------------------------------------')
            geo_dir = os.path.abspath('./GEOCODE')
            if not os.path.isdir(geo_dir):
                os.makedirs(geo_dir)
                print('create directory: {}'.format(geo_dir))
            geoCmd = ('geocode.py {v} {c} {t} {g} -l {l} -t {e}'
                      ' --outdir {d} --update').format(v=inps.velFile,
                                                       c=inps.tempCohFile,
                                                       t=inps.timeseriesFile,
                                                       g=inps.geomFile,
                                                       l=inps.lookupFile,
                                                       e=inps.templateFile,
                                                       d=geo_dir)
            print(geoCmd)
            status = subprocess.Popen(geoCmd, shell=True).wait()
            if status != 0:
                raise Exception('Error while geocoding.\n')
            else:
                inps.velFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.velFile))
                inps.tempCohFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.tempCohFile))
                inps.timeseriesFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.timeseriesFile))
                inps.geomFile = os.path.join(
                    geo_dir, 'geo_' + os.path.basename(inps.geomFile))
                inps.geocoded = True

            # generate mask based on geocoded temporal coherence
            print('\n--------------------------------------------')
            outName = os.path.join(geo_dir, 'geo_maskTempCoh.h5')
            genCmd = 'generate_mask.py {} -m {} -o {}'.format(
                inps.tempCohFile, inps.minTempCoh, outName)
            print(genCmd)
            if ut.update_file(outName, inps.tempCohFile):
                status = subprocess.Popen(genCmd, shell=True).wait()
            inps.maskFile = outName

    # mask velocity file
    if inps.velFile and inps.maskFile:
        outName = '{}_masked.h5'.format(os.path.splitext(inps.velFile)[0])
        maskCmd = 'mask.py {} -m {} -o {}'.format(inps.velFile, inps.maskFile,
                                                  outName)
        print(maskCmd)
        if ut.update_file(outName, [inps.velFile, inps.maskFile]):
            status = subprocess.Popen(maskCmd, shell=True).wait()
        try:
            inps.velFile = glob.glob(outName)[0]
        except IndexError:
            inps.velFile = None

    # Save to Google Earth KML file
    if inps.geocoded and inps.velFile and template['pysar.save.kml'] is True:
        print('\n--------------------------------------------')
        print('creating Google Earth KMZ file for geocoded velocity file: ...')
        outName = '{}.kmz'.format(
            os.path.splitext(os.path.basename(inps.velFile))[0])
        kmlCmd = 'save_kml.py {} -o {}'.format(inps.velFile, outName)
        print(kmlCmd)
        if ut.update_file(outName, inps.velFile, check_readable=False):
            status = subprocess.Popen(kmlCmd, shell=True).wait()
            if status != 0:
                raise Exception(
                    'Error while generating Google Earth KMZ file.')

    #############################################
    # Save Timeseries to HDF-EOS5 format
    #############################################
    if template['pysar.save.hdfEos5'] is True:
        print('\n**********  Save Time-series in HDF-EOS5 Format  **********')
        if not inps.geocoded:
            warnings.warn(
                'Dataset is in radar coordinates, skip saving to HDF-EOS5 format.'
            )
        else:
            # Add attributes from custom template to timeseries file
            if templateCustom is not None:
                ut.add_attribute(inps.timeseriesFile, templateCustom)

            # Save to HDF-EOS5 format
            print('--------------------------------------------')
            hdfeos5Cmd = ('save_hdfeos5.py {t} -c {c} -m {m} -g {g}'
                          ' -t {e}').format(t=inps.timeseriesFile,
                                            c=inps.tempCohFile,
                                            m=inps.maskFile,
                                            g=inps.geomFile,
                                            e=inps.templateFile)
            print(hdfeos5Cmd)
            SAT = hdfeos5.get_mission_name(atr)
            try:
                inps.hdfeos5File = ut.get_file_list('{}_*.he5'.format(SAT))[0]
            except IndexError:
                inps.hdfeos5File = None
            if ut.update_file(inps.hdfeos5File, [
                    inps.timeseriesFile, inps.tempCohFile, inps.maskFile,
                    inps.geomFile
            ]):
                status = subprocess.Popen(hdfeos5Cmd, shell=True).wait()
                if status != 0:
                    raise Exception(
                        'Error while generating HDF-EOS5 time-series file.\n')

    #############################################
    # Plot Figures
    #############################################
    inps.plotShellFile = os.path.join(os.path.dirname(__file__),
                                      '../sh/plot_pysarApp.sh')
    plotCmd = './' + os.path.basename(inps.plotShellFile)
    inps.plot = template['pysar.plot']
    if inps.plot is True:
        print('\n**********  Plot Results / Save to PIC  **********')
        # Copy to working directory if it does not exist yet.
        if not os.path.isfile(plotCmd):
            print('copy {} to work directory: {}'.format(
                inps.plotShellFile, inps.workDir))
            shutil.copy2(inps.plotShellFile, inps.workDir)

    if inps.plot and os.path.isfile(plotCmd):
        print(plotCmd)
        status = subprocess.Popen(plotCmd, shell=True).wait()
        print('\n' + '-' * 50)
        print('For better figures:')
        print(
            '  1) Edit parameters in plot_pysarApp.sh and re-run this script.')
        print(
            '  2) Play with view.py, tsview.py and save_kml.py for more advanced/customized figures.'
        )
        if status != 0:
            raise Exception(
                'Error while plotting data files using {}'.format(plotCmd))

    #############################################
    # Time                                      #
    #############################################
    m, s = divmod(time.time() - start_time, 60)
    print('\ntime used: {:02.0f} mins {:02.1f} secs'.format(m, s))
    print('\n###############################################')
    print('End of PySAR processing!')
    print('################################################\n')
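
The workflow above repeats the subprocess.Popen(cmd, shell=True).wait() plus status-check pattern dozens of times; one possible consolidation, shown purely as a refactoring sketch, is a small helper:

import subprocess

def run_cmd(cmd, error_msg):
    """Print and run a shell command; raise with a clear message on failure."""
    print(cmd)
    status = subprocess.Popen(cmd, shell=True).wait()
    if status != 0:
        raise RuntimeError(error_msg)
    return status

# e.g. run_cmd('generate_mask.py {} --nonzero -o {}'.format(stack_file, mask_file),
#              'Error while generating mask file.')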
Example #21
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)
    inps.file = ut.get_file_list(inps.file, abspath=True)
    inps.outfile = os.path.abspath(inps.outfile)
    return inps
Example #22
def main(iargs=None):
    # Actual code.
    inps = cmd_line_parse(iargs)

    # Time Series Info
    atr = readfile.read_attribute(inps.timeseries_file)
    k = atr['FILE_TYPE']
    print('input file is ' + k + ': ' + inps.timeseries_file)
    if k not in ['timeseries', 'GIANT_TS']:
        raise ValueError('Only timeseries file is supported!')

    obj = timeseries(inps.timeseries_file)
    obj.open()
    h5 = h5py.File(inps.timeseries_file, 'r')
    if k in ['GIANT_TS']:
        dateList = [
            dt.fromordinal(int(i)).strftime('%Y%m%d')
            for i in h5['dates'][:].tolist()
        ]
    else:
        dateList = obj.dateList
    date_num = len(dateList)
    inps.dates, inps.yearList = ptime.date_list2vector(dateList)

    # Read exclude dates
    if inps.ex_date_list:
        input_ex_date = list(inps.ex_date_list)
        inps.ex_date_list = []
        if input_ex_date:
            for ex_date in input_ex_date:
                if os.path.isfile(ex_date):
                    ex_date = ptime.read_date_list(ex_date)
                else:
                    ex_date = [ptime.yyyymmdd(ex_date)]
                inps.ex_date_list += list(
                    set(ex_date) - set(inps.ex_date_list))
            # drop dates that do not exist in the input file
            inps.ex_date_list = sorted(
                list(set(inps.ex_date_list).intersection(dateList)))
            inps.ex_dates = ptime.date_list2vector(inps.ex_date_list)[0]
            inps.ex_idx_list = sorted(
                [dateList.index(i) for i in inps.ex_date_list])
            print('exclude date:' + str(inps.ex_date_list))

    # Zero displacement for 1st acquisition
    if inps.zero_first:
        if inps.ex_date_list:
            inps.zero_idx = min(
                list(set(range(date_num)) - set(inps.ex_idx_list)))
        else:
            inps.zero_idx = 0

    # File Size
    length = int(atr['LENGTH'])
    width = int(atr['WIDTH'])
    print('data size in [y0,y1,x0,x1]: [%d, %d, %d, %d]' %
          (0, length, 0, width))
    try:
        ullon = float(atr['X_FIRST'])
        ullat = float(atr['Y_FIRST'])
        lon_step = float(atr['X_STEP'])
        lat_step = float(atr['Y_STEP'])
        lrlon = ullon + width * lon_step
        lrlat = ullat + length * lat_step
        print('data size in [lat0,lat1,lon0,lon1]: [%.4f, %.4f, %.4f, %.4f]' %
              (lrlat, ullat, ullon, lrlon))
    except (KeyError, ValueError):
        pass

    # Initial Pixel Coord
    if inps.lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.lalo[1] - ullon) / lon_step + 0.5)
        inps.yx = [y, x]

    if inps.ref_lalo and 'Y_FIRST' in atr.keys():
        y = int((inps.ref_lalo[0] - ullat) / lat_step + 0.5)
        x = int((inps.ref_lalo[1] - ullon) / lon_step + 0.5)
        inps.ref_yx = [y, x]

    # Display Unit
    if inps.disp_unit == 'cm':
        inps.unit_fac = 100.0
    elif inps.disp_unit == 'm':
        inps.unit_fac = 1.0
    elif inps.disp_unit == 'dm':
        inps.unit_fac = 10.0
    elif inps.disp_unit == 'mm':
        inps.unit_fac = 1000.0
    elif inps.disp_unit == 'km':
        inps.unit_fac = 0.001
    else:
        raise ValueError('Unrecognized unit: ' + inps.disp_unit)
    if k in ['GIANT_TS']:
        print('data    unit: mm')
        inps.unit_fac *= 0.001
    else:
        print('data    unit: m')
    print('display unit: ' + inps.disp_unit)

    # Flip up-down / left-right
    if inps.auto_flip:
        inps.flip_lr, inps.flip_ud = pp.auto_flip_direction(atr)
    else:
        inps.flip_ud = False
        inps.flip_lr = False

    # Mask file
    if not inps.mask_file:
        if os.path.basename(inps.timeseries_file).startswith('geo_'):
            file_list = ['geo_maskTempCoh.h5']
        else:
            file_list = ['maskTempCoh.h5', 'mask.h5']
        try:
            inps.mask_file = ut.get_file_list(file_list)[0]
        except IndexError:
            inps.mask_file = None
    try:
        mask = readfile.read(inps.mask_file, datasetName='mask')[0]
        mask[mask != 0] = 1
        print('load mask from file: ' + inps.mask_file)
    except Exception:
        mask = None
        print('No mask used.')

    # Initial Map
    d_v = readfile.read(
        inps.timeseries_file,
        datasetName=dateList[inps.epoch_num])[0] * inps.unit_fac
    if inps.ref_date:
        inps.ref_d_v = readfile.read(
            inps.timeseries_file, datasetName=inps.ref_date)[0] * inps.unit_fac
        d_v -= inps.ref_d_v
    if mask is not None:
        d_v = mask_matrix(d_v, mask)
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
    data_lim = [np.nanmin(d_v), np.nanmax(d_v)]

    if not inps.ylim_mat:
        inps.ylim_mat = data_lim
    print('Initial data range: ' + str(data_lim))
    print('Display data range: ' + str(inps.ylim_mat))

    # Fig 1 - Cumulative Displacement Map
    if not inps.disp_fig:
        plt.switch_backend('Agg')

    fig_v = plt.figure('Cumulative Displacement')

    # Axes 1
    #ax_v = fig_v.add_subplot(111)
    # ax_v.set_position([0.125,0.25,0.75,0.65])
    # This works on OSX. Original worked on Linux.
    # rect[left, bottom, width, height]
    ax_v = fig_v.add_axes([0.125, 0.25, 0.75, 0.65])
    if inps.dem_file:
        dem = readfile.read(inps.dem_file, datasetName='height')[0]
        ax_v = pp.plot_dem_yx(ax_v, dem)
    img = ax_v.imshow(d_v,
                      cmap=inps.colormap,
                      clim=inps.ylim_mat,
                      interpolation='nearest')

    # Reference Pixel
    if inps.ref_yx:
        d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        ax_v.plot(inps.ref_yx[1], inps.ref_yx[0], 'ks', ms=6)
    else:
        try:
            ax_v.plot(int(atr['REF_X']), int(atr['REF_Y']), 'ks', ms=6)
        except (KeyError, ValueError):
            pass

    # Initial Pixel
    if inps.yx:
        ax_v.plot(inps.yx[1], inps.yx[0], 'ro', markeredgecolor='black')

    ax_v.set_xlim(0, np.shape(d_v)[1])
    ax_v.set_ylim(np.shape(d_v)[0], 0)

    # Status Bar
    def format_coord(x, y):
        col = int(x + 0.5)
        row = int(y + 0.5)
        if 0 <= col < width and 0 <= row < length:
            z = d_v[row, col]
            try:
                lon = ullon + x * lon_step
                lat = ullat + y * lat_step
                return 'x=%.0f, y=%.0f, value=%.4f, lon=%.4f, lat=%.4f' % (
                    x, y, z, lon, lat)
            except NameError:
                # ullon/ullat are undefined for data in radar coordinates
                return 'x=%.0f, y=%.0f, value=%.4f' % (x, y, z)
        return 'x=%.0f, y=%.0f' % (x, y)

    ax_v.format_coord = format_coord

    # Title and Axis Label
    ax_v.set_title(
        'N = %d, Time = %s' %
        (inps.epoch_num, inps.dates[inps.epoch_num].strftime('%Y-%m-%d')))
    if 'Y_FIRST' not in atr.keys():
        ax_v.set_xlabel('Range')
        ax_v.set_ylabel('Azimuth')

    # Flip axis
    if inps.flip_lr:
        ax_v.invert_xaxis()
        print('flip map left and right')
    if inps.flip_ud:
        ax_v.invert_yaxis()
        print('flip map up and down')

    # Colorbar
    cbar = fig_v.colorbar(img, orientation='vertical')
    cbar.set_label('Displacement [%s]' % inps.disp_unit)

    # Axes 2 - Time Slider
    ax_time = fig_v.add_axes([0.125, 0.1, 0.6, 0.07],
                             facecolor='lightgoldenrodyellow',
                             yticks=[])
    tslider = Slider(ax_time,
                     'Years',
                     inps.yearList[0],
                     inps.yearList[-1],
                     valinit=inps.yearList[inps.epoch_num])
    tslider.ax.bar(inps.yearList,
                   np.ones(len(inps.yearList)),
                   facecolor='black',
                   width=0.01,
                   ecolor=None)
    tslider.ax.set_xticks(
        np.round(
            np.linspace(inps.yearList[0], inps.yearList[-1], num=5) * 100) /
        100)

    def time_slider_update(val):
        """Update Displacement Map using Slider"""
        nonlocal d_v  # keep the status-bar readout in sync with the shown epoch
        timein = tslider.val
        idx_nearest = np.argmin(np.abs(np.array(inps.yearList) - timein))
        ax_v.set_title(
            'N = %d, Time = %s' %
            (idx_nearest, inps.dates[idx_nearest].strftime('%Y-%m-%d')))
        d_v = h5[dateList[idx_nearest]][:] * inps.unit_fac
        if inps.ref_date:
            d_v -= inps.ref_d_v
        if mask is not None:
            d_v = mask_matrix(d_v, mask)
        if inps.ref_yx:
            d_v -= d_v[inps.ref_yx[0], inps.ref_yx[1]]
        img.set_data(d_v)
        fig_v.canvas.draw()

    tslider.on_changed(time_slider_update)
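    # Note: Slider.on_changed fires for every value change while dragging, so
    # the read/reference/mask chain above is re-run per slider position.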

    # Fig 2 - Time Series Displacement - Point
    fig_ts = plt.figure('Time series - point', figsize=inps.fig_size)
    ax_ts = fig_ts.add_subplot(111)

    # Read Error List
    inps.error_ts = None
    if inps.error_file:
        error_fileContent = np.loadtxt(inps.error_file,
                                       dtype=bytes).astype(str)
        inps.error_ts = error_fileContent[:, 1].astype(
            float) * inps.unit_fac
        if inps.ex_date_list:
            e_ts = inps.error_ts[:]
            inps.ex_error_ts = np.array([e_ts[i] for i in inps.ex_idx_list])
            inps.error_ts = np.array([
                e_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])
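
    # Helper plotters for the point time series: lw=0 in the errorbar calls
    # below suppresses the line connecting the markers, so only markers and
    # error bars are drawn; excluded dates are plotted in gray.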

    def plot_timeseries_errorbar(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            (_, caps, _) = ax.errorbar(inps.ex_dates,
                                       ex_d_ts,
                                       yerr=inps.ex_error_ts,
                                       fmt='-o',
                                       color='gray',
                                       ms=inps.marker_size,
                                       lw=0,
                                       alpha=1,
                                       mfc='gray',
                                       elinewidth=inps.edge_width,
                                       ecolor='black',
                                       capsize=inps.marker_size * 0.5)
            for cap in caps:
                cap.set_markeredgewidth(inps.edge_width)
        # Plot kept dates
        (_, caps, _) = ax.errorbar(dates,
                                   d_ts,
                                   yerr=inps.error_ts,
                                   fmt='-o',
                                   ms=inps.marker_size,
                                   lw=0,
                                   alpha=1,
                                   elinewidth=inps.edge_width,
                                   ecolor='black',
                                   capsize=inps.marker_size * 0.5)
        for cap in caps:
            cap.set_markeredgewidth(inps.edge_width)
        return ax

    def plot_timeseries_scatter(ax, dis_ts, inps):
        dates = list(inps.dates)
        d_ts = dis_ts[:]
        if inps.ex_date_list:
            # Update displacement time-series
            dates = sorted(list(set(inps.dates) - set(inps.ex_dates)))
            ex_d_ts = np.array([dis_ts[i] for i in inps.ex_idx_list])
            d_ts = np.array([
                dis_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ])
            # Plot excluded dates
            ax.scatter(inps.ex_dates,
                       ex_d_ts,
                       s=inps.marker_size**2,
                       color='gray')  # color='crimson'
        # Plot kept dates
        ax.scatter(dates, d_ts, s=inps.marker_size**2)
        return ax

    def update_timeseries(ax_ts, y, x):
        """Plot point time series displacement at pixel [y, x]"""
        d_ts = read_timeseries_yx(
            inps.timeseries_file, y, x, ref_yx=inps.ref_yx) * inps.unit_fac

        if inps.zero_first:
            d_ts -= d_ts[inps.zero_idx]

        ax_ts.cla()
        if inps.error_file:
            ax_ts = plot_timeseries_errorbar(ax_ts, d_ts, inps)
        else:
            ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)
        if inps.ylim:
            ax_ts.set_ylim(inps.ylim)
        ax_ts.tick_params(axis='y', labelsize=inps.font_size)

        # Title
        title_ts = 'Y = %d, X = %d' % (y, x)
        try:
            lat = ullat + y * lat_step
            lon = ullon + x * lon_step
            title_ts += ', lat = %.4f, lon = %.4f' % (lat, lon)
        except Exception:
            pass
        if inps.disp_title:
            ax_ts.set_title(title_ts)

        ax_ts = pp.auto_adjust_xaxis_date(ax_ts,
                                          inps.yearList,
                                          fontSize=inps.font_size)[0]
        ax_ts.set_xlabel('Time', fontsize=inps.font_size)
        ax_ts.set_ylabel('Displacement [%s]' % inps.disp_unit,
                         fontsize=inps.font_size)

        fig_ts.canvas.draw()

        # Print to terminal
        print('\n---------------------------------------')
        print(title_ts)
        print(d_ts)

        # Slope estimation
        if inps.ex_date_list:
            inps.yearList_kept = [
                inps.yearList[i] for i in range(date_num)
                if i not in inps.ex_idx_list
            ]
            d_ts_kept = [
                d_ts[i] for i in range(date_num) if i not in inps.ex_idx_list
            ]
            d_slope = stats.linregress(np.array(inps.yearList_kept),
                                       np.array(d_ts_kept))
        else:
            d_slope = stats.linregress(np.array(inps.yearList), np.array(d_ts))
        print('linear velocity: %.2f +/- %.2f [%s/yr]' %
              (d_slope.slope, d_slope.stderr, inps.disp_unit))

        return d_ts

    # Initial point time series plot
    if inps.yx:
        d_ts = update_timeseries(ax_ts, inps.yx[0], inps.yx[1])
    else:
        d_ts = np.zeros(len(inps.yearList))
        ax_ts = plot_timeseries_scatter(ax_ts, d_ts, inps)

    def plot_timeseries_event(event):
        """Event function to get y/x from button press"""
        if event.inaxes != ax_v:
            return

        ii = int(event.ydata + 0.5)
        jj = int(event.xdata + 0.5)
        d_ts = update_timeseries(ax_ts, ii, jj)

    # Output
    if inps.save_fig and inps.yx:
        print('save info for pixel ' + str(inps.yx))
        if not inps.fig_base:
            inps.fig_base = 'y%d_x%d' % (inps.yx[0], inps.yx[1])

        # TXT - point time series
        outName = inps.fig_base + '_ts.txt'
        header_info = 'timeseries_file=' + inps.timeseries_file
        header_info += '\ny=%d, x=%d' % (inps.yx[0], inps.yx[1])
        try:
            lat = ullat + inps.yx[0] * lat_step
            lon = ullon + inps.yx[1] * lon_step
            header_info += '\nlat=%.6f, lon=%.6f' % (lat, lon)
        except Exception:
            pass
        if inps.ref_yx:
            header_info += '\nreference pixel: y=%d, x=%d' % (inps.ref_yx[0],
                                                              inps.ref_yx[1])
        else:
            header_info += '\nreference pixel: y=%s, x=%s' % (atr['REF_Y'],
                                                              atr['REF_X'])
        header_info += '\nunit=m'
        np.savetxt(outName,
                   list(zip(np.array(dateList),
                            np.array(d_ts) / inps.unit_fac)),
                   fmt='%s',
                   delimiter='    ',
                   header=header_info)
        print('save time series displacement in meters to ' + outName)

        # Figure - point time series
        outName = inps.fig_base + '_ts.pdf'
        fig_ts.savefig(outName,
                       bbox_inches='tight',
                       transparent=True,
                       dpi=inps.fig_dpi)
        print('save time series plot to ' + outName)

        # Figure - map
        outName = inps.fig_base + '_' + dateList[inps.epoch_num] + '.png'
        fig_v.savefig(outName,
                      bbox_inches='tight',
                      transparent=True,
                      dpi=inps.fig_dpi)
        print('save map plot to ' + outName)

    # Connect the canvas to the point-picking callback.
    cid = fig_v.canvas.mpl_connect('button_press_event', plot_timeseries_event)
    if inps.disp_fig:
        plt.show()
    fig_v.canvas.mpl_disconnect(cid)
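
# A minimal, self-contained sketch of the Slider-driven redraw pattern used
# above, with random data standing in for the displacement stack (all names
# below are hypothetical; only standard numpy/matplotlib APIs are used):
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider

stack = np.random.rand(10, 50, 50)        # fake (time, y, x) displacement stack
years = np.linspace(2015.0, 2018.0, 10)   # fake acquisition times, decimal years

fig, ax = plt.subplots()
img = ax.imshow(stack[0], interpolation='nearest')
ax_time = fig.add_axes([0.125, 0.02, 0.6, 0.05])
tslider = Slider(ax_time, 'Years', years[0], years[-1], valinit=years[0])

def on_slide(val):
    # snap the continuous slider value to the nearest acquisition
    idx = int(np.argmin(np.abs(years - val)))
    img.set_data(stack[idx])
    fig.canvas.draw_idle()

tslider.on_changed(on_slide)
plt.show()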
Example #23
0
def cmd_line_parse(iargs=None):
    parser = create_parser()
    inps = parser.parse_args(args=iargs)
    inps.file = ut.get_file_list(inps.file)
    return inps
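
# Typical usage, assuming create_parser() defines a positional file argument:
#   inps = cmd_line_parse(['geo_velocity.h5'])  # parse an explicit argument list
#   inps = cmd_line_parse()                     # fall back to sys.argv[1:]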
Example #24
0
def write_kmz_file(data, metadata, out_file, inps=None):
    """ Generate Google Earth KMZ file for input data matrix.
    Inputs:
        data     - 2D np.array in int/float, data matrix to write
        metadata - dict, containing the following attributes:
               WIDTH/LENGTH      : required, file size
               X/Y_FIRST/STEP    : required, for lat/lon spatial coverage
               ref_x/y           : optional, column/row number of reference pixel
               PROJECT_NAME      : optional, for KMZ folder name
        out_file - string, output file name
        inps     - Namespace, optional, input options for display
    Output:
        kmz_file - string, output KMZ filename
    Example:
        from pysar.utils import readfile, plot as pp
        from pysar import save_kml
        fname = 'geo_velocity_masked.h5'
        data, atr = readfile.read(fname)
        out_file = pp.auto_figure_title(fname, None)+'.kmz'
        save_kml.write_kmz_file(data, atr, out_file)
    """
    if not inps:
        inps = cmd_line_parse()

    if not inps.ylim:
        inps.ylim = [np.nanmin(data), np.nanmax(data)]

    west, east, south, north = ut.four_corners(metadata)

    # 2.1 Make PNG file - Data
    print('plotting data ...')

    # Figure size
    if not inps.fig_size:
        plot_shape = [east - west, north - south]
        fig_scale = min(pp.min_figsize_single / min(plot_shape),
                        pp.max_figsize_single / max(plot_shape),
                        pp.max_figsize_height / plot_shape[1])
        inps.fig_size = [np.floor(i * fig_scale * 2) / 2 for i in plot_shape]
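    # The figure is scaled to the data aspect ratio, capped by the module's
    # min/max single-figure size limits, then rounded down to half inches.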
    print('create figure in size: ' + str(inps.fig_size))
    fig = plt.figure(figsize=inps.fig_size, frameon=False)
    ax = fig.add_axes([0., 0., 1., 1.])
    ax.set_axis_off()

    print('colormap: ' + inps.colormap)
    inps.colormap = plt.get_cmap(inps.colormap)

    # Plot - data matrix
    ax.imshow(data,
              cmap=inps.colormap,
              vmin=inps.ylim[0],
              vmax=inps.ylim[1],
              aspect='auto',
              interpolation='nearest')

    # Plot - reference pixel
    if inps.disp_seed == 'yes':
        try:
            xref = int(metadata['REF_X'])
            yref = int(metadata['REF_Y'])
            ax.plot(xref, yref, 'ks', ms=inps.seed_size)
            print('show reference point')
        except (KeyError, ValueError):
            inps.disp_seed = 'no'
            print('Cannot find reference point info!')

    width = int(metadata['WIDTH'])
    length = int(metadata['LENGTH'])
    ax.set_xlim([0, width])
    ax.set_ylim([length, 0])

    out_name_base = os.path.splitext(out_file)[0]
    data_png_file = out_name_base + '.png'
    print('writing {} with dpi={}'.format(data_png_file, inps.fig_dpi))
    plt.savefig(data_png_file,
                pad_inches=0.0,
                transparent=True,
                dpi=inps.fig_dpi)

    # 2.2 Make PNG file - colorbar
    pc = plt.figure(figsize=(1, 8))
    cax = pc.add_subplot(111)
    norm = mpl.colors.Normalize(vmin=inps.ylim[0], vmax=inps.ylim[1])
    cbar = mpl.colorbar.ColorbarBase(cax,
                                     cmap=inps.colormap,
                                     norm=norm,
                                     orientation='vertical')

    cbar.set_label('{} [{}]'.format(inps.cbar_label, inps.disp_unit))
    cbar.locator = mpl.ticker.MaxNLocator(nbins=inps.cbar_bin_num)
    cbar.update_ticks()

    pc.subplots_adjust(left=0.2, bottom=0.3, right=0.4, top=0.7)
    pc.patch.set_facecolor('white')
    pc.patch.set_alpha(0.7)

    cbar_png_file = '{}_cbar.png'.format(out_name_base)
    print('writing ' + cbar_png_file)
    pc.savefig(cbar_png_file,
               bbox_inches='tight',
               facecolor=pc.get_facecolor(),
               dpi=inps.fig_dpi)

    # 2.3 Generate KML file
    print('generating kml file ...')
    try:
        doc = KML.kml(KML.Folder(KML.name(metadata['PROJECT_NAME'])))
    except KeyError:
        doc = KML.kml(KML.Folder(KML.name('PySAR product')))

    # Add data png file
    slc = KML.GroundOverlay(
        KML.name(data_png_file), KML.Icon(KML.href(data_png_file)),
        KML.altitudeMode('clampToGround'),
        KML.LatLonBox(KML.north(str(north)), KML.east(str(east)),
                      KML.south(str(south)), KML.west(str(west))))
    doc.Folder.append(slc)

    # Add colorbar png file
    cb_rg = min(north - south, east - west)
    cb_N = (north + south) / 2.0 + 0.5 * 0.5 * cb_rg
    cb_W = east + 0.1 * cb_rg
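    # Anchor the colorbar overlay just east of the image; its box spans half
    # of the shorter map dimension vertically, centered on the map's latitude.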

    # Use mean height from an existing DEM file
    if not inps.cbar_height:
        try:
            fileList = [
                'geo_geometry*.h5', 'INPUTS/geometry*.h5', 'dem*.h5', '*.dem',
                'radar*.hgt'
            ]
            dem_file = ut.get_file_list(fileList)[0]
            print('use mean height from file: {} + 1000 m as colorbar height.'.
                  format(dem_file))
            dem_data = readfile.read(dem_file, datasetName='height')[0]
            inps.cbar_height = np.rint(np.nanmean(dem_data)) + 1000.0
        except Exception:
            pass
    elif str(inps.cbar_height).lower().endswith('ground'):
        inps.cbar_height = None

    if inps.cbar_height:
        print('set colorbar in height: %.2f m' % inps.cbar_height)
        slc1 = KML.GroundOverlay(
            KML.name('colorbar'), KML.Icon(KML.href(cbar_png_file)),
            KML.altitude(str(inps.cbar_height)), KML.altitudeMode('absolute'),
            KML.LatLonBox(KML.north(str(cb_N)),
                          KML.south(str(cb_N - 0.5 * cb_rg)),
                          KML.west(str(cb_W)),
                          KML.east(str(cb_W + 0.14 * cb_rg))))
    else:
        print('set colorbar clampToGround')
        slc1 = KML.GroundOverlay(
            KML.name('colorbar'), KML.Icon(KML.href(cbar_png_file)),
            KML.altitudeMode('clampToGround'),
            KML.LatLonBox(KML.north(str(cb_N)),
                          KML.south(str(cb_N - 0.5 * cb_rg)),
                          KML.west(str(cb_W)),
                          KML.east(str(cb_W + 0.14 * cb_rg))))
    doc.Folder.append(slc1)
    kmlstr = etree.tostring(doc, pretty_print=True).decode('utf8')

    # Write KML file
    kml_file = '{}.kml'.format(out_name_base)
    print('writing ' + kml_file)
    with open(kml_file, 'w') as f:
        f.write(kmlstr)

    # 2.4 Generate KMZ file
    kmz_file = '{}.kmz'.format(out_name_base)
    cmdKMZ = 'zip {} {} {} {}'.format(kmz_file, kml_file, data_png_file,
                                      cbar_png_file)
    print('writing {}\n{}'.format(kmz_file, cmdKMZ))
    os.system(cmdKMZ)

    cmdClean = 'rm {} {} {}'.format(kml_file, data_png_file, cbar_png_file)
    print(cmdClean)
    os.system(cmdClean)

    return kmz_file
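
# The zip/rm shell calls above assume a Unix environment. A portable sketch of
# the same packaging step using only the standard library (names hypothetical):
import os
import zipfile

def package_kmz(kmz_file, *parts):
    """Bundle a KML file and its PNG overlays into a KMZ archive."""
    with zipfile.ZipFile(kmz_file, 'w', zipfile.ZIP_DEFLATED) as z:
        for fname in parts:
            z.write(fname, arcname=os.path.basename(fname))
    for fname in parts:
        os.remove(fname)  # clean up intermediates, as the rm call above did
    return kmz_file

# e.g. package_kmz('velocity.kmz', 'velocity.kml', 'velocity.png', 'velocity_cbar.png')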
Example #25
0
def correct_tropospheric_delay(inps, template):
    """Correct tropospheric delay with options from template"""
    inps.tropPolyOrder = template['pysar.troposphericDelay.polyOrder']
    inps.tropModel     = template['pysar.troposphericDelay.weatherModel']
    inps.tropMethod    = template['pysar.troposphericDelay.method']

    # check existing tropospheric delay file
    try:
        fileList = [os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None

    # run
    if inps.tropMethod:
        fbase = os.path.splitext(inps.timeseriesFile)[0]

        # Phase/Elevation Ratio (Doin et al., 2009)
        if inps.tropMethod == 'height_correlation':
            outName = '{}_tropHgt.h5'.format(fbase)
            print('tropospheric delay correction with height-correlation approach')
            tropCmd = ('tropcor_phase_elevation.py {t} -g {d} -p {p}'
                       ' -m {m} -o {o}').format(t=inps.timeseriesFile,
                                                d=inps.geomFile,
                                                p=inps.tropPolyOrder,
                                                m=inps.maskFile,
                                                o=outName)
            print(tropCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.timeseriesFile) == 'run':
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    raise Exception('Error while correcting tropospheric delay.\n')
            inps.timeseriesFile = outName
            inps.timeseriesFiles.append(outName)

        # Weather Re-analysis Data (Jolivet et al., 2011;2014)
        elif inps.tropMethod == 'pyaps':
            inps.weatherDir = template['pysar.troposphericDelay.weatherDir']
            outName = '{}_{}.h5'.format(fbase, inps.tropModel)
            print(('Atmospheric correction using Weather Re-analysis dataset'
                   ' (PyAPS, Jolivet et al., 2011)'))
            print('Weather Re-analysis dataset:', inps.tropModel)
            tropCmd = ('tropcor_pyaps.py -f {t} --model {m} -g {g}'
                       ' -w {w}').format(t=inps.timeseriesFile,
                                         m=inps.tropModel,
                                         g=inps.geomFile,
                                         w=inps.weatherDir)
            print(tropCmd)
            if ut.run_or_skip(out_file=outName, in_file=inps.timeseriesFile) == 'run':
                if inps.tropFile:
                    tropCmd = 'diff.py {} {} -o {} --force'.format(inps.timeseriesFile,
                                                                   inps.tropFile,
                                                                   outName)
                    print('--------------------------------------------')
                    print('Use existing tropospheric delay file: {}'.format(inps.tropFile))
                    print(tropCmd)
                status = subprocess.Popen(tropCmd, shell=True).wait()
                if status != 0:
                    print('\nError while correcting tropospheric delay, try the following:')
                    print('1) Check the installation of PyAPS')
                    print('   http://earthdef.caltech.edu/projects/pyaps/wiki/Main')
                    print('   Try in command line: python -c "import pyaps"')
                    print('2) Try another tropospheric correction method, e.g. height_correlation')
                    print('3) or turn off the option by setting pysar.troposphericDelay.method = no.\n')
                    raise RuntimeError()
            inps.timeseriesFile = outName
            inps.timeseriesFiles.append(outName)
        else:
            print('Unrecognized atmospheric delay correction method: {}'.format(inps.tropMethod))

    # Grab tropospheric delay file
    try:
        fileList = [os.path.join(inps.workDir, 'INPUTS/{}.h5'.format(inps.tropModel))]
        inps.tropFile = ut.get_file_list(fileList)[0]
    except IndexError:
        inps.tropFile = None
    return
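
# The Popen(cmd, shell=True).wait() pattern above returns an exit status that
# must be checked by hand. A sketch of the same step with subprocess.run,
# which raises CalledProcessError on a non-zero exit automatically (the
# command string below is illustrative only):
import subprocess

def run_cmd(cmd):
    """Run a shell command, echo it, and fail loudly on error."""
    print(cmd)
    subprocess.run(cmd, shell=True, check=True)

# e.g. run_cmd('tropcor_pyaps.py -f timeseries.h5 --model ECMWF -g geometry.h5')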
Example #26
0
def read_template(inps):
    print('\n**********  Read Template File  **********')
    # default template
    inps.templateFile = os.path.join(inps.workDir, 'pysarApp_template.txt')
    if not os.path.isfile(inps.templateFile):
        print('generate default template file:', inps.templateFile)
        shutil.copy2(inps.autoTemplateFile, inps.workDir)
    else:
        check_obsolete_default_template(inps)

    # custom template
    customTemplate = None
    if inps.customTemplateFile:
        # Copy custom template file to work directory
        inputs_dir = os.path.join(inps.workDir, 'INPUTS')
        if ut.run_or_skip(out_file=os.path.join(inputs_dir, os.path.basename(inps.customTemplateFile)),
                          in_file=inps.customTemplateFile,
                          check_readable=False) == 'run':
            if not os.path.isdir(inputs_dir):
                os.makedirs(inputs_dir)
                print('create directory:', inputs_dir)
            shutil.copy2(inps.customTemplateFile, inputs_dir)
            print('copy {} to INPUTS directory'.format(os.path.basename(inps.customTemplateFile)))

        # Read custom template
        print('read custom template file:', inps.customTemplateFile)
        customTemplate = readfile.read_template(inps.customTemplateFile)
        # normalize common loose spellings of template values
        standardValues = {'def':'auto', 'default':'auto',
                          'y':'yes', 'on':'yes', 'true':'yes',
                          'n':'no', 'off':'no', 'false':'no'
                         }
        for key, value in customTemplate.items():
            if value in standardValues.keys():
                customTemplate[key] = standardValues[value]
        for key in ['pysar.deramp', 'pysar.troposphericDelay.method']:
            if key in customTemplate.keys():
                customTemplate[key] = customTemplate[key].lower().replace('-', '_')
        if 'processor' in customTemplate.keys():
            customTemplate['pysar.load.processor'] = customTemplate['processor']
        for key in ['SUBSET_XMIN', 'SUBSET_YMIN']:
            if key in customTemplate.keys():
                customTemplate.pop(key)

        # Update default template with custom input template
        print('update default template based on input custom template')
        inps.templateFile = ut.update_template_file(inps.templateFile, customTemplate)

    if inps.generate_template:
        raise SystemExit('Exit as planned after template file generation.')

    print('read default template file:', inps.templateFile)
    template = readfile.read_template(inps.templateFile)
    template = ut.check_template_auto_value(template)

    # Get existing UNAVCO metadata file name: unavco_attributes.txt
    try:
        inps.unavcoMetadataFile = ut.get_file_list('unavco_attribute*txt', abspath=True)[0]
    except IndexError:
        inps.unavcoMetadataFile = None
        print('No UNAVCO attributes file found.')

    return inps, template, customTemplate
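
# The loose-value cleanup above can be factored into a small helper; a minimal
# sketch (mapping copied from the function above, helper name hypothetical):
def standardize_template_value(value):
    """Map common spellings of yes/no/auto onto their canonical forms."""
    standard = {'def': 'auto', 'default': 'auto',
                'y': 'yes', 'on': 'yes', 'true': 'yes',
                'n': 'no', 'off': 'no', 'false': 'no'}
    return standard.get(value, value)

# e.g. standardize_template_value('on') -> 'yes'
#      standardize_template_value('0.05') -> '0.05' (unknown values pass through)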
Example #27
0
def main(argv):
    inps = cmdLineParse()

    if inps.timeseries_file:
        inps.timeseries_file = ut.get_file_list([inps.timeseries_file])[0]
        atr = readfile.read_attribute(inps.timeseries_file)
        k = atr['FILE_TYPE']
        if 'ref_y' not in list(atr.keys()) and inps.ref_yx:
            print('No reference info found in input file, use input ref_yx: ' +
                  str(inps.ref_yx))
            atr['ref_y'] = inps.ref_yx[0]
            atr['ref_x'] = inps.ref_yx[1]

    #****reading incidence angle file***/
    if os.path.isfile(inps.inc_angle):
        inps.inc_angle = readfile.read(inps.inc_angle,
                                       datasetName='incidenceAngle')[0]
        inps.inc_angle = np.nan_to_num(inps.inc_angle)
    else:
        inps.inc_angle = float(inps.inc_angle)
        print('incidence angle: ' + str(inps.inc_angle))
    cinc = np.cos(inps.inc_angle * np.pi / 180.0)

    #****look up file****/
    if inps.lookup_file:
        inps.lookup_file = ut.get_file_list(
            [inps.lookup_file])[0]  #'geomap_32rlks_tight.trans'

    #****GACOS****/
    delay_source = 'GACOS'
    # Get weather directory
    if not inps.GACOS_dir:
        if inps.timeseries_file:
            inps.GACOS_dir = os.path.dirname(
                os.path.abspath(inps.timeseries_file)) + '/../WEATHER/GACOS'
        elif inps.lookup_file:
            inps.GACOS_dir = os.path.dirname(os.path.abspath(
                inps.lookup_file)) + '/../WEATHER/GACOS'
        else:
            inps.GACOS_dir = os.path.abspath(os.getcwd())

    print('Store weather data into directory: ' + inps.GACOS_dir)

    # TODO: add a step to copy/download the weather data into inps.GACOS_dir
    #----get date list-----#
    if not inps.date_list_file:
        print('read date list info from: ' + inps.timeseries_file)
        h5 = h5py.File(inps.timeseries_file, 'r')
        if 'timeseries' in list(h5.keys()):
            date_list = sorted(h5[k].keys())
        elif k in ['interferograms', 'coherence', 'wrapped']:
            ifgram_list = sorted(h5[k].keys())
            date12_list = ptime.list_ifgram2date12(ifgram_list)
            m_dates = [i.split('-')[0] for i in date12_list]
            s_dates = [i.split('-')[1] for i in date12_list]
            date_list = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
        else:
            raise ValueError('Un-support input file type:' + k)
        h5.close()
    else:
        date_list = ptime.yyyymmdd(
            np.loadtxt(inps.date_list_file, dtype=str, usecols=(0, )).tolist())
        print('read date list info from: ' + inps.date_list_file)

    #****checking availability of delays****/
    print('checking availability of delays')
    delay_file_list = []
    for d in date_list:
        if delay_source == 'GACOS':
            delay_file = inps.GACOS_dir + '/' + d + '.ztd'
        delay_file_list.append(delay_file)
    delay_file_existed = ut.get_file_list(delay_file_list)

    if len(delay_file_existed) == len(date_list):
        print('no missing files')
    else:
        print('no. of date files found:', len(delay_file_existed))
        print('no. of dates:', len(date_list))

    #*****Calculating delays***/
    print('calculating delays')

    length = int(atr['FILE_LENGTH'])
    width = int(atr['WIDTH'])
    # initialize the delay cube (date, length, width)
    date_num = len(date_list)
    trop_ts = np.zeros((date_num, length, width), np.float32)

    # read the GACOS delay file for each epoch and project it into radar LOS
    for i in range(date_num):
        # index the full list so dates and delay files stay paired even if
        # some files are missing (get_delay will then fail explicitly)
        delay_file = delay_file_list[i]
        date = date_list[i]
        print('calculating delay for date', date)
        trop_ts[i] = get_delay(delay_file, atr, inps.lookup_file, cinc)

    print('Delays Calculated')
    # Convert to relative phase delay with respect to the reference date
    try:
        ref_date = atr['ref_date']
    except KeyError:
        ref_date = date_list[0]
    print('convert to relative phase delay with reference date: ' + ref_date)
    ref_idx = date_list.index(ref_date)
    trop_ts -= np.tile(trop_ts[ref_idx, :, :], (date_num, 1, 1))

    ## Write tropospheric delay to HDF5
    tropFile = 'GACOSdelays.h5'
    print('writing >>> %s' % (tropFile))
    h5trop = h5py.File(tropFile, 'w')
    group_trop = h5trop.create_group('timeseries')
    print('number of acquisitions: ' + str(date_num))
    prog_bar = ptime.progress_bar(maxValue=date_num)
    for i in range(date_num):
        date = date_list[i]
        group_trop.create_dataset(date, data=trop_ts[i], compression='gzip')
        prog_bar.update(i + 1, suffix=date)
    prog_bar.close()

    # Write Attributes
    for key, value in atr.items():
        group_trop.attrs[key] = value
    h5trop.close()

    ## Write corrected Time series to HDF5
    if k == 'timeseries':
        if not inps.out_file:
            inps.out_file = os.path.splitext(
                inps.timeseries_file)[0] + '_GACOS.h5'
        print('writing trop corrected timeseries file %s' % (inps.out_file))
        h5ts = h5py.File(inps.timeseries_file, 'r')
        h5tsCor = h5py.File(inps.out_file, 'w')
        group_tsCor = h5tsCor.create_group('timeseries')
        print('number of acquisitions: ' + str(date_num))
        prog_bar = ptime.progress_bar(maxValue=date_num)
        for i in range(date_num):
            date = date_list[i]
            ts = h5ts['timeseries'].get(date)[:]
            group_tsCor.create_dataset(date,
                                       data=ts - trop_ts[i],
                                       compression='gzip')
            prog_bar.update(i + 1, suffix=date)
        prog_bar.close()
        h5ts.close()
        # Write Attributes
        for key, value in atr.items():
            group_tsCor.attrs[key] = value
        h5tsCor.close()
        print('delays written to %s' % (inps.out_file))

    print('finished')
    return inps.out_file
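
# Note on the referencing step above: np.tile is not strictly needed, since
# NumPy broadcasting already subtracts a (length, width) slice from every
# epoch of a (date_num, length, width) stack. A minimal self-check:
import numpy as np

trop = np.random.rand(5, 3, 4)            # fake (date, length, width) cube
ref = trop[0].copy()                      # delay at the reference date
assert np.allclose(trop - ref, trop - np.tile(ref, (5, 1, 1)))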
Example #28
0
def extract_metadata4geometry_radar(fname):
    """Read/extract attribute for .hgt_sim file from Gamma to ROI_PAC
    Input:
        sim_20070813_20080310.hgt_sim
        sim_20070813_20080310.rdc.dem
    Search for:
        sim_20070813_20080310.diff_par
    Output:
        sim_20070813_20080310.hgt_sim.rsc
        sim_20070813_20080310.rdc.dem.rsc
    """
    # Get/read GAMMA par file
    # loop to strip multiple extensions (dots) from the file name
    fname_base = os.path.splitext(fname)[0]
    for i in range(5):
        fname_base = os.path.splitext(fname_base)[0]
    par_file = fname_base + '.diff_par'
    par_dict = readfile.read_gamma_par(par_file)

    # Get/read LAT/LON_REF1/2/3/4
    msg = 'grab LAT/LON_REF1/2/3/4 from par file: '
    # get date of one acquisition
    try:
        m_date = str(re.findall(r'\d{8}', fname_base)[0])
    except IndexError:
        m_date = str(re.findall(r'\d{6}', fname_base)[0])

    # search existing par file
    geom_dir = os.path.dirname(fname)  #PROJECT_DIR/geom_master
    ifg_dir = os.path.join(geom_dir, '../*/{}_*'.format(
        m_date))  #PROJECT_DIR/interferograms/{m_date}_20141225
    m_par_files = [
        os.path.join(geom_dir, '*{}*{}'.format(m_date, ext))
        for ext in PAR_EXT_LIST
    ]
    m_par_files += [
        os.path.join(ifg_dir, '*{}*{}'.format(m_date, ext))
        for ext in PAR_EXT_LIST
    ]
    m_par_files = ut.get_file_list(m_par_files)

    # read par file
    if len(m_par_files) > 0:
        m_par_file = m_par_files[0]
        msg += m_par_file
        par_dict = get_lalo_ref(m_par_file, par_dict)
    else:
        msg += ' no par file found with date: {}'.format(m_date)
    print(msg)

    # initiate ROIPAC dict
    atr = {}
    atr['PROCESSOR'] = 'gamma'
    atr['FILE_TYPE'] = os.path.splitext(fname)[1]
    atr.update(par_dict)

    # Write to .rsc file
    rsc_file = fname + '.rsc'
    try:
        atr_orig = readfile.read_roipac_rsc(rsc_file)
    except Exception:
        atr_orig = dict()
    if not set(atr.items()).issubset(set(atr_orig.items())):
        atr_out = {**atr_orig, **atr}
        print('writing >>> ' + os.path.basename(rsc_file))
        writefile.write_roipac_rsc(atr_out, out_file=rsc_file)
    return rsc_file
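
# The .rsc update idiom shared by these metadata extractors only rewrites the
# file when something actually changed; a condensed sketch of that check
# (file I/O omitted, function name hypothetical):
def merge_if_changed(atr_orig, atr):
    """Return the merged attribute dict, or None if atr adds nothing new."""
    if set(atr.items()).issubset(set(atr_orig.items())):
        return None                    # every key/value already present: skip write
    return {**atr_orig, **atr}         # new values override the old ones

# e.g. merge_if_changed({'A': '1'}, {'A': '1'})  -> None
#      merge_if_changed({'A': '1'}, {'B': '2'})  -> {'A': '1', 'B': '2'}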