Example no. 1
0
def download_wrapper(args):
    i, ifgd, n_dl, url_data, path_data = args
    dir_data = os.path.dirname(path_data)
    print('  Downloading {} ({}/{})...'.format(ifgd, i + 1, n_dl), flush=True)
    if not os.path.exists(dir_data): os.mkdir(dir_data)
    tools_lib.download_data(url_data, path_data)
    return
Example no. 2
0
def make_3im_png(data3, pngfile, cmap, title3, vmin=None, vmax=None, cbar=True):
    """
    Make png with 3 images for comparison.
    data3 and title3 must be list with 3 elements.
    cmap can be 'insar'. To wrap data, np.angle(np.exp(1j*x/cycle)*cycle)
    """
    ### Plot setting
    if cmap=='insar':
        cdict = tools_lib.cmap_insar()
        plt.register_cmap(name='insar',data=cdict)

    length, width = data3[0].shape
    figsizex = 12
    xmergin = 4 if cbar else 0
    figsizey = int((figsizex-xmergin)/3*length/width)+2
    
    fig = plt.figure(figsize = (figsizex, figsizey))

    for i in range(3):
        ax = fig.add_subplot(1, 3, i+1) #index start from 1
        im = ax.imshow(data3[i], vmin=vmin, vmax=vmax, cmap=cmap)
        ax.set_title(title3[i])
        ax.set_xticklabels([])
        ax.set_yticklabels([])
        if cbar: fig.colorbar(im, ax=ax)

    plt.tight_layout()
    plt.savefig(pngfile)
    plt.close()
   
    return 
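A minimal usage sketch of make_3im_png (an illustration, not part of the original module; it assumes the function above and its numpy/matplotlib imports are available, and uses a standard matplotlib colormap so the tools_lib 'insar' colormap is not required):

import numpy as np
cycle = 3
rng = np.random.default_rng(0)
# Wrap three hypothetical displacement fields with the formula from the docstring
data3 = [np.angle(np.exp(1j * rng.normal(0, 5, (100, 150)) / cycle) * cycle)
         for _ in range(3)]
make_3im_png(data3, 'compare.png', 'RdBu', ['epoch1', 'epoch2', 'epoch3'],
             vmin=-np.pi, vmax=np.pi, cbar=False)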
Example no. 3
0
def make_im_png(data, pngfile, cmap, title, vmin=None, vmax=None, cbar=True):
    """
    Make png image.
    cmap can be 'insar'. To wrap data, np.angle(np.exp(1j*x/cycle)*cycle)
    """

    if cmap=='insar':
        cdict = tools_lib.cmap_insar()
        plt.register_cmap(name='insar',data=cdict)
    
    length, width = data.shape
    figsizex = 8
    xmergin = 2 if cbar else 0
    figsizey = int((figsizex-xmergin)*(length/width))+1
    
    ### Plot
    fig, ax = plt.subplots(1, 1, figsize=(figsizex, figsizey))
    plt.tight_layout()
    
    im = ax.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap)
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.set_title(title)
    if cbar: fig.colorbar(im)

    plt.savefig(pngfile)
    plt.close()
    
    return
Example no. 4
0
def check_exist_wrapper(args):
    """
    Returns :
        0 : Local exists, complete, and new (no need to download)
        1 : Local incomplete (need to re-download)
        2 : Local old (no need to re-download)
        3 : Remote does not exist (cannot compare, no download)
        4 : Local does not exist (need to download)
    """

    i, n_data, url_data, path_data = args
    bname_data = os.path.basename(path_data)

    #    if np.mod(i, 10) == 0:
    #        print("  {0:3}/{1:3}".format(i, n_data), flush=True)

    if os.path.exists(path_data):
        rc = tools_lib.comp_size_time(url_data, path_data)
        if rc == 1:
            print("Size of {} is not identical.".format(bname_data),
                  flush=True)
        elif rc == 2:
            print("Newer {} available.".format(bname_data), flush=True)
        return rc
    else:
        return 4
Example no. 5
0
def check_gacos_wrapper(args):
    """
    Returns :
        0 : Local exists, complete, and new (no need to download)
        1 : Local incomplete (need to re-download)
        2 : Local old (no need to re-download)
        3 : Remote does not exist (cannot compare, no download)
        4 : Local does not exist and remote exists (need to download)
        5 : Neither local nor remote exists (cannot download)
    """
    i, n_data, url_data, path_data = args
    bname_data = os.path.basename(path_data)

    if np.mod(i, 10) == 0:
        print("  {0:3}/{1:3}".format(i, n_data), flush=True)

    if os.path.exists(path_data):
        rc = tools_lib.comp_size_time(url_data, path_data)
        if rc == 1:
            print("Size of {} is not identical.".format(bname_data),
                  flush=True)
        elif rc == 2:
            print("Newer {} available.".format(bname_data), flush=True)
        return rc
    else:
        response = requests.head(url_data, allow_redirects=True)
        if response.status_code == 200:
            return 4
        else:
            return 5
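A hedged sketch of how these return codes could drive a download decision (the URL and path are hypothetical; tools_lib.download_data is the helper used elsewhere in these examples, and the snippet assumes it runs in the same module so tools_lib, requests, np and os are imported):

url_data = 'http://example.com/GACOS/20200101.sltd.geo.tif'  # hypothetical URL
path_data = '20200101.sltd.geo.tif'
rc = check_gacos_wrapper((0, 1, url_data, path_data))
if rc in (1, 4):    # local incomplete, or local missing but remote available
    tools_lib.download_data(url_data, path_data)
elif rc in (3, 5):  # remote not available, nothing to do
    print('  {} not available on the server. Skip.'.format(
        os.path.basename(path_data)), flush=True)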
Example no. 6
0
def make_loop_png(unw12, unw23, unw13, loop_ph, png, titles4, cycle):
    cmap_wrap = tools_lib.get_cmap('SCM.romaO')
    cmap_loop = tools_lib.get_cmap('SCM.vik')

    ### Settings
    plt.rcParams['axes.titlesize'] = 10
    data = [unw12, unw23, unw13]

    length, width = unw12.shape
    if length > width:
        figsize_y = 10
        figsize_x = int((figsize_y-1)*width/length)
        if figsize_x < 5: figsize_x = 5
    else:
        figsize_x = 10
        figsize_y = int(figsize_x*length/width+1)
        if figsize_y < 3: figsize_y = 3

    ### Plot
    fig = plt.figure(figsize = (figsize_x, figsize_y))

    ## 3 ifgs
    for i in range(3):
        data_wrapped = np.angle(np.exp(1j*(data[i]/cycle))*cycle)
        ax = fig.add_subplot(2, 2, i+1) #index start from 1
        im = ax.imshow(data_wrapped, vmin=-np.pi, vmax=+np.pi, cmap=cmap_wrap,
                  interpolation='nearest')
        ax.set_title('{}'.format(titles4[i]))
        ax.set_xticklabels([])
        ax.set_yticklabels([])
        cax = plt.colorbar(im)
        cax.set_ticks([])

    ## loop phase
    ax = fig.add_subplot(2, 2, 4) #index start from 1
    im = ax.imshow(loop_ph, vmin=-np.pi, vmax=+np.pi, cmap=cmap_loop,
              interpolation='nearest')
    ax.set_title('{}'.format(titles4[3]))
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    cax = plt.colorbar(im)

    plt.tight_layout()
    plt.savefig(png)
    plt.close()
Example no. 7
0
def make_loop_png(ifgd12, ifgd23, ifgd13, unw12, unw23, unw13, loop_ph,
                  loop_pngdir):
    ### Load color map for InSAR
    cdict = tools_lib.cmap_insar()
    plt.register_cmap(name='insar', data=cdict)

    rms = np.sqrt(np.nanmean(loop_ph**2))

    ### Settings
    imd1 = ifgd12[:8]
    imd2 = ifgd23[:8]
    imd3 = ifgd23[-8:]
    pngname = os.path.join(loop_pngdir,
                           imd1 + '_' + imd2 + '_' + imd3 + '_loop.png')
    cycle = 3  # 2pi*3/cycle
    titles = [ifgd12, ifgd23, ifgd13]
    data = [unw12, unw23, unw13]

    length, width = unw12.shape
    if length > width:
        figsize_y = 10
        figsize_x = int((figsize_y - 1) * width / length)
        if figsize_x < 5: figsize_x = 5
    else:
        figsize_x = 10
        figsize_y = int(figsize_x * length / width + 1)
        if figsize_y < 3: figsize_y = 3

    ### Plot
    fig = plt.figure(figsize=(figsize_x, figsize_y))

    ## 3 ifgs
    for i in range(3):
        data_wrapped = np.angle(np.exp(1j * (data[i] / cycle)) * cycle)
        ax = fig.add_subplot(2, 2, i + 1)  #index start from 1
        ax.imshow(data_wrapped, vmin=-np.pi, vmax=+np.pi, cmap='insar')
        ax.set_title('{}'.format(titles[i]))
        ax.set_xticklabels([])
        ax.set_yticklabels([])

    ## loop phase
    ax = fig.add_subplot(2, 2, 4)  #index start from 1
    ax.imshow(loop_ph, vmin=-np.pi, vmax=+np.pi, cmap=SCM.vik)
    ax.set_title('Loop phase (RMS={:.2f}rad)'.format(rms))
    ax.set_xticklabels([])
    ax.set_yticklabels([])

    plt.tight_layout()
    plt.savefig(pngname)
    plt.close()
Example no. 8
0
def make_loop_png(unw12, unw23, unw13, loop_ph, png, titles4, cycle):
    ### Load color map for InSAR
    cdict = tools_lib.cmap_insar()
    plt.register_cmap(cmap=mpl.colors.LinearSegmentedColormap('insar', cdict))
    plt.rcParams['axes.titlesize'] = 10

    ### Settings
    data = [unw12, unw23, unw13]

    length, width = unw12.shape
    if length > width:
        figsize_y = 10
        figsize_x = int((figsize_y - 1) * width / length)
        if figsize_x < 5: figsize_x = 5
    else:
        figsize_x = 10
        figsize_y = int(figsize_x * length / width + 1)
        if figsize_y < 3: figsize_y = 3

    ### Plot
    fig = plt.figure(figsize=(figsize_x, figsize_y))

    ## 3 ifgs
    for i in range(3):
        data_wrapped = np.angle(np.exp(1j * (data[i] / cycle)) * cycle)
        ax = fig.add_subplot(2, 2, i + 1)  #index start from 1
        ax.imshow(data_wrapped,
                  vmin=-np.pi,
                  vmax=+np.pi,
                  cmap='insar',
                  interpolation='nearest')
        ax.set_title('{}'.format(titles4[i]))
        ax.set_xticklabels([])
        ax.set_yticklabels([])

    ## loop phase
    ax = fig.add_subplot(2, 2, 4)  #index start from 1
    ax.imshow(loop_ph,
              vmin=-np.pi,
              vmax=+np.pi,
              cmap=SCM.vik,
              interpolation='nearest')
    ax.set_title('{}'.format(titles4[3]))
    ax.set_xticklabels([])
    ax.set_yticklabels([])

    plt.tight_layout()
    plt.savefig(png)
    plt.close()
Example no. 9
0
def make_sb_matrix2(ifgdates):
    """
    Make small baseline incidence-like matrix.
    Composed of -1 at primary and 1 at secondary. (n_ifg, n_im)
    Unknown is cumulative displacement.
    """
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    n_im = len(imdates)
    n_ifg = len(ifgdates)

    A = np.zeros((n_ifg, n_im), dtype=np.int16)
    for ifgix, ifgd in enumerate(ifgdates):
        primarydate = ifgd[:8]
        primaryix = imdates.index(primarydate)
        secondarydate = ifgd[-8:]
        secondaryix = imdates.index(secondarydate)
        A[ifgix, primaryix] = -1
        A[ifgix, secondaryix] = 1
    return A
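A small worked example (the interferogram list is hypothetical, and it assumes tools_lib.ifgdates2imdates returns the sorted unique image dates):

ifgdates = ['20200101_20200113', '20200101_20200125', '20200113_20200125']
A = make_sb_matrix2(ifgdates)
print(A)
# Expected output, one row per ifg and one column per image:
# [[-1  1  0]
#  [-1  0  1]
#  [ 0 -1  1]]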
Example no. 10
0
def make_sb_matrix(ifgdates):
    """
    Make small baseline incidence-like matrix.
    Composed of 1 between primary and secondary. (n_ifg, n_im-1)
    Unknown is incremental displacement.
    """
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    n_im = len(imdates)
    n_ifg = len(ifgdates)

    G = np.zeros((n_ifg, n_im - 1), dtype=np.int16)
    for ifgix, ifgd in enumerate(ifgdates):
        primarydate = ifgd[:8]
        primaryix = imdates.index(primarydate)
        secondarydate = ifgd[-8:]
        secondaryix = imdates.index(secondarydate)
        G[ifgix, primaryix:secondaryix] = 1

    return G
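The same hypothetical three-ifg network as in the make_sb_matrix2 example above, this time with make_sb_matrix (again assuming tools_lib.ifgdates2imdates returns the sorted unique image dates):

ifgdates = ['20200101_20200113', '20200101_20200125', '20200113_20200125']
G = make_sb_matrix(ifgdates)
print(G)
# Expected output, one row per ifg and one column per date interval:
# [[1 0]
#  [1 1]
#  [0 1]]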
Example no. 11
0
def make_sb_matrix(ifgdates):
    """
    Make small baseline incidence-like matrix.
    Composed of 1 between master and slave. (n_ifg, n_im-1)
    Unknown is incremental displacement.
    """
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    n_im = len(imdates)
    n_ifg = len(ifgdates)

    G = np.zeros((n_ifg, n_im - 1), dtype=np.int16)
    for ifgix, ifgd in enumerate(ifgdates):
        masterdate = ifgd[:8]
        masterix = imdates.index(masterdate)
        slavedate = ifgd[-8:]
        slaveix = imdates.index(slavedate)
        G[ifgix, masterix:slaveix] = 1

    return G
Example no. 12
0
def make_sb_matrix2(ifgdates):
    """
    Make small baseline incidence-like matrix.
    Composed of -1 at master and 1 at slave. (n_ifg, n_im)
    Unknown is cumulative displacement.
    """
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    n_im = len(imdates)
    n_ifg = len(ifgdates)

    A = np.zeros((n_ifg, n_im), dtype=np.int16)
    for ifgix, ifgd in enumerate(ifgdates):
        masterdate = ifgd[:8]
        masterix = imdates.index(masterdate)
        slavedate = ifgd[-8:]
        slaveix = imdates.index(slavedate)
        A[ifgix, masterix] = -1
        A[ifgix, slaveix] = 1
    return A
Example no. 13
0
def deramp_wrapper(args):
    i, _cum_org = args
    if np.mod(i, 10) == 0:
        print("  {0:3}/{1:3}th image...".format(i, len(imdates)), flush=True)

    fit, model = tools_lib.fit2dh(_cum_org*mask*mask2, deg_ramp, hgt, hgt_min, hgt_max)  ## fit is not masked
    _cum = _cum_org-fit
    
    if hgt_linearflag:
        fit_hgt = hgt*model[-1]*mask  ## extract only hgt-linear component
        cum_bf = _cum+fit_hgt ## After deramp before hgt-linear
    
        ## Output comparison image of hgt_linear
        std_before = np.nanstd(cum_bf)
        std_after = np.nanstd(_cum*mask)
        data3 = [np.angle(np.exp(1j*(data/coef_r2m/cycle))*cycle) for data in [cum_bf, fit_hgt, _cum*mask]]
        title3 = ['Before hgt-linear (STD: {:.1f}mm)'.format(std_before), 'hgt-linear phase ({:.1f}mm/km)'.format(model[-1]*1000), 'After hgt-linear (STD: {:.1f}mm)'.format(std_after)]
        pngfile = os.path.join(filtcumdir, imdates[i]+'_hgt_linear.png')
        plot_lib.make_3im_png(data3, pngfile, cmap_wrap, title3, vmin=-np.pi, vmax=np.pi, cbar=False)
        
        pngfile = os.path.join(filtcumdir, imdates[i]+'_hgt_corr.png')
        title = '{} ({:.1f}mm/km, based on {}<=hgt<={})'.format(imdates[i], model[-1]*1000, hgt_min, hgt_max)
        plot_lib.plot_hgt_corr(cum_bf, fit_hgt, hgt, title, pngfile)
    
    else:
        fit_hgt = 0  ## for the deramp plot
    
    if deg_ramp:
        ramp = (fit-fit_hgt)*mask
        
        ## Output comparison image of deramp
        data3 = [np.angle(np.exp(1j*(data/coef_r2m/cycle))*cycle) for data in [_cum_org*mask, ramp, _cum_org*mask-ramp]]
        pngfile = os.path.join(filtcumdir, imdates[i]+'_deramp.png')
        deramp_title3 = ['Before deramp ({}pi/cycle)'.format(cycle*2), 'ramp phase (deg:{})'.format(deg_ramp), 'After deramp ({}pi/cycle)'.format(cycle*2)]
        plot_lib.make_3im_png(data3, pngfile, cmap_wrap, deramp_title3, vmin=-np.pi, vmax=np.pi, cbar=False)
 
    return _cum, model
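For intuition, a minimal self-contained sketch of the kind of ramp + height-linear fit that tools_lib.fit2dh performs (this is an illustration with plain numpy under simplifying assumptions, a bilinear ramp and no hgt_min/hgt_max masking, not the library implementation):

import numpy as np

def fit_ramp_hgt(data, hgt):
    # Fit a + b*x + c*y + d*hgt to data, ignoring NaNs; model[-1] is the
    # height-linear coefficient, analogous to model[-1] in deramp_wrapper above.
    length, width = data.shape
    yy, xx = np.meshgrid(np.arange(length), np.arange(width), indexing='ij')
    G = np.stack([np.ones(data.shape), xx, yy, hgt], axis=-1).reshape(-1, 4)
    d = data.ravel()
    ok = ~np.isnan(d) & ~np.isnan(G).any(axis=1)
    model, *_ = np.linalg.lstsq(G[ok], d[ok], rcond=None)
    fit = (G @ model).reshape(length, width)
    return fit, model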
Example no. 14
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    frameID = []
    startdate = 20141001
    enddate = int(dt.date.today().strftime("%Y%m%d"))

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hf:s:e:", ["help"])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-f':
                frameID = a
            elif o == '-s':
                startdate = int(a)
            elif o == '-e':
                enddate = int(a)

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Determine frameID
    wd = os.getcwd()
    if not frameID:  ## if frameID not indicated
        _tmp = re.findall(r'\d{3}[AD]_\d{5}_\d{6}', wd)
        ##e.g., 021D_04972_131213
        if len(_tmp) == 0:
            print('\nFrame ID cannot be identified from dir name!',
                  file=sys.stderr)
            print('Use -f option', file=sys.stderr)
            return
        else:
            frameID = _tmp[0]
            print('\nFrame ID is {}\n'.format(frameID), flush=True)
    trackID = str(int(frameID[0:3]))

    #%% Directory and file setting
    outdir = os.path.join(wd, 'GEOC')
    if not os.path.exists(outdir): os.mkdir(outdir)
    os.chdir(outdir)

    LiCSARweb = 'http://gws-access.ceda.ac.uk/public/nceo_geohazards/LiCSAR_products/'

    #%% ENU
    for ENU in ['E', 'N', 'U']:
        enutif = '{}.geo.{}.tif'.format(frameID, ENU)
        if os.path.exists(enutif):
            print('{} already exist. Skip download.'.format(enutif),
                  flush=True)
            continue

        print('Download {}'.format(enutif), flush=True)

        url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', enutif)
        if not tools_lib.download_data(url, enutif):
            print('  Error while downloading from {}'.format(url),
                  file=sys.stderr,
                  flush=True)
            continue

    #%% baselines
    print('Download baselines', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', 'baselines')
    if not tools_lib.download_data(url, 'baselines'):
        print('  Error while downloading from {}'.format(url),
              file=sys.stderr,
              flush=True)

    #%% unw and cc
    ### Get available dates
    print('\nDownload geotiff of unw and cc', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'products')
    response = requests.get(url)
    response.encoding = response.apparent_encoding  #avoid garble
    html_doc = response.text
    soup = BeautifulSoup(html_doc, "html.parser")
    tags = soup.find_all(href=re.compile(r"\d{8}_\d{8}"))
    ifgdates_all = [tag.get("href")[0:17] for tag in tags]

    ### Extract during start_date to end_date
    ifgdates = []
    for ifgd in ifgdates_all:
        mimd = int(ifgd[:8])
        simd = int(ifgd[-8:])
        if mimd >= startdate and simd <= enddate:
            ifgdates.append(ifgd)

    n_ifg = len(ifgdates)
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    print('{} IFGs available from {} to {}'.format(n_ifg, imdates[0],
                                                   imdates[-1]),
          flush=True)

    ### Download
    for i, ifgd in enumerate(ifgdates):
        print('  Downloading {} ({}/{})...'.format(ifgd, i + 1, n_ifg),
              flush=True)
        url_unw = os.path.join(url, ifgd, ifgd + '.geo.unw.tif')
        path_unw = os.path.join(ifgd, ifgd + '.geo.unw.tif')
        if not os.path.exists(ifgd): os.mkdir(ifgd)
        if os.path.exists(path_unw):
            print('    {}.geo.unw.tif already exist. Skip'.format(ifgd),
                  flush=True)
        elif not tools_lib.download_data(url_unw, path_unw):
            print('    Error while downloading from {}'.format(url_unw),
                  file=sys.stderr,
                  flush=True)

        url_cc = os.path.join(url, ifgd, ifgd + '.geo.cc.tif')
        path_cc = os.path.join(ifgd, ifgd + '.geo.cc.tif')
        if os.path.exists(path_cc):
            print('    {}.geo.cc.tif already exist. Skip.'.format(ifgd),
                  flush=True)
        elif not tools_lib.download_data(url_cc, path_cc):
            print('    Error while downloading from {}'.format(url_cc),
                  file=sys.stderr,
                  flush=True)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minite = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minite, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(outdir))
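The frame-ID auto-detection above only works when the working directory name contains the LiCSAR frame pattern; a quick illustration with hypothetical paths:

import re
for wd in ['/data/021D_04972_131213/work', '/data/myframe']:
    m = re.findall(r'\d{3}[AD]_\d{5}_\d{6}', wd)
    print(wd, '->', m[0] if m else 'not identified, use -f')
# The track ID is then the leading three digits without zero padding,
# e.g. str(int('021D_04972_131213'[0:3])) == '21'.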
Example no. 15
0
def main(argv=None):
    
    #%% Check argv
    if argv is None:
        argv = sys.argv
        
    start = time.time()
    ver=1.2; date=20200228; author="Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date, author), flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])), flush=True)


    #%% Set default
    in_dir = []
    out_dir = []
    gacosdir = 'GACOS'
    resampleAlg = 'cubicspline'# None # 'cubic' 
    fillholeflag = False


    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hi:o:g:z:", ["fillhole", "help"])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                in_dir = a
            elif o == '-o':
                out_dir = a
            elif o == '-z': ## for backward-compatible
                gacosdir = a
            elif o == '-g':
                gacosdir = a
            elif o == "--fillhole":
                fillholeflag = True

        if not in_dir:
            raise Usage('No input directory given, -i is not optional!')
        elif not os.path.isdir(in_dir):
            raise Usage('No {} dir exists!'.format(in_dir))
        elif not os.path.exists(os.path.join(in_dir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(in_dir))
        if not out_dir:
            raise Usage('No output directory given, -o is not optional!')
        if not os.path.isdir(gacosdir):
            raise Usage('No {} dir exists!'.format(gacosdir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  "+str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2
    
    
    #%% Read data information
    ### Directory
    in_dir = os.path.abspath(in_dir)
    gacosdir = os.path.abspath(gacosdir)

    out_dir = os.path.abspath(out_dir)
    if not os.path.exists(out_dir): os.mkdir(out_dir)

    sltddir = os.path.join(out_dir, 'sltd')
    if not os.path.exists(sltddir): os.mkdir(sltddir)

    ### Get general info
    mlipar = os.path.join(in_dir, 'slc.mli.par')
    width_unw = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length_unw = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))
    speed_of_light = 299792458 #m/s
    radar_frequency = float(io_lib.get_param_par(mlipar, 'radar_frequency')) #Hz
    wavelength = speed_of_light/radar_frequency #meter
    m2r_coef = 4*np.pi/wavelength
    
    if wavelength > 0.2: ## L-band
        cycle = 1.5  # 2pi/cycle for png
    else: ## C-band
        cycle = 3  # 2pi*3/cycle for png

    ### Get geo info. Grid registration
    dempar = os.path.join(in_dir, 'EQA.dem_par')
    width_geo = int(io_lib.get_param_par(dempar, 'width'))
    length_geo = int(io_lib.get_param_par(dempar, 'nlines'))
    dlat_geo = float(io_lib.get_param_par(dempar, 'post_lat')) #minus
    dlon_geo = float(io_lib.get_param_par(dempar, 'post_lon'))
    latn_geo = float(io_lib.get_param_par(dempar, 'corner_lat'))
    lonw_geo = float(io_lib.get_param_par(dempar, 'corner_lon'))
    lats_geo = latn_geo+dlat_geo*(length_geo-1)
    lone_geo = lonw_geo+dlon_geo*(width_geo-1)

    ### Check coordinate
    if width_unw!=width_geo or length_unw!=length_geo:
        print('\n{} seems to contain files in radar coordinate!!\n'.format(in_dir), file=sys.stderr)
        print('Not supported.\n', file=sys.stderr)
        return 1

    ### Calc incidence angle from U.geo
    ufile = os.path.join(in_dir, 'U.geo')
    LOSu = io_lib.read_img(ufile, length_geo, width_geo)
    LOSu[LOSu==0] = np.nan

    ### Get ifgdates and imdates
    ifgdates = tools_lib.get_ifgdates(in_dir)
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    n_ifg = len(ifgdates)
    n_im = len(imdates)


    #%% Process ztd files 
    print('\nConvert ztd/sltd.geo.tif files to sltd.geo files...', flush=True)
    ### First check if sltd already exist
    imdates2 = []
    for imd in imdates:
        sltd_geofile = os.path.join(sltddir, imd+'.sltd.geo')
        if not os.path.exists(sltd_geofile):
            imdates2.append(imd)

    n_im2 = len(imdates2)
    if n_im-n_im2 > 0:
        print("  {0:3}/{1:3} sltd already exist. Skip".format(n_im-n_im2, n_im), flush=True)

    no_gacos_imfile = os.path.join(out_dir, 'no_gacos_im.txt')
    if os.path.exists(no_gacos_imfile): os.remove(no_gacos_imfile)
    
    for ix_im, imd in enumerate(imdates2):
        if np.mod(ix_im, 10)==0:
            print('  Finished {0:4}/{1:4}th sltd...'.format(ix_im, n_im2), flush=True)

        ztdfile = os.path.join(gacosdir, imd+'.ztd')
        sltdtiffile = os.path.join(gacosdir, imd+'.sltd.geo.tif')

        if os.path.exists(sltdtiffile):
            infile = os.path.basename(sltdtiffile)
            try: ### Cut and resample. Already in rad.
                sltd_geo = gdal.Warp("", sltdtiffile, format='MEM', outputBounds=(lonw_geo, lats_geo, lone_geo, latn_geo), width=width_geo, height=length_geo, resampleAlg=resampleAlg, srcNodata=0).ReadAsArray()
            except: ## if broken
                print ('  {} cannot open. Skip'.format(infile), flush=True)
                with open(no_gacos_imfile, mode='a') as fnogacos:
                    print('{}'.format(imd), file=fnogacos)
                continue

        elif os.path.exists(ztdfile):
            infile = os.path.basename(ztdfile)
            hdrfile = os.path.join(sltddir, imd+'.hdr')
            bilfile = os.path.join(sltddir, imd+'.bil')
            if os.path.exists(hdrfile): os.remove(hdrfile)
            if os.path.exists(bilfile): os.remove(bilfile)
            make_hdr(ztdfile+'.rsc', hdrfile)
            os.symlink(os.path.relpath(ztdfile, sltddir), bilfile)
    
            ### Cut and resample ztd to geo
            ztd_geo = gdal.Warp("", bilfile, format='MEM', outputBounds=(lonw_geo, lats_geo, lone_geo, latn_geo), width=width_geo, height=length_geo, resampleAlg=resampleAlg, srcNodata=0).ReadAsArray()
            os.remove(hdrfile)
            os.remove(bilfile)
    
            ### Meter to rad, slantrange
            sltd_geo = ztd_geo/LOSu*m2r_coef ## LOSu=cos(inc)

        else:
            print('  There is no ztd|sltd.geo.tif for {}!'.format(imd), flush=True)
            with open(no_gacos_imfile, mode='a') as fnogacos:
                print('{}'.format(imd), file=fnogacos)
            continue ## Next imd

        ### Skip if no data in the area
        if np.all((sltd_geo==0)|np.isnan(sltd_geo)):
            print('  There is no valid data in {}!'.format(infile), flush=True)
            with open(no_gacos_imfile, mode='a') as fnogacos:
                print('{}'.format(imd), file=fnogacos)
            continue ## Next imd

        ### Fill holes if specified
        if fillholeflag:
            sltd_geo = fillhole(sltd_geo)
        
        ### Output as sltd.geo
        sltd_geofile = os.path.join(sltddir, imd+'.sltd.geo')
        sltd_geo.tofile(sltd_geofile)

    
    #%% Correct unw files
    print('\nCorrect unw data...', flush=True)
    ### Information files    
    gacinfofile = os.path.join(out_dir, 'GACOS_info.txt')
    if not os.path.exists(gacinfofile):
        ### Add header
        with open(gacinfofile, "w") as f:
            print(' Phase STD (rad) Before After  ReductionRate', file=f)
    
    no_gacos_ifgfile = os.path.join(out_dir, 'no_gacos_ifg.txt')
    if os.path.exists(no_gacos_ifgfile): os.remove(no_gacos_ifgfile)

    ### First check if already corrected unw exist
    ifgdates2 = []
    for i, ifgd in enumerate(ifgdates): 
        out_dir1 = os.path.join(out_dir, ifgd)
        unw_corfile = os.path.join(out_dir1, ifgd+'.unw')
        if not os.path.exists(unw_corfile):
            ifgdates2.append(ifgd)

    n_ifg2 = len(ifgdates2)
    if n_ifg-n_ifg2 > 0:
        print("  {0:3}/{1:3} corrected unw already exist. Skip".format(n_ifg-n_ifg2, n_ifg), flush=True)

    ### Correct
    for i, ifgd in enumerate(ifgdates2):
        if np.mod(i, 10)==0:
            print('  Finished {0:4}/{1:4}th unw...'.format(i, n_ifg2), flush=True)

        md = ifgd[:8]
        sd = ifgd[-8:]
        msltdfile = os.path.join(sltddir, md+'.sltd.geo')
        ssltdfile = os.path.join(sltddir, sd+'.sltd.geo')
        
        in_dir1 = os.path.join(in_dir, ifgd)
        out_dir1 = os.path.join(out_dir, ifgd)
        
        ### Check if sltd available for both master and slave. If not continue
        ## Not use in tsa because loop cannot be closed
        if not (os.path.exists(msltdfile) and os.path.exists(ssltdfile)):
            print('  ztd file not available for {}'.format(ifgd), flush=True)
            with open(no_gacos_ifgfile, mode='a') as fnogacos:
                print('{}'.format(ifgd), file=fnogacos)
            continue

        ### Prepare directory and file
        if not os.path.exists(out_dir1): os.mkdir(out_dir1)
        unwfile = os.path.join(in_dir1, ifgd+'.unw')
        unw_corfile = os.path.join(out_dir1, ifgd+'.unw')
        
        ### Calculate dsltd
        msltd = io_lib.read_img(msltdfile, length_unw, width_unw)
        ssltd = io_lib.read_img(ssltdfile, length_unw, width_unw)

        msltd[msltd==0] = np.nan
        ssltd[ssltd==0] = np.nan
        
        dsltd = ssltd-msltd
        
        ### Correct unw
        unw = io_lib.read_img(unwfile, length_unw, width_unw)
        
        unw[unw==0] = np.nan
        unw_cor = unw-dsltd
        unw_cor.tofile(unw_corfile)
        
        ### Output std
        std_unw = np.nanstd(unw)
        std_unwcor = np.nanstd(unw_cor)
        rate = (std_unw-std_unwcor)/std_unw*100
        with open(gacinfofile, "a") as f:
            print('{0}  {1:4.1f}  {2:4.1f} {3:5.1f}%'.format(ifgd, std_unw, std_unwcor, rate), file=f)

        ### Link cc
        if not os.path.exists(os.path.join(out_dir1, ifgd+'.cc')):
            os.symlink(os.path.relpath(os.path.join(in_dir1, ifgd+'.cc'), out_dir1), os.path.join(out_dir1, ifgd+'.cc'))
            
        ### Output png for comparison
        data3 = [np.angle(np.exp(1j*(data/cycle))*cycle) for data in [unw, unw_cor, dsltd]]
        title3 = ['unw_org (STD: {:.1f} rad)'.format(std_unw), 'unw_cor (STD: {:.1f} rad)'.format(std_unwcor), 'dsltd ({:.1f}% reduced)'.format(rate)]
        pngfile = os.path.join(out_dir1, ifgd+'.gacos.png')
        plot_lib.make_3im_png(data3, pngfile, 'insar', title3, vmin=-np.pi, vmax=np.pi, cbar=False)
        
        ## Output png for corrected unw
        pngfile = os.path.join(out_dir1, ifgd+'.unw.png')
        title = '{} ({}pi/cycle)'.format(ifgd, cycle*2)
        plot_lib.make_im_png(np.angle(np.exp(1j*unw_cor/cycle)*cycle), pngfile, 'insar', title, -np.pi, np.pi, cbar=False)

    print("", flush=True)
    
    
    #%% Create correlation png
    pngfile = os.path.join(out_dir, 'GACOS_info.png')
    plot_lib.plot_gacos_info(gacinfofile, pngfile)
    
    
    #%% Copy other files
    files = glob.glob(os.path.join(in_dir, '*'))
    for file in files:
        if not os.path.isdir(file): #not copy directory, only file
            print('Copy {}'.format(os.path.basename(file)), flush=True)
            shutil.copy(file, out_dir)
    
    
    #%% Finish
    elapsed_time = time.time()-start
    hour = int(elapsed_time/3600)
    minite = int(np.mod((elapsed_time/60),60))
    sec = int(np.mod(elapsed_time,60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour,minite,sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(out_dir)))

    if os.path.exists(no_gacos_ifgfile):
        print('Caution: The following ifgs are excluded because GACOS data are unavailable:')
        with open(no_gacos_ifgfile) as f:
            for line in f:
                print(line, end='')
        print('')

    if os.path.exists(no_gacos_imfile):
        print('GACOS data for the following dates are missing:')
        with open(no_gacos_imfile) as f:
            for line in f:
                print(line, end='')
        print('')
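As a numerical check of the meter-to-radian conversion used above (Sentinel-1 C-band values are assumed here for illustration):

import numpy as np
speed_of_light = 299792458                       # m/s
radar_frequency = 5.405e9                        # Hz (assumed, Sentinel-1)
wavelength = speed_of_light / radar_frequency    # ~0.0555 m
m2r_coef = 4 * np.pi / wavelength                # ~226.6 rad per meter
# 1 cm of zenith delay mapped to slant range at 35 deg incidence (LOSu = cos(inc)):
inc = np.deg2rad(35)
print(0.01 / np.cos(inc) * m2r_coef)             # ~2.77 rad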
Example no. 16
0
def convert_wrapper(i):
    ifgd = ifgdates2[i]
    if np.mod(i, 10) == 0:
        print("  {0:3}/{1:3}th IFG...".format(i, len(ifgdates2)), flush=True)

    unw_tiffile = os.path.join(geocdir, ifgd, ifgd + '.geo.unw.tif')
    cc_tiffile = os.path.join(geocdir, ifgd, ifgd + '.geo.cc.tif')

    ### Check if inputs exist
    if not os.path.exists(unw_tiffile):
        print('  No {} found. Skip'.format(ifgd + '.geo.unw.tif'), flush=True)
        return 1
    elif not os.path.exists(cc_tiffile):
        print('  No {} found. Skip'.format(ifgd + '.geo.cc.tif'), flush=True)
        return 1

    ### Output dir and files
    ifgdir1 = os.path.join(outdir, ifgd)
    if not os.path.exists(ifgdir1): os.mkdir(ifgdir1)
    unwfile = os.path.join(ifgdir1, ifgd + '.unw')
    ccfile = os.path.join(ifgdir1, ifgd + '.cc')

    ### Read data from geotiff
    try:
        unw = gdal.Open(unw_tiffile).ReadAsArray()
        unw[unw == 0] = np.nan
    except:  ## if broken
        print('  {} cannot open. Skip'.format(ifgd + '.geo.unw.tif'),
              flush=True)
        shutil.rmtree(ifgdir1)
        return 1

    try:
        cc = gdal.Open(cc_tiffile).ReadAsArray()
        if cc.dtype == np.float32:
            cc = cc * 255  ## 0-1 -> 0-255 to output in uint8
    except:  ## if broken
        print('  {} cannot open. Skip'.format(ifgd + '.geo.cc.tif'),
              flush=True)
        shutil.rmtree(ifgdir1)
        return 1

    ### Multilook
    if nlook != 1:
        unw = tools_lib.multilook(unw, nlook, nlook, n_valid_thre)
        cc = cc.astype(np.float32)
        cc[cc == 0] = np.nan
        cc = tools_lib.multilook(cc, nlook, nlook, n_valid_thre)

    ### Output float
    unw.tofile(unwfile)
    cc = cc.astype(np.uint8)  ##nan->0, max255, auto-floored
    cc.tofile(ccfile)

    ### Make png
    unwpngfile = os.path.join(ifgdir1, ifgd + '.unw.png')
    plot_lib.make_im_png(np.angle(np.exp(1j * unw / cycle) * cycle),
                         unwpngfile,
                         cmap,
                         ifgd + '.unw',
                         vmin=-np.pi,
                         vmax=np.pi,
                         cbar=False)

    return 0
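A tiny check of the coherence rescaling and uint8 conversion above (the values are made up):

import numpy as np
cc = np.array([0.0, 0.42, 0.5, 0.997, 1.0], dtype=np.float32)
print((cc * 255).astype(np.uint8))   # [  0 107 127 254 255], i.e. floored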
Example no. 17
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = "1.7.1"
    date = 20201028
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    ### For parallel processing
    global ifgdates2, geocdir, outdir, nlook, n_valid_thre, cycle, cmap

    #%% Set default
    geocdir = []
    outdir = []
    nlook = 1
    radar_freq = 5.405e9
    try:
        n_para = len(os.sched_getaffinity(0))
    except:
        n_para = multi.cpu_count()

    cmap = 'insar'
    cycle = 3
    n_valid_thre = 0.5

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hi:o:n:",
                                       ["help", "freq=", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                geocdir = a
            elif o == '-o':
                outdir = a
            elif o == '-n':
                nlook = int(a)
            elif o == '--freq':
                radar_freq = float(a)
            elif o == '--n_para':
                n_para = int(a)

        if not geocdir:
            raise Usage('No GEOC directory given, -i is not optional!')
        elif not os.path.isdir(geocdir):
            raise Usage('No {} dir exists!'.format(geocdir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Directory and file setting
    geocdir = os.path.abspath(geocdir)
    if not outdir:
        outdir = os.path.join(os.path.dirname(geocdir),
                              'GEOCml{}'.format(nlook))
    if not os.path.exists(outdir): os.mkdir(outdir)

    mlipar = os.path.join(outdir, 'slc.mli.par')
    dempar = os.path.join(outdir, 'EQA.dem_par')

    no_unw_list = os.path.join(outdir, 'no_unw_list.txt')
    if os.path.exists(no_unw_list): os.remove(no_unw_list)

    bperp_file_in = os.path.join(geocdir, 'baselines')
    bperp_file_out = os.path.join(outdir, 'baselines')

    metadata_file = os.path.join(geocdir, 'metadata.txt')
    if os.path.exists(metadata_file):
        center_time = subp.check_output(['grep', 'center_time', metadata_file
                                         ]).decode().split('=')[1].strip()
    else:
        center_time = None

    #%% ENU
    for ENU in ['E', 'N', 'U']:
        print('\nCreate {}'.format(ENU + '.geo'), flush=True)
        enutif = glob.glob(os.path.join(geocdir, '*.geo.{}.tif'.format(ENU)))

        ### Skip if not exist
        if len(enutif) == 0:
            print('  No *.geo.{}.tif found in {}'.format(
                ENU, os.path.basename(geocdir)),
                  flush=True)
            continue

        else:
            enutif = enutif[0]  ## first one

        ### Create float
        data = gdal.Open(enutif).ReadAsArray()
        data[data == 0] = np.nan

        if nlook != 1:
            ### Multilook
            data = tools_lib.multilook(data, nlook, nlook)

        outfile = os.path.join(outdir, ENU + '.geo')
        data.tofile(outfile)
        print('  {}.geo created'.format(ENU), flush=True)

    #%% mli
    print('\nCreate slc.mli', flush=True)
    mlitif = glob.glob(os.path.join(geocdir, '*.geo.mli.tif'))
    if len(mlitif) > 0:
        mlitif = mlitif[0]  ## First one
        mli = np.float32(gdal.Open(mlitif).ReadAsArray())
        mli[mli == 0] = np.nan

        if nlook != 1:
            ### Multilook
            mli = tools_lib.multilook(mli, nlook, nlook)

        mlifile = os.path.join(outdir, 'slc.mli')
        mli.tofile(mlifile)
        mlipngfile = mlifile + '.png'
        mli = np.log10(mli)
        vmin = np.nanpercentile(mli, 5)
        vmax = np.nanpercentile(mli, 95)
        plot_lib.make_im_png(mli,
                             mlipngfile,
                             'gray',
                             'MLI (log10)',
                             vmin,
                             vmax,
                             cbar=True)
        print('  slc.mli[.png] created', flush=True)
    else:
        print('  No *.geo.mli.tif found in {}'.format(
            os.path.basename(geocdir)),
              flush=True)

    #%% hgt
    print('\nCreate hgt', flush=True)
    hgttif = glob.glob(os.path.join(geocdir, '*.geo.hgt.tif'))
    if len(hgttif) > 0:
        hgttif = hgttif[0]  ## First one
        hgt = np.float32(gdal.Open(hgttif).ReadAsArray())
        hgt[hgt == 0] = np.nan

        if nlook != 1:
            ### Multilook
            hgt = tools_lib.multilook(hgt, nlook, nlook)

        hgtfile = os.path.join(outdir, 'hgt')
        hgt.tofile(hgtfile)
        hgtpngfile = hgtfile + '.png'
        vmax = np.nanpercentile(hgt, 99)
        vmin = -vmax / 3  ## because 1/4 of the terrain colormap is blue
        plot_lib.make_im_png(hgt,
                             hgtpngfile,
                             'terrain',
                             'DEM (m)',
                             vmin,
                             vmax,
                             cbar=True)
        print('  hgt[.png] created', flush=True)
    else:
        print('  No *.geo.hgt.tif found in {}'.format(
            os.path.basename(geocdir)),
              flush=True)

    #%% tif -> float (with multilook/downsampling)
    print('\nCreate unw and cc', flush=True)
    ifgdates = tools_lib.get_ifgdates(geocdir)
    n_ifg = len(ifgdates)

    ### First check if float already exist
    ifgdates2 = []
    for i, ifgd in enumerate(ifgdates):
        ifgdir1 = os.path.join(outdir, ifgd)
        unwfile = os.path.join(ifgdir1, ifgd + '.unw')
        ccfile = os.path.join(ifgdir1, ifgd + '.cc')
        if not (os.path.exists(unwfile) and os.path.exists(ccfile)):
            ifgdates2.append(ifgd)

    n_ifg2 = len(ifgdates2)
    if n_ifg - n_ifg2 > 0:
        print("  {0:3}/{1:3} unw and cc already exist. Skip".format(
            n_ifg - n_ifg2, n_ifg),
              flush=True)

    if n_ifg2 > 0:
        if n_para > n_ifg2:
            n_para = n_ifg2

        ### Create float with parallel processing
        print('  {} parallel processing...'.format(n_para), flush=True)
        p = multi.Pool(n_para)
        rc = p.map(convert_wrapper, range(n_ifg2))
        p.close()

        ifgd_ok = []
        for i, _rc in enumerate(rc):
            if _rc == 1:
                with open(no_unw_list, 'a') as f:
                    print('{}'.format(ifgdates2[i]), file=f)
            elif _rc == 0:
                ifgd_ok = ifgdates2[i]  ## readable tiff

        ### Read info
        ## If all floats already exist, this will not be done, but that is no
        ## problem because the par files should already exist!
        if ifgd_ok:
            unw_tiffile = os.path.join(geocdir, ifgd_ok,
                                       ifgd_ok + '.geo.unw.tif')
            geotiff = gdal.Open(unw_tiffile)
            width = geotiff.RasterXSize
            length = geotiff.RasterYSize
            lon_w_p, dlon, _, lat_n_p, _, dlat = geotiff.GetGeoTransform()
            ## lat lon are in pixel registration. dlat is negative
            lon_w_g = lon_w_p + dlon / 2
            lat_n_g = lat_n_p + dlat / 2
            ## to grid registration by shifting half a pixel inward
            if nlook != 1:
                width = int(width / nlook)
                length = int(length / nlook)
                dlon = dlon * nlook
                dlat = dlat * nlook

    #%% EQA.dem_par, slc.mli.par
    if not os.path.exists(mlipar):
        print('\nCreate slc.mli.par', flush=True)
        #        radar_freq = 5.405e9 ## fixed for Sentinel-1

        with open(mlipar, 'w') as f:
            print('range_samples:   {}'.format(width), file=f)
            print('azimuth_lines:   {}'.format(length), file=f)
            print('radar_frequency: {} Hz'.format(radar_freq), file=f)
            if center_time is not None:
                print('center_time: {}'.format(center_time), file=f)

    if not os.path.exists(dempar):
        print('\nCreate EQA.dem_par', flush=True)

        text = [
            "Gamma DIFF&GEO DEM/MAP parameter file", "title: DEM",
            "DEM_projection:     EQA", "data_format:        REAL*4",
            "DEM_hgt_offset:          0.00000",
            "DEM_scale:               1.00000", "width: {}".format(width),
            "nlines: {}".format(length),
            "corner_lat:     {}  decimal degrees".format(lat_n_g),
            "corner_lon:    {}  decimal degrees".format(lon_w_g),
            "post_lat: {} decimal degrees".format(dlat),
            "post_lon: {} decimal degrees".format(dlon), "",
            "ellipsoid_name: WGS 84", "ellipsoid_ra:        6378137.000   m",
            "ellipsoid_reciprocal_flattening:  298.2572236", "",
            "datum_name: WGS 1984", "datum_shift_dx:              0.000   m",
            "datum_shift_dy:              0.000   m",
            "datum_shift_dz:              0.000   m",
            "datum_scale_m:         0.00000e+00",
            "datum_rotation_alpha:  0.00000e+00   arc-sec",
            "datum_rotation_beta:   0.00000e+00   arc-sec",
            "datum_rotation_gamma:  0.00000e+00   arc-sec",
            "datum_country_list: Global Definition, WGS84, World\n"
        ]

        with open(dempar, 'w') as f:
            f.write('\n'.join(text))

    #%% bperp
    print('\nCopy baselines file', flush=True)
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    if os.path.exists(bperp_file_in):
        ## Check existing bperp_file
        if not io_lib.read_bperp_file(bperp_file_in, imdates):
            print('  baselines file found, but not complete. Make dummy',
                  flush=True)
            io_lib.make_dummy_bperp(bperp_file_out, imdates)
        else:
            shutil.copyfile(bperp_file_in, bperp_file_out)
    else:
        print('  No valid baselines file exists. Make dummy.', flush=True)
        io_lib.make_dummy_bperp(bperp_file_out, imdates)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minite = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minite, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(outdir)))
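A small worked example of the pixel-to-grid registration shift and multilooked posting computed above (the corner and spacing values are illustrative):

nlook = 10
lon_w_p, dlon = 30.0, 0.001      # pixel-registered west corner and spacing (assumed)
lat_n_p, dlat = 40.0, -0.001     # pixel-registered north corner and spacing (assumed)
lon_w_g = lon_w_p + dlon / 2     # 30.0005: grid registration, half a pixel inward
lat_n_g = lat_n_p + dlat / 2     # 39.9995
dlon_ml, dlat_ml = dlon * nlook, dlat * nlook   # 0.01 and -0.01 after multilooking
print(lon_w_g, lat_n_g, dlon_ml, dlat_ml)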
Example no. 18
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()

    #%% Set default
    imd_s = []
    imd_e = []
    cumfile = 'cum_filt.h5'
    outfile = []
    refarea = []
    maskfile = []
    vstdflag = False
    sinflag = False
    pngflag = False

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hs:e:i:o:r:",
                                       ["help", "vstd", "sin", "png", "mask="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-s':
                imd_s = a
            elif o == '-e':
                imd_e = a
            elif o == '-i':
                cumfile = a
            elif o == '-o':
                outfile = a
            elif o == '-r':
                refarea = a
            elif o == '--vstd':
                vstdflag = True
            elif o == '--sin':
                sinflag = True
            elif o == '--mask':
                maskfile = a
            elif o == '--png':
                pngflag = True

        if not os.path.exists(cumfile):
            raise Usage('No {} exists! Use -i option.'.format(cumfile))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Read info
    ### Read cumfile
    cumh5 = h5.File(cumfile, 'r')
    imdates = cumh5['imdates'][()].astype(str).tolist()
    cum = cumh5['cum']
    n_im_all, length, width = cum.shape

    if not refarea:
        refarea = cumh5['refarea'][()]
        refx1, refx2, refy1, refy2 = [
            int(s) for s in re.split('[:/]', refarea)
        ]
    else:
        if not tools_lib.read_range(refarea, width, length):
            print('\nERROR in {}\n'.format(refarea), file=sys.stderr)
            return 2
        else:
            refx1, refx2, refy1, refy2 = tools_lib.read_range(
                refarea, width, length)

    #%% Setting
    ### Dates
    if not imd_s:
        imd_s = imdates[0]

    if not imd_e:
        imd_e = imdates[-1]

    ### mask
    if maskfile:
        mask = io_lib.read_img(maskfile, length, width)
        mask[mask == 0] = np.nan
        suffix_mask = '.mskd'
    else:
        mask = np.ones((length, width), dtype=np.float32)
        suffix_mask = ''

    ### Find date index if not exist in imdates
    if imd_s not in imdates:
        for imd in imdates:
            if int(imd) >= int(imd_s):  ## First larger one than imd_s
                imd_s = imd
                break

    if imd_e not in imdates:
        for imd in imdates[::-1]:
            if int(imd) <= int(imd_e):  ## Last smaller one than imd_e
                imd_e = imd
                break

    ix_s = imdates.index(imd_s)
    ix_e = imdates.index(imd_e) + 1  #+1 for Python slicing convention
    n_im = ix_e - ix_s

    ### Calc dt in year
    imdates_dt = ([
        dt.datetime.strptime(imd, '%Y%m%d').toordinal()
        for imd in imdates[ix_s:ix_e]
    ])
    dt_cum = np.float32((np.array(imdates_dt) - imdates_dt[0]) / 365.25)

    ### Outfile
    if not outfile:
        outfile = '{}_{}.vel{}'.format(imd_s, imd_e, suffix_mask)

    #%% Display info
    print('')
    print('Start date  : {}'.format(imdates[ix_s]))
    print('End date    : {}'.format(imdates[ix_e - 1]))
    print('# of images : {}'.format(n_im))
    print('Ref area    : {}:{}/{}:{}'.format(refx1, refx2, refy1, refy2))
    print('')

    #%% Calc velocity and vstd
    vconst = np.zeros((length, width), dtype=np.float32) * np.nan
    vel = np.zeros((length, width), dtype=np.float32) * np.nan

    ### Read cum data
    cum_tmp = cum[ix_s:ix_e, :, :] * mask
    cum_ref = np.nanmean(cum[ix_s:ix_e, refy1:refy2, refx1:refx2] *
                         mask[refy1:refy2, refx1:refx2],
                         axis=(1, 2))

    if np.all(np.isnan(cum_ref)):
        print('\nERROR: Ref area has only NaN value!\n', file=sys.stderr)
        return 2

    cum_tmp = cum_tmp - cum_ref[:, np.newaxis, np.newaxis]

    ### Extract not nan points
    bool_allnan = np.all(np.isnan(cum_tmp), axis=0)
    cum_tmp = cum_tmp.reshape(n_im, length *
                              width)[:, ~bool_allnan.ravel()].transpose()

    if not sinflag:  ## Linear function
        print('Calc velocity...')
        vel[~bool_allnan], vconst[~bool_allnan] = inv_lib.calc_vel(
            cum_tmp, dt_cum)
        vel.tofile(outfile)
    else:  ## Linear+sin function
        print('Calc velocity and annual components...')
        amp = np.zeros((length, width), dtype=np.float32) * np.nan
        delta_t = np.zeros((length, width), dtype=np.float32) * np.nan
        ampfile = outfile.replace('vel', 'amp')
        dtfile = outfile.replace('vel', 'dt')

        vel[~bool_allnan], vconst[~bool_allnan], amp[~bool_allnan], delta_t[
            ~bool_allnan] = inv_lib.calc_velsin(cum_tmp, dt_cum, imdates[0])
        vel.tofile(outfile)
        amp.tofile(ampfile)
        delta_t.tofile(dtfile)

    ### vstd
    if vstdflag:
        vstdfile = outfile.replace('vel', 'vstd')
        vstd = np.zeros((length, width), dtype=np.float32) * np.nan

        print('Calc vstd...')
        vstd[~bool_allnan] = inv_lib.calc_velstd_withnan(cum_tmp, dt_cum)
        vstd.tofile(vstdfile)

    #%% Make png if specified
    if pngflag:
        pngfile = outfile + '.png'
        title = 'n_im: {}, Ref X/Y {}:{}/{}:{}'.format(n_im, refx1, refx2,
                                                       refy1, refy2)
        plot_lib.make_im_png(vel, pngfile, 'jet', title)

        if sinflag:
            amp_max = np.nanpercentile(amp, 99)
            plot_lib.make_im_png(amp,
                                 ampfile + '.png',
                                 'viridis',
                                 title,
                                 vmax=amp_max)
            plot_lib.make_im_png(delta_t, dtfile + '.png', 'hsv', title)

        if vstdflag:
            plot_lib.make_im_png(vstd, vstdfile + '.png', 'jet', title)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minite = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minite, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output: {}'.format(outfile), flush=True)
    if vstdflag:
        print('       {}'.format(vstdfile), flush=True)
    print('')
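For reference, a minimal least-squares sketch of the per-pixel linear fit that inv_lib.calc_vel performs (an illustration under the assumption of NaN-free input, not the library implementation):

import numpy as np

def calc_vel_simple(cum, dt_cum):
    # cum: (n_pt, n_im) displacement, dt_cum: (n_im,) time in years.
    G = np.stack([dt_cum, np.ones_like(dt_cum)], axis=1)   # (n_im, 2)
    m, *_ = np.linalg.lstsq(G, cum.T, rcond=None)          # (2, n_pt)
    return m[0], m[1]                                      # velocity, constant offset

dt_cum = np.array([0, 0.5, 1.0, 1.5], dtype=np.float32)
cum = np.outer([10.0, 0.0, -5.0], dt_cum)                  # 3 points at 10, 0, -5 mm/yr
print(calc_vel_simple(cum, dt_cum)[0])                     # ~[10.  0. -5.]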
Example no. 19
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    in_dir = []
    out_dir = []
    range_str = []
    range_geo_str = []

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hi:o:r:g:", ["help"])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                in_dir = a
            elif o == '-o':
                out_dir = a
            elif o == '-r':
                range_str = a
            elif o == '-g':
                range_geo_str = a

        if not in_dir:
            raise Usage('No input directory given, -i is not optional!')
        if not out_dir:
            raise Usage('No output directory given, -o is not optional!')
        if not range_str and not range_geo_str:
            raise Usage('No clip area given, use either -r or -g!')
        if range_str and range_geo_str:
            raise Usage('Both -r and -g given, use either -r or -g not both!')
        elif not os.path.isdir(in_dir):
            raise Usage('No {} dir exists!'.format(in_dir))
        elif not os.path.exists(os.path.join(in_dir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(in_dir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Read info and make dir
    in_dir = os.path.abspath(in_dir)
    out_dir = os.path.abspath(out_dir)

    ifgdates = tools_lib.get_ifgdates(in_dir)
    n_ifg = len(ifgdates)

    mlipar = os.path.join(in_dir, 'slc.mli.par')
    width = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))

    speed_of_light = 299792458  #m/s
    radar_frequency = float(io_lib.get_param_par(mlipar,
                                                 'radar_frequency'))  #Hz
    wavelength = speed_of_light / radar_frequency  #meter
    if wavelength > 0.2:  ## L-band
        cycle = 1.5  # 2pi/cycle for png
    else:  ## C-band
        cycle = 3  # 2pi*3/cycle for png

    dempar = os.path.join(in_dir, 'EQA.dem_par')
    lat1 = float(io_lib.get_param_par(dempar, 'corner_lat'))  # north
    lon1 = float(io_lib.get_param_par(dempar, 'corner_lon'))  # west
    postlat = float(io_lib.get_param_par(dempar, 'post_lat'))  # negative
    postlon = float(io_lib.get_param_par(dempar, 'post_lon'))  # positive
    lat2 = lat1 + postlat * (length - 1)  # south
    lon2 = lon1 + postlon * (width - 1)  # east

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    #%% Check and set range to be clipped
    ### Read -r or -g option
    if range_str:  ## -r
        if not tools_lib.read_range(range_str, width, length):
            print('\nERROR in {}\n'.format(range_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range(range_str, width, length)
    else:  ## -g
        if not tools_lib.read_range_geo(range_geo_str, width, length, lat1,
                                        postlat, lon1, postlon):
            print('\nERROR in {}\n'.format(range_geo_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range_geo(range_geo_str, width,
                                                      length, lat1, postlat,
                                                      lon1, postlon)
            range_str = '{}:{}/{}:{}'.format(x1, x2, y1, y2)

    ### Calc clipped area info
    width_c = x2 - x1
    length_c = y2 - y1
    lat1_c = lat1 + postlat * y1  # north
    lon1_c = lon1 + postlon * x1  # west
    lat2_c = lat1_c + postlat * (length_c - 1)  # south
    lon2_c = lon1_c + postlon * (width_c - 1)  # east

    print("\nArea to be clipped:", flush=True)
    print("  0:{}/0:{} -> {}:{}/{}:{}".format(width, length, x1, x2, y1, y2))
    print("  {:.7f}/{:.7f}/{:.7f}/{:.7f} ->".format(lon1, lon2, lat2, lat1))
    print("  {:.7f}/{:.7f}/{:.7f}/{:.7f}".format(lon1_c, lon2_c, lat2_c,
                                                 lat1_c))
    print("  Width/Length: {}/{} -> {}/{}".format(width, length, width_c,
                                                  length_c))
    print("", flush=True)

    clipareafile = os.path.join(out_dir, 'cliparea.txt')
    with open(clipareafile, 'w') as f:
        f.write(range_str)

    #%% Make clipped par files
    mlipar_c = os.path.join(out_dir, 'slc.mli.par')
    dempar_c = os.path.join(out_dir, 'EQA.dem_par')

    ### slc.mli.par
    with open(mlipar, 'r') as f:
        file = f.read()
    file = re.sub(r'range_samples:\s*{}'.format(width),
                  'range_samples: {}'.format(width_c), file)
    file = re.sub(r'azimuth_lines:\s*{}'.format(length),
                  'azimuth_lines: {}'.format(length_c), file)
    with open(mlipar_c, 'w') as f:
        f.write(file)

    ### EQA.dem_par
    with open(dempar, 'r') as f:
        file = f.read()
    file = re.sub(r'width:\s*{}'.format(width), 'width: {}'.format(width_c),
                  file)
    file = re.sub(r'nlines:\s*{}'.format(length),
                  'nlines: {}'.format(length_c), file)
    file = re.sub(r'corner_lat:\s*{}'.format(lat1),
                  'corner_lat: {}'.format(lat1_c), file)
    file = re.sub(r'corner_lon:\s*{}'.format(lon1),
                  'corner_lon: {}'.format(lon1_c), file)
    with open(dempar_c, 'w') as f:
        f.write(file)

    #%% Clip or copy other files than unw and cc
    files = glob.glob(os.path.join(in_dir, '*'))
    for file in files:
        if os.path.isdir(file):
            continue  #not copy directory
        elif file == mlipar or file == dempar:
            continue  #not copy
        elif os.path.getsize(file) == width * length * 4:  ##float file
            print('Clip {}'.format(os.path.basename(file)), flush=True)
            data = io_lib.read_img(file, length, width)
            data = data[y1:y2, x1:x2]
            filename = os.path.basename(file)
            outfile = os.path.join(out_dir, filename)
            data.tofile(outfile)
        elif file == os.path.join(in_dir, 'slc.mli.png'):
            print('Recreate slc.mli.png', flush=True)
            mli = io_lib.read_img(os.path.join(out_dir, 'slc.mli'), length_c,
                                  width_c)
            pngfile = os.path.join(out_dir, 'slc.mli.png')
            plot_lib.make_im_png(mli, pngfile, 'gray', 'MLI', cbar=False)
        else:
            print('Copy {}'.format(os.path.basename(file)), flush=True)
            shutil.copy(file, out_dir)

    #%% Clip unw and cc
    print('\nClip unw and cc', flush=True)
    ### First, check if already exist
    ifgdates2 = []
    for ifgix, ifgd in enumerate(ifgdates):
        out_dir1 = os.path.join(out_dir, ifgd)
        unwfile_c = os.path.join(out_dir1, ifgd + '.unw')
        ccfile_c = os.path.join(out_dir1, ifgd + '.cc')
        if not (os.path.exists(unwfile_c) and os.path.exists(ccfile_c)):
            ifgdates2.append(ifgd)

    n_ifg2 = len(ifgdates2)
    if n_ifg - n_ifg2 > 0:
        print("  {0:3}/{1:3} clipped unw and cc already exist. Skip".format(
            n_ifg - n_ifg2, n_ifg),
              flush=True)

    ### Clip
    for ifgix, ifgd in enumerate(ifgdates2):
        if np.mod(ifgix, 100) == 0:
            print("  {0:3}/{1:3}th unw...".format(ifgix, n_ifg2), flush=True)

        unwfile = os.path.join(in_dir, ifgd, ifgd + '.unw')
        ccfile = os.path.join(in_dir, ifgd, ifgd + '.cc')

        unw = io_lib.read_img(unwfile, length, width)
        coh = io_lib.read_img(ccfile, length, width)

        ### Clip
        unw = unw[y1:y2, x1:x2]
        coh = coh[y1:y2, x1:x2]

        ### Output
        out_dir1 = os.path.join(out_dir, ifgd)
        if not os.path.exists(out_dir1): os.mkdir(out_dir1)

        unw.tofile(os.path.join(out_dir1, ifgd + '.unw'))
        coh.tofile(os.path.join(out_dir1, ifgd + '.cc'))

        ## Output png for corrected unw
        pngfile = os.path.join(out_dir1, ifgd + '.unw.png')
        title = '{} ({}pi/cycle)'.format(ifgd, cycle * 2)
        plot_lib.make_im_png(np.angle(np.exp(1j * unw / cycle) * cycle),
                             pngfile,
                             'insar',
                             title,
                             -np.pi,
                             np.pi,
                             cbar=False)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(out_dir)))
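As a side note, the corner arithmetic used above when clipping maps pixel indices to geographic coordinates following the EQA.dem_par convention (postlat negative, postlon positive). Below is a minimal, standalone sketch of that mapping; clip_corners is a hypothetical helper for illustration only, not part of the script.

def clip_corners(lat1, lon1, postlat, postlon, x1, x2, y1, y2):
    """Return (north, west, south, east) of a clipped area.

    lat1/lon1: corner (north-west) of the full grid.
    postlat/postlon: pixel spacings (postlat < 0, postlon > 0).
    x1:x2/y1:y2: clip range in pixel indices (end exclusive).
    """
    width_c = x2 - x1
    length_c = y2 - y1
    lat1_c = lat1 + postlat * y1                 # new north edge
    lon1_c = lon1 + postlon * x1                 # new west edge
    lat2_c = lat1_c + postlat * (length_c - 1)   # south
    lon2_c = lon1_c + postlon * (width_c - 1)    # east
    return lat1_c, lon1_c, lat2_c, lon2_c

# toy example: 0.001 deg posting, clip cols 50:150 and rows 100:200
print(clip_corners(35.0, 135.0, -0.001, 0.001, 50, 150, 100, 200))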
Example no. 20
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = "1.2.5"
    date = 20210105
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    ### For parallel processing
    global ifgdates2, in_dir, out_dir, length, width, x1, x2, y1, y2, cycle, cmap_wrap

    #%% Set default
    in_dir = []
    out_dir = []
    range_str = []
    range_geo_str = []
    try:
        n_para = len(os.sched_getaffinity(0))
    except:
        n_para = multi.cpu_count()

    q = multi.get_context('fork')
    cmap_wrap = SCM.romaO

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hi:o:r:g:",
                                       ["help", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                in_dir = a
            elif o == '-o':
                out_dir = a
            elif o == '-r':
                range_str = a
            elif o == '-g':
                range_geo_str = a
            elif o == '--n_para':
                n_para = int(a)

        if not in_dir:
            raise Usage('No input directory given, -i is not optional!')
        if not out_dir:
            raise Usage('No output directory given, -o is not optional!')
        if not range_str and not range_geo_str:
            raise Usage('No clip area given, use either -r or -g!')
        if range_str and range_geo_str:
            raise Usage('Both -r and -g given, use either -r or -g not both!')
        elif not os.path.isdir(in_dir):
            raise Usage('No {} dir exists!'.format(in_dir))
        elif not os.path.exists(os.path.join(in_dir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(in_dir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Read info and make dir
    in_dir = os.path.abspath(in_dir)
    out_dir = os.path.abspath(out_dir)

    ifgdates = tools_lib.get_ifgdates(in_dir)
    n_ifg = len(ifgdates)

    mlipar = os.path.join(in_dir, 'slc.mli.par')
    width = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))

    speed_of_light = 299792458  #m/s
    radar_frequency = float(io_lib.get_param_par(mlipar,
                                                 'radar_frequency'))  #Hz
    wavelength = speed_of_light / radar_frequency  #meter
    if wavelength > 0.2:  ## L-band
        cycle = 1.5  # 2pi/cycle for png
    else:  ## C-band
        cycle = 3  # 2pi*3/cycle for png

    dempar = os.path.join(in_dir, 'EQA.dem_par')
    lat1 = float(io_lib.get_param_par(dempar, 'corner_lat'))  # north
    lon1 = float(io_lib.get_param_par(dempar, 'corner_lon'))  # west
    postlat = float(io_lib.get_param_par(dempar, 'post_lat'))  # negative
    postlon = float(io_lib.get_param_par(dempar, 'post_lon'))  # positive
    lat2 = lat1 + postlat * (length - 1)  # south
    lon2 = lon1 + postlon * (width - 1)  # east

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    #%% Check and set range to be clipped
    ### Read -r or -g option
    if range_str:  ## -r
        if not tools_lib.read_range(range_str, width, length):
            print('\nERROR in {}\n'.format(range_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range(range_str, width, length)
    else:  ## -g
        if not tools_lib.read_range_geo(range_geo_str, width, length, lat1,
                                        postlat, lon1, postlon):
            print('\nERROR in {}\n'.format(range_geo_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range_geo(range_geo_str, width,
                                                      length, lat1, postlat,
                                                      lon1, postlon)
            range_str = '{}:{}/{}:{}'.format(x1, x2, y1, y2)

    ### Calc clipped  info
    width_c = x2 - x1
    length_c = y2 - y1
    lat1_c = lat1 + postlat * y1  # north
    lon1_c = lon1 + postlon * x1  # west
    lat2_c = lat1_c + postlat * (length_c - 1)  # south
    lon2_c = lon1_c + postlon * (width_c - 1)  # east

    print("\nArea to be clipped:", flush=True)
    print("  0:{}/0:{} -> {}:{}/{}:{}".format(width, length, x1, x2, y1, y2))
    print("  {:.7f}/{:.7f}/{:.7f}/{:.7f} ->".format(lon1, lon2, lat2, lat1))
    print("  {:.7f}/{:.7f}/{:.7f}/{:.7f}".format(lon1_c, lon2_c, lat2_c,
                                                 lat1_c))
    print("  Width/Length: {}/{} -> {}/{}".format(width, length, width_c,
                                                  length_c))
    print("", flush=True)

    clipareafile = os.path.join(out_dir, 'cliparea.txt')
    with open(clipareafile, 'w') as f:
        f.write(range_str)

    #%% Make clipped par files
    mlipar_c = os.path.join(out_dir, 'slc.mli.par')
    dempar_c = os.path.join(out_dir, 'EQA.dem_par')

    ### slc.mli.par
    with open(mlipar, 'r') as f:
        file = f.read()
    file = re.sub(r'range_samples:\s*{}'.format(width),
                  'range_samples: {}'.format(width_c), file)
    file = re.sub(r'azimuth_lines:\s*{}'.format(length),
                  'azimuth_lines: {}'.format(length_c), file)
    with open(mlipar_c, 'w') as f:
        f.write(file)

    ### EQA.dem_par
    with open(dempar, 'r') as f:
        file = f.read()
    file = re.sub(r'width:\s*{}'.format(width), 'width: {}'.format(width_c),
                  file)
    file = re.sub(r'nlines:\s*{}'.format(length),
                  'nlines: {}'.format(length_c), file)
    file = re.sub(r'corner_lat:\s*{}'.format(lat1),
                  'corner_lat: {}'.format(lat1_c), file)
    file = re.sub(r'corner_lon:\s*{}'.format(lon1),
                  'corner_lon: {}'.format(lon1_c), file)
    with open(dempar_c, 'w') as f:
        f.write(file)

    #%% Clip or copy files other than unw and cc
    files = sorted(glob.glob(os.path.join(in_dir, '*')))
    for file in files:
        if os.path.isdir(file):
            continue  # do not copy directories
        elif file == mlipar or file == dempar:
            continue  # clipped par files are written above
        elif os.path.getsize(file) == width * length * 4:  ##float file
            print('Clip {}'.format(os.path.basename(file)), flush=True)
            data = io_lib.read_img(file, length, width)
            data = data[y1:y2, x1:x2]
            filename = os.path.basename(file)
            outfile = os.path.join(out_dir, filename)
            data.tofile(outfile)
        elif file == os.path.join(in_dir, 'slc.mli.png'):
            print('Recreate slc.mli.png', flush=True)
            mli = io_lib.read_img(os.path.join(out_dir, 'slc.mli'), length_c,
                                  width_c)
            pngfile = os.path.join(out_dir, 'slc.mli.png')
            plot_lib.make_im_png(mli, pngfile, 'gray', 'MLI', cbar=False)
        elif file == os.path.join(in_dir, 'hgt.png'):
            print('Recreate hgt.png', flush=True)
            hgt = io_lib.read_img(os.path.join(out_dir, 'hgt'), length_c,
                                  width_c)
            vmax = np.nanpercentile(hgt, 99)
            vmin = -vmax / 3  ## because 1/4 of the terrain cmap is blue
            pngfile = os.path.join(out_dir, 'hgt.png')
            plot_lib.make_im_png(hgt,
                                 pngfile,
                                 'terrain',
                                 'DEM (m)',
                                 vmin,
                                 vmax,
                                 cbar=True)
        else:
            print('Copy {}'.format(os.path.basename(file)), flush=True)
            shutil.copy(file, out_dir)

    #%% Clip unw and cc
    print('\nClip unw and cc', flush=True)
    ### First, check if already exist
    ifgdates2 = []
    for ifgix, ifgd in enumerate(ifgdates):
        out_dir1 = os.path.join(out_dir, ifgd)
        unwfile_c = os.path.join(out_dir1, ifgd + '.unw')
        ccfile_c = os.path.join(out_dir1, ifgd + '.cc')
        if not (os.path.exists(unwfile_c) and os.path.exists(ccfile_c)):
            ifgdates2.append(ifgd)

    n_ifg2 = len(ifgdates2)
    if n_ifg - n_ifg2 > 0:
        print("  {0:3}/{1:3} clipped unw and cc already exist. Skip".format(
            n_ifg - n_ifg2, n_ifg),
              flush=True)

    if n_ifg2 > 0:
        ### Clip with parallel processing
        if n_para > n_ifg2:
            n_para = n_ifg2

        print('  {} parallel processing...'.format(n_para), flush=True)
        p = q.Pool(n_para)
        p.map(clip_wrapper, range(n_ifg2))
        p.close()

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(out_dir)))
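The parallel version above maps clip_wrapper over the interferogram indices, but the worker itself is not included in this snippet. The following is only a plausible sketch, modeled on the sequential clipping loop earlier in this document; it assumes the script's module-level imports (os, numpy as np, io_lib, plot_lib) and the globals declared in main() (ifgdates2, in_dir, out_dir, length, width, x1, x2, y1, y2, cycle, cmap_wrap), and is not the original implementation.

def clip_wrapper(ifgix):
    """Hypothetical worker: clip one unw/cc pair and write a wrapped png."""
    ifgd = ifgdates2[ifgix]
    if np.mod(ifgix, 100) == 0:
        print("  {0:3}/{1:3}th unw...".format(ifgix, len(ifgdates2)), flush=True)

    unw = io_lib.read_img(os.path.join(in_dir, ifgd, ifgd + '.unw'), length, width)
    coh = io_lib.read_img(os.path.join(in_dir, ifgd, ifgd + '.cc'), length, width)

    ### Clip and output
    unw = unw[y1:y2, x1:x2]
    coh = coh[y1:y2, x1:x2]
    out_dir1 = os.path.join(out_dir, ifgd)
    if not os.path.exists(out_dir1): os.mkdir(out_dir1)
    unw.tofile(os.path.join(out_dir1, ifgd + '.unw'))
    coh.tofile(os.path.join(out_dir1, ifgd + '.cc'))

    ### Wrapped png of the clipped unw
    pngfile = os.path.join(out_dir1, ifgd + '.unw.png')
    title = '{} ({}pi/cycle)'.format(ifgd, cycle * 2)
    plot_lib.make_im_png(np.angle(np.exp(1j * unw / cycle) * cycle),
                         pngfile, cmap_wrap, title, -np.pi, np.pi, cbar=False)
    return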
Example no. 21
0
    try:
        geocod_flag = True
        lat1 = float(cumh5['corner_lat'][()])
        lon1 = float(cumh5['corner_lon'][()])
        dlat = float(cumh5['post_lat'][()])
        dlon = float(cumh5['post_lon'][()])
        aspect = np.abs(dlat/dlon/np.cos(np.deg2rad(lat1+dlat*length/2)))
    except:
        geocod_flag = False
        aspect = 1
        print('No latlon field found in {}. Skip.'.format(cumfile))
            
    ### Set initial ref area
    if refarea:
        if not tools_lib.read_range(refarea, width, length):
            print('\nERROR in {}\n'.format(refarea), file=sys.stderr)
            sys.exit(2)
        else:
            refx1, refx2, refy1, refy2 = tools_lib.read_range(refarea, width, length)
    elif refarea_geo and geocod_flag:
        if not tools_lib.read_range_geo(refarea_geo, width, length, lat1, dlat, lon1, dlon):
            print('\nERROR in {}\n'.format(refarea_geo), file=sys.stderr)
            sys.exit(2)
        else:
            refx1, refx2, refy1, refy2 = tools_lib.read_range_geo(refarea_geo, width, length, lat1, dlat, lon1, dlon)
    else:
        refarea = cumh5['refarea'][()]
        refx1, refx2, refy1, refy2 = [int(s) for s in re.split('[:/]', refarea)]
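For reference, the refarea string read back from the cum HDF5 file follows the x1:x2/y1:y2 convention used throughout these scripts, so the re.split('[:/]', ...) above yields the four pixel bounds. A toy check (plain Python, no LiCSBAS helpers assumed):

import re

refarea = '120:121/250:251'   # x1:x2/y1:y2 (toy value)
refx1, refx2, refy1, refy2 = [int(s) for s in re.split('[:/]', refarea)]
print(refx1, refx2, refy1, refy2)   # -> 120 121 250 251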

Example no. 22
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = "1.3.2"
    date = 20201116
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    ifgdir = []
    tsadir = []
    coh_thre = 0.05
    unw_cov_thre = 0.3

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hd:t:c:u:", ["help"])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-d':
                ifgdir = a
            elif o == '-t':
                tsadir = a
            elif o == '-c':
                coh_thre = float(a)
            elif o == '-u':
                unw_cov_thre = float(a)

        if not ifgdir:
            raise Usage('No data directory given, -d is not optional!')
        elif not os.path.isdir(ifgdir):
            raise Usage('No {} dir exists!'.format(ifgdir))
        elif not os.path.exists(os.path.join(ifgdir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(ifgdir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    print("\ncoh_thre     : {}".format(coh_thre), flush=True)
    print("unw_cov_thre : {}".format(unw_cov_thre), flush=True)

    #%% Directory setting
    ifgdir = os.path.abspath(ifgdir)

    if not tsadir:
        tsadir = os.path.join(os.path.dirname(ifgdir),
                              'TS_' + os.path.basename(ifgdir))

    if not os.path.exists(tsadir): os.mkdir(tsadir)

    ifg_rasdir = os.path.join(tsadir, '11ifg_ras')
    bad_ifg_rasdir = os.path.join(tsadir, '11bad_ifg_ras')

    if os.path.exists(ifg_rasdir): shutil.rmtree(ifg_rasdir)
    if os.path.exists(bad_ifg_rasdir): shutil.rmtree(bad_ifg_rasdir)
    os.mkdir(ifg_rasdir)
    os.mkdir(bad_ifg_rasdir)

    netdir = os.path.join(tsadir, 'network')
    if not os.path.exists(netdir): os.mkdir(netdir)

    infodir = os.path.join(tsadir, 'info')
    if not os.path.exists(infodir): os.mkdir(infodir)

    resultsdir = os.path.join(tsadir, 'results')
    if not os.path.exists(resultsdir): os.mkdir(resultsdir)

    #%% Read date, network information and size
    ### Get dates
    ifgdates = tools_lib.get_ifgdates(ifgdir)
    imdates = tools_lib.ifgdates2imdates(ifgdates)

    n_ifg = len(ifgdates)
    n_im = len(imdates)

    ### Get size
    mlipar = os.path.join(ifgdir, 'slc.mli.par')
    width = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))
    print("\nSize         : {} x {}".format(width, length), flush=True)

    ### Copy dempar and mli[png|par]
    for file in ['slc.mli.par', 'EQA.dem_par']:
        if os.path.exists(os.path.join(ifgdir, file)):
            shutil.copy(os.path.join(ifgdir, file), infodir)

    for file in ['slc.mli', 'slc.mli.png', 'hgt', 'hgt.png']:
        if os.path.exists(os.path.join(ifgdir, file)):
            shutil.copy(os.path.join(ifgdir, file), resultsdir)

    #%% Read data
    ### Allocate memory
    n_unw = np.zeros((length, width), dtype=np.float32)
    coh_avg_ifg = []
    n_unw_ifg = []

    ### Read data and calculate
    print('\nReading unw and cc data...', flush=True)
    ## First, identify valid area (n_unw >= n_im)
    for ifgix, ifgd in enumerate(ifgdates):
        if np.mod(ifgix, 100) == 0:
            print("  {0:3}/{1:3}th unw to identify valid area...".format(
                ifgix, n_ifg),
                  flush=True)
        unwfile = os.path.join(ifgdir, ifgd, ifgd + '.unw')
        unw = io_lib.read_img(unwfile, length, width)

        unw[unw == 0] = np.nan  # Fill 0 with nan
        n_unw += ~np.isnan(unw)  # Count non-NaN unw pixels

    ## Identify valid area and calc rate_cov
    bool_valid = (n_unw >= n_im)
    n_unw_valid = bool_valid.sum()

    ## Read cc and unw data
    for ifgix, ifgd in enumerate(ifgdates):
        if np.mod(ifgix, 100) == 0:
            print("  {0:3}/{1:3}th cc and unw...".format(ifgix, n_ifg),
                  flush=True)
        ## unw
        unwfile = os.path.join(ifgdir, ifgd, ifgd + '.unw')
        unw = io_lib.read_img(unwfile, length, width)

        unw[unw == 0] = np.nan  # Fill 0 with nan
        unw[~bool_valid] = np.nan  # Fill sea area with nan
        n_unw_ifg.append((~np.isnan(unw)).sum())

        ## cc
        ccfile = os.path.join(ifgdir, ifgd, ifgd + '.cc')
        if os.path.getsize(ccfile) == length * width:
            coh = io_lib.read_img(ccfile, length, width, np.uint8)
            coh = coh.astype(np.float32) / 255
            coh[coh == 0] = np.nan
        else:
            coh = io_lib.read_img(ccfile, length, width)

        coh_avg_ifg.append(np.nanmean(coh[bool_valid]))  # Use valid area only

    rate_cov = np.array(n_unw_ifg) / n_unw_valid

    ## Read bperp data or dummy
    bperp_file = os.path.join(ifgdir, 'baselines')
    if os.path.exists(bperp_file):
        bperp = io_lib.read_bperp_file(bperp_file, imdates)
    else:  #dummy
        bperp = np.random.random(n_im).tolist()

    #%% Identify bad ifgs, link ras and output stats information
    bad_ifgdates = []
    ixs_bad_ifgdates = []

    ### Header of stats file
    ifg_statsfile = os.path.join(infodir, '11ifg_stats.txt')
    fstats = open(ifg_statsfile, 'w')
    print('# Size: {0}({1}x{2}), n_valid: {3}'.format(width * length, width,
                                                      length, n_unw_valid),
          file=fstats)
    print('# unw_cov_thre: {0}, coh_thre: {1}'.format(unw_cov_thre, coh_thre),
          file=fstats)
    print('# ifg dates         bperp   dt unw_cov  coh_av', file=fstats)

    ### Identify suffix of raster image (png, ras or bmp?)
    unwfile = os.path.join(ifgdir, ifgdates[0], ifgdates[0] + '.unw')
    if os.path.exists(unwfile + '.ras'):
        suffix = '.ras'
    elif os.path.exists(unwfile + '.bmp'):
        suffix = '.bmp'
    elif os.path.exists(unwfile + '.png'):
        suffix = '.png'
    else:
        print('\nERROR: No browse image available for {}!\n'.format(unwfile),
              file=sys.stderr)
        return 2

    for i, ifgd in enumerate(ifgdates):
        rasname = ifgdates[i] + '.unw' + suffix
        rasorg = os.path.join(ifgdir, ifgdates[i], rasname)

        if not os.path.exists(rasorg):
            print('\nERROR: No browse image {} available!\n'.format(rasorg),
                  file=sys.stderr)
            return 2

        ### Identify bad ifgs and link ras
        if rate_cov[i] < unw_cov_thre or coh_avg_ifg[i] < coh_thre:
            bad_ifgdates.append(ifgdates[i])
            ixs_bad_ifgdates.append(i)
            rm_flag = '*'
            os.symlink(os.path.relpath(rasorg, bad_ifg_rasdir),
                       os.path.join(bad_ifg_rasdir, rasname))
        else:
            os.symlink(os.path.relpath(rasorg, ifg_rasdir),
                       os.path.join(ifg_rasdir, rasname))
            rm_flag = ''

        ### For stats file
        ix_primary = imdates.index(ifgd[:8])
        ix_secondary = imdates.index(ifgd[-8:])
        bperp_ifg = bperp[ix_secondary] - bperp[ix_primary]
        mday = dt.datetime.strptime(ifgd[:8], '%Y%m%d').toordinal()
        sday = dt.datetime.strptime(ifgd[-8:], '%Y%m%d').toordinal()
        dt_ifg = sday - mday

        print('{0}  {1:6.1f}  {2:3}   {3:5.3f}   {4:5.3f} {5}'.format(
            ifgd, bperp_ifg, dt_ifg, rate_cov[i], coh_avg_ifg[i], rm_flag),
              file=fstats)

    fstats.close()

    ### Output list of bad ifg
    print('\n{0}/{1} ifgs are discarded from further processing.'.format(
        len(bad_ifgdates), n_ifg))
    print('ifg dates        unw_cov coh_av')
    bad_ifgfile = os.path.join(infodir, '11bad_ifg.txt')
    with open(bad_ifgfile, 'w') as f:
        for i, ifgd in enumerate(bad_ifgdates):
            print('{}'.format(ifgd), file=f)
            print('{}  {:5.3f}  {:5.3f}'.format(
                ifgd, rate_cov[ixs_bad_ifgdates[i]],
                coh_avg_ifg[ixs_bad_ifgdates[i]]),
                  flush=True)

    #%% Identify removed image and output file
    good_ifgdates = list(set(ifgdates) - set(bad_ifgdates))
    good_ifgdates.sort()
    good_imdates = tools_lib.ifgdates2imdates(good_ifgdates)
    bad_imdates = list(set(imdates) - set(good_imdates))
    bad_imdates.sort()

    ### Output list of removed image
    bad_imfile = os.path.join(infodir, '11removed_image.txt')
    with open(bad_imfile, 'w') as f:
        for i in bad_imdates:
            print('{}'.format(i), file=f)

    #%% Plot network
    pngfile = os.path.join(netdir, 'network11_all.png')
    plot_lib.plot_network(ifgdates, bperp, [], pngfile)

    pngfile = os.path.join(netdir, 'network11.png')
    plot_lib.plot_network(ifgdates, bperp, bad_ifgdates, pngfile)

    pngfile = os.path.join(netdir, 'network11_nobad.png')
    plot_lib.plot_network(ifgdates,
                          bperp,
                          bad_ifgdates,
                          pngfile,
                          plot_bad=False)

    #%% Finish
    print('\nCheck network/*, 11bad_ifg_ras/* and 11ifg_ras/* in TS dir.')
    print(
        'If you want to change the bad ifgs to be discarded, re-run with different thresholds or edit 11bad_ifg.txt before the next step.'
    )

    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(tsadir)))
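A toy illustration of the screening rule applied above: an interferogram is flagged as bad when its unwrapped coverage of the valid area falls below unw_cov_thre or its average coherence falls below coh_thre (the dates and values below are made up).

unw_cov_thre, coh_thre = 0.3, 0.05

candidates = [                          # (ifg, rate_cov, coh_avg), toy values
    ('20150101_20150113', 0.85, 0.21),
    ('20150113_20150125', 0.12, 0.18),  # low unw coverage -> flagged
    ('20150125_20150206', 0.74, 0.03),  # low coherence -> flagged
]
bad = [ifg for ifg, cov, coh in candidates
       if cov < unw_cov_thre or coh < coh_thre]
print(bad)   # ['20150113_20150125', '20150125_20150206']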
Example no. 23
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = 1.6
    date = 20200911
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    frameID = []
    startdate = 20141001
    enddate = int(dt.date.today().strftime("%Y%m%d"))
    get_gacos = False
    n_para = 4

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hf:s:e:",
                                       ["help", "get_gacos", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-f':
                frameID = a
            elif o == '-s':
                startdate = int(a)
            elif o == '-e':
                enddate = int(a)
            elif o == '--get_gacos':
                get_gacos = True
            elif o == '--n_para':
                n_para = int(a)

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Determine frameID
    wd = os.getcwd()
    if not frameID:  ## if frameID not indicated
        _tmp = re.findall(r'\d{3}[AD]_\d{5}_\d{6}', wd)
        ##e.g., 021D_04972_131213
        if len(_tmp) == 0:
            print('\nFrame ID cannot be identified from dir name!',
                  file=sys.stderr)
            print('Use -f option', file=sys.stderr)
            return 2
        else:
            frameID = _tmp[0]
            print('\nFrame ID is {}\n'.format(frameID), flush=True)
    else:
        print('\nFrame ID is {}\n'.format(frameID), flush=True)

    trackID = str(int(frameID[0:3]))

    #%% Directory and file setting
    outdir = os.path.join(wd, 'GEOC')
    if not os.path.exists(outdir): os.mkdir(outdir)
    os.chdir(outdir)

    LiCSARweb = 'http://gws-access.ceda.ac.uk/public/nceo_geohazards/LiCSAR_products/'

    #%% ENU and hgt
    for ENU in ['E', 'N', 'U', 'hgt']:
        enutif = '{}.geo.{}.tif'.format(frameID, ENU)
        url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', enutif)
        if os.path.exists(enutif):
            rc = tools_lib.comp_size_time(url, enutif)
            if rc == 0:
                print('{} already exist. Skip download.'.format(enutif),
                      flush=True)
                continue
            elif rc == 3:
                print('{} not available. Skip download.'.format(enutif),
                      flush=True)
                continue
            else:
                if rc == 1:
                    print("Size of {} is not identical.".format(enutif))
                elif rc == 2:
                    print("Newer {} available.".format(enutif))

        print('Download {}'.format(enutif), flush=True)
        tools_lib.download_data(url, enutif)

    #%% baselines and metadata.txt
    print('Download baselines', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', 'baselines')
    tools_lib.download_data(url, 'baselines')

    print('Download metadata.txt', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', 'metadata.txt')
    tools_lib.download_data(url, 'metadata.txt')

    #%% mli
    mlitif = frameID + '.geo.mli.tif'
    if os.path.exists(mlitif):
        print('{} already exist. Skip.'.format(mlitif), flush=True)
    else:
        ### Get available dates
        print('Searching earliest epoch for mli...', flush=True)
        url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
        response = requests.get(url)

        response.encoding = response.apparent_encoding  #avoid garble
        html_doc = response.text
        soup = BeautifulSoup(html_doc, "html.parser")
        tags = soup.find_all(href=re.compile(r"\d{8}"))
        imdates_all = [tag.get("href")[0:8] for tag in tags]
        _imdates = np.int32(np.array(imdates_all))
        _imdates = (_imdates[(_imdates >= startdate) *
                             (_imdates <= enddate)]).astype('str').tolist()

        ## Find earliest date in which mli is available
        imd1 = []
        for i, imd in enumerate(_imdates):
            if np.mod(i, 10) == 0:
                print("\r  {0:3}/{1:3}".format(i, len(_imdates)),
                      end='',
                      flush=True)
            url_epoch = os.path.join(url, imd)
            response = requests.get(url_epoch)
            response.encoding = response.apparent_encoding  #avoid garble
            html_doc = response.text
            soup = BeautifulSoup(html_doc, "html.parser")
            tag = soup.find(href=re.compile(r"\d{8}.geo.mli.tif"))
            if tag is not None:
                print('\n{} found as earliest.'.format(imd))
                imd1 = imd
                break

        ### Download
        if imd1:
            print('Downloading {}.geo.mli.tif as {}.geo.mli.tif...'.format(
                imd1, frameID),
                  flush=True)
            url_mli = os.path.join(url, imd1, imd1 + '.geo.mli.tif')
            tools_lib.download_data(url_mli, mlitif)
        else:
            print('\nNo mli available on {}'.format(url),
                  file=sys.stderr,
                  flush=True)

    #%% GACOS if specified
    if get_gacos:
        gacosdir = os.path.join(wd, 'GACOS')
        if not os.path.exists(gacosdir): os.mkdir(gacosdir)

        ### Get available dates
        print('\nDownload GACOS data', flush=True)
        url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
        response = requests.get(url)
        response.encoding = response.apparent_encoding  #avoid garble
        html_doc = response.text
        soup = BeautifulSoup(html_doc, "html.parser")
        tags = soup.find_all(href=re.compile(r"\d{8}"))
        imdates_all = [tag.get("href")[0:8] for tag in tags]
        _imdates = np.int32(np.array(imdates_all))
        _imdates = (_imdates[(_imdates >= startdate) *
                             (_imdates <= enddate)]).astype('str').tolist()
        print('  There are {} epochs from {} to {}'.format(
            len(_imdates), startdate, enddate),
              flush=True)

        ### Extract available dates
        print('  Searching available epochs ({} parallel)...'.format(n_para),
              flush=True)

        args = [(i, len(_imdates),
                 os.path.join(url, imd, '{}.sltd.geo.tif'.format(imd)),
                 os.path.join(gacosdir, imd + '.sltd.geo.tif'))
                for i, imd in enumerate(_imdates)]

        p = multi.Pool(n_para)
        rc = p.map(check_gacos_wrapper, args)
        p.close()

        n_im_existing = 0
        n_im_unavailable = 0
        imdates_dl = []
        for i, rc1 in enumerate(rc):
            if rc1 == 0:  ## No need to download
                n_im_existing = n_im_existing + 1
            if rc1 == 3 or rc1 == 5:  ## Can not download
                n_im_unavailable = n_im_unavailable + 1
            elif rc1 == 1 or rc1 == 2 or rc1 == 4:  ## Need download
                imdates_dl.append(_imdates[i])

        n_im_dl = len(imdates_dl)

        if n_im_existing > 0:
            print('  {} GACOS data already downloaded'.format(n_im_existing),
                  flush=True)
        if n_im_unavailable > 0:
            print('  {} GACOS data unavailable'.format(n_im_unavailable),
                  flush=True)

        ### Download
        if n_im_dl > 0:
            print('{} GACOS data will be downloaded'.format(n_im_dl),
                  flush=True)
            print('Download GACOS ({} parallel)...'.format(n_para), flush=True)
            ### Download
            args = [(i, imd, n_im_dl,
                     os.path.join(url, imd, '{}.sltd.geo.tif'.format(imd)),
                     os.path.join(gacosdir, '{}.sltd.geo.tif'.format(imd)))
                    for i, imd in enumerate(imdates_dl)]

            p = multi.Pool(n_para)
            p.map(download_wrapper, args)
            p.close()
        else:
            print('No GACOS data available from {} to {}'.format(
                startdate, enddate),
                  flush=True)

    #%% unw and cc
    ### Get available dates
    print('\nDownload geotiff of unw and cc', flush=True)
    url_ifgdir = os.path.join(LiCSARweb, trackID, frameID, 'interferograms')
    response = requests.get(url_ifgdir)

    response.encoding = response.apparent_encoding  #avoid garble
    html_doc = response.text
    soup = BeautifulSoup(html_doc, "html.parser")
    tags = soup.find_all(href=re.compile(r"\d{8}_\d{8}"))
    ifgdates_all = [tag.get("href")[0:17] for tag in tags]

    ### Extract during start_date to end_date
    ifgdates = []
    for ifgd in ifgdates_all:
        mimd = int(ifgd[:8])
        simd = int(ifgd[-8:])
        if mimd >= startdate and simd <= enddate:
            ifgdates.append(ifgd)

    n_ifg = len(ifgdates)
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    print('{} IFGs available from {} to {}'.format(n_ifg, imdates[0],
                                                   imdates[-1]),
          flush=True)

    ### Check if both unw and cc are already downloaded, new, and the same size
    print(
        'Checking existing unw and cc ({} parallel, may take time)...'.format(
            n_para),
        flush=True)

    ## unw
    args = [(i, n_ifg,
             os.path.join(url_ifgdir, ifgd, '{}.geo.unw.tif'.format(ifgd)),
             os.path.join(ifgd, '{}.geo.unw.tif'.format(ifgd)))
            for i, ifgd in enumerate(ifgdates)]

    p = multi.Pool(n_para)
    rc = p.map(check_exist_wrapper, args)
    p.close()

    n_unw_existing = 0
    unwdates_dl = []
    for i, rc1 in enumerate(rc):
        if rc1 == 0:  ## No need to download
            n_unw_existing = n_unw_existing + 1
        if rc1 == 3 or rc1 == 5:  ## Can not download
            print('  {}.geo.unw.tif not available.'.format(ifgdates[i]),
                  flush=True)
        elif rc1 == 1 or rc1 == 2 or rc1 == 4:  ## Need download
            unwdates_dl.append(ifgdates[i])

    ## cc
    args = [(i, n_ifg,
             os.path.join(url_ifgdir, ifgd, '{}.geo.cc.tif'.format(ifgd)),
             os.path.join(ifgd, '{}.geo.cc.tif'.format(ifgd)))
            for i, ifgd in enumerate(ifgdates)]

    p = multi.Pool(n_para)
    rc = p.map(check_exist_wrapper, args)
    p.close()

    n_cc_existing = 0
    ccdates_dl = []
    for i, rc1 in enumerate(rc):
        if rc1 == 0:  ## No need to download
            n_cc_existing = n_cc_existing + 1
        if rc1 == 3 or rc1 == 5:  ## Can not download
            print('  {}.geo.cc.tif not available.'.format(ifgdates[i]),
                  flush=True)
        elif rc1 == 1 or rc1 == 2 or rc1 == 4:  ## Need download
            ccdates_dl.append(ifgdates[i])

    n_unw_dl = len(unwdates_dl)
    n_cc_dl = len(ccdates_dl)
    print('{} unw already downloaded'.format(n_unw_existing), flush=True)
    print('{} unw will be downloaded'.format(n_unw_dl), flush=True)
    print('{} cc already downloaded'.format(n_cc_existing), flush=True)
    print('{} cc will be downloaded'.format(n_cc_dl), flush=True)

    ### Download unw with parallel
    if n_unw_dl != 0:
        print('Download unw ({} parallel)...'.format(n_para), flush=True)
        args = [(i, ifgd, n_unw_dl,
                 os.path.join(url_ifgdir, ifgd, '{}.geo.unw.tif'.format(ifgd)),
                 os.path.join(ifgd, '{}.geo.unw.tif'.format(ifgd)))
                for i, ifgd in enumerate(unwdates_dl)]

        p = multi.Pool(n_para)
        p.map(download_wrapper, args)
        p.close()

    ### Download cc with parallel
    if n_cc_dl != 0:
        print('Download cc ({} parallel)...'.format(n_para), flush=True)
        args = [(i, ifgd, n_cc_dl,
                 os.path.join(url_ifgdir, ifgd, '{}.geo.cc.tif'.format(ifgd)),
                 os.path.join(ifgd, '{}.geo.cc.tif'.format(ifgd)))
                for i, ifgd in enumerate(ccdates_dl)]

        p = multi.Pool(n_para)
        p.map(download_wrapper, args)
        p.close()

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(outdir))
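The epoch filtering above converts the scraped YYYYMMDD directory names to integers and keeps those inside the requested window; a self-contained sketch of the same idea without the HTML scraping (toy dates):

import numpy as np

imdates_all = ['20141015', '20150101', '20200101', '20230101']   # toy scraped dates
startdate, enddate = 20141001, 20210101

_imdates = np.int32(np.array(imdates_all))
_imdates = (_imdates[(_imdates >= startdate) *
                     (_imdates <= enddate)]).astype('str').tolist()
print(_imdates)   # ['20141015', '20150101', '20200101']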
Example no. 24
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = "1.3.5"
    date = 20210105
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    ### For parallel processing
    global ifgdates2, in_dir, out_dir, length, width, bool_mask, cycle, cmap_wrap

    #%% Set default
    in_dir = []
    out_dir = []
    coh_thre = []
    ex_range_str = []
    ex_range_file = []
    try:
        n_para = len(os.sched_getaffinity(0))
    except:
        n_para = multi.cpu_count()

    cmap_noise = 'viridis'
    cmap_wrap = SCM.romaO
    q = multi.get_context('fork')

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hi:o:c:r:f:",
                                       ["help", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-i':
                in_dir = a
            elif o == '-o':
                out_dir = a
            elif o == '-c':
                coh_thre = float(a)
            elif o == '-r':
                ex_range_str = a
            elif o == '-f':
                ex_range_file = a
            elif o == '--n_para':
                n_para = int(a)

        if not in_dir:
            raise Usage('No input directory given, -i is not optional!')
        if not out_dir:
            raise Usage('No output directory given, -o is not optional!')
        if not coh_thre and not ex_range_str and not ex_range_file:
            raise Usage('None of -c, -r, or -f given, at least one is required!')
        elif not os.path.isdir(in_dir):
            raise Usage('No {} dir exists!'.format(in_dir))
        elif not os.path.exists(os.path.join(in_dir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(in_dir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Read info and make dir
    in_dir = os.path.abspath(in_dir)
    out_dir = os.path.abspath(out_dir)

    ifgdates = tools_lib.get_ifgdates(in_dir)
    n_ifg = len(ifgdates)

    mlipar = os.path.join(in_dir, 'slc.mli.par')
    width = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))

    speed_of_light = 299792458  #m/s
    radar_frequency = float(io_lib.get_param_par(mlipar,
                                                 'radar_frequency'))  #Hz
    wavelength = speed_of_light / radar_frequency  #meter
    if wavelength > 0.2:  ## L-band
        cycle = 1.5  # 2pi/cycle for png
    else:  ## C-band
        cycle = 3  # 2pi*3/cycle for png

    if not os.path.exists(out_dir):
        os.mkdir(out_dir)

    bool_mask = np.zeros((length, width), dtype=bool)  # builtin bool (np.bool is deprecated)

    #%% Check and set pixels to be masked based on coherence
    if coh_thre:
        ### Calc coh_avg
        print("\nCalculate coh_avg and define mask (<={})".format(coh_thre),
              flush=True)
        coh_avg = np.zeros((length, width), dtype=np.float32)
        n_coh = np.zeros((length, width), dtype=np.int16)
        for ifgix, ifgd in enumerate(ifgdates):
            ccfile = os.path.join(in_dir, ifgd, ifgd + '.cc')
            if os.path.getsize(ccfile) == length * width:
                coh = io_lib.read_img(ccfile, length, width, np.uint8)
                coh = coh.astype(np.float32) / 255
            else:
                coh = io_lib.read_img(ccfile, length, width)
                coh[np.isnan(coh)] = 0  # Fill nan with 0

            coh_avg += coh
            n_coh += (coh != 0)

        n_coh[n_coh == 0] = 99999  #to avoid zero division
        coh_avg = coh_avg / n_coh

        ### Set mask
        bool_mask[coh_avg <= coh_thre] = True

        ### Save image
        coh_avgfile = os.path.join(out_dir, 'coh_avg')
        coh_avg.tofile(coh_avgfile)
        title = 'Average coherence'
        plot_lib.make_im_png(coh_avg, coh_avgfile + '.png', cmap_noise, title)

    #%% Check and set range to be masked based on specified area
    ### Read -r option
    if ex_range_str:
        if not tools_lib.read_range(ex_range_str, width, length):
            print('\nERROR in {}\n'.format(ex_range_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range(ex_range_str, width, length)
            bool_mask[y1:y2, x1:x2] = True

    ### Read -f option
    if ex_range_file:
        with open(ex_range_file) as f:
            ex_range_str_all = f.readlines()

        for ex_range_str1 in ex_range_str_all:
            if not tools_lib.read_range(ex_range_str1, width, length):
                print('\nERROR in {}\n'.format(ex_range_str1), file=sys.stderr)
                return 1
            else:
                x1, x2, y1, y2 = tools_lib.read_range(ex_range_str1, width,
                                                      length)
                bool_mask[y1:y2, x1:x2] = True

    ### Save image of mask
    mask = np.float32(~bool_mask)
    maskfile = os.path.join(out_dir, 'mask')
    mask.tofile(maskfile)

    pngfile = maskfile + '.png'
    title = 'Mask'
    plot_lib.make_im_png(mask, pngfile, cmap_noise, title, 0, 1)

    print('\nMask defined.')

    #%% Mask unw
    print('\nMask unw and link cc', flush=True)
    ### First, check if already exist
    ifgdates2 = []
    for ifgix, ifgd in enumerate(ifgdates):
        out_dir1 = os.path.join(out_dir, ifgd)
        unwfile_m = os.path.join(out_dir1, ifgd + '.unw')
        ccfile_m = os.path.join(out_dir1, ifgd + '.cc')
        if not (os.path.exists(unwfile_m) and os.path.exists(ccfile_m)):
            ifgdates2.append(ifgd)

    n_ifg2 = len(ifgdates2)
    if n_ifg - n_ifg2 > 0:
        print("  {0:3}/{1:3} masked unw and cc already exist. Skip".format(
            n_ifg - n_ifg2, n_ifg),
              flush=True)

    if n_ifg2 > 0:
        ### Mask with parallel processing
        if n_para > n_ifg2:
            n_para = n_ifg2

        print('  {} parallel processing...'.format(n_para), flush=True)
        p = q.Pool(n_para)
        p.map(mask_wrapper, range(n_ifg2))
        p.close()

    print("", flush=True)

    #%% Copy other files
    files = glob.glob(os.path.join(in_dir, '*'))
    for file in files:
        if not os.path.isdir(file):  #not copy directory, only file
            print('Copy {}'.format(os.path.basename(file)), flush=True)
            shutil.copy(file, out_dir)

    print('\nMasked area can be checked in:')
    print('{}'.format(os.path.relpath(maskfile + '.png')), flush=True)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(out_dir)))
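mask_wrapper is mapped over range(n_ifg2) above but is not included in this snippet. The following is only a plausible sketch, relying on the script's imports (os, numpy as np, io_lib, plot_lib) and the globals declared in main() (ifgdates2, in_dir, out_dir, length, width, bool_mask, cycle, cmap_wrap): mask the unw with NaN where bool_mask is True, link the cc, and regenerate the wrapped png. Treat it as an assumption about the worker, not the original code.

def mask_wrapper(ifgix):
    """Hypothetical worker: apply bool_mask to one unw and link its cc."""
    ifgd = ifgdates2[ifgix]
    out_dir1 = os.path.join(out_dir, ifgd)
    if not os.path.exists(out_dir1): os.mkdir(out_dir1)

    unw = io_lib.read_img(os.path.join(in_dir, ifgd, ifgd + '.unw'),
                          length, width)
    unw[bool_mask] = np.nan
    unw.tofile(os.path.join(out_dir1, ifgd + '.unw'))

    ccfile = os.path.join(in_dir, ifgd, ifgd + '.cc')
    ccfile_m = os.path.join(out_dir1, ifgd + '.cc')
    if not os.path.exists(ccfile_m):
        os.symlink(os.path.relpath(ccfile, out_dir1), ccfile_m)

    ### Wrapped png of the masked unw
    pngfile = os.path.join(out_dir1, ifgd + '.unw.png')
    title = '{} ({}pi/cycle)'.format(ifgd, cycle * 2)
    plot_lib.make_im_png(np.angle(np.exp(1j * unw / cycle) * cycle),
                         pngfile, cmap_wrap, title, -np.pi, np.pi, cbar=False)
    return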
Example no. 25
0
            raise Usage('No {} exists!'.format(infile))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        sys.exit(2)

    #%% Set cmap if SCM
    if cmap_name.startswith('SCM'):
        ## NB: exec() cannot rebind a function-local variable in Python 3,
        ## so resolve the SCM colormap object with getattr instead
        if cmap_name.endswith('_r'):
            cmap = getattr(SCM, cmap_name[4:-2]).reversed()
        else:
            cmap = getattr(SCM, cmap_name[4:])
    elif cmap_name == 'insar':
        cdict = tools_lib.cmap_insar()
        plt.register_cmap(
            cmap=mpl.colors.LinearSegmentedColormap('insar', cdict))
        cmap = 'insar'
    else:
        cmap = cmap_name

    #%% Get info and Read data
    if gdal.IdentifyDriver(infile):  ## If Geotiff or grd
        geotiff = gdal.Open(infile)
        data = geotiff.ReadAsArray()
        if data.ndim > 2:
            print('\nERROR: {} has multiple bands and cannot be displayed.\n'.
                  format(infile),
                  file=sys.stderr)
            sys.exit(2)
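A minimal sketch of the name-to-colormap lookup used above, assuming the ScientificColourMaps module is importable as SCM and exposes each colormap as an attribute (e.g. SCM.roma), as in the other scripts in this collection; pick_cmap is an illustrative helper, not part of the script.

def pick_cmap(cmap_name):
    """Resolve 'SCM.<name>' (optionally ending with '_r') to a colormap object;
    any other name is returned as-is for matplotlib to resolve."""
    if cmap_name.startswith('SCM.'):
        base = cmap_name[4:]
        if base.endswith('_r'):
            return getattr(SCM, base[:-2]).reversed()
        return getattr(SCM, base)
    return cmap_name

# e.g. pick_cmap('SCM.roma_r') returns the reversed roma colormap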
Example no. 26
0
    def printcoords(event):
        global dph, lines1, lines2, lastevent
        #outputting x and y coords to console
        if event.inaxes != axv:
            return
        elif event.button != 1: ## Only left click
            return
        elif not event.dblclick: ## Only double click
            return
        else:
            lastevent = event  ## Update last event
            
        ii = int(np.round(event.ydata))  # builtin int (np.int is deprecated)
        jj = int(np.round(event.xdata))

        ### Plot on image window
        ii1h = ii-0.5; ii2h = ii+1-0.5 ## Shift half for plot
        jj1h = jj-0.5; jj2h = jj+1-0.5
        pax.set_data([jj1h, jj2h, jj2h, jj1h, jj1h], [ii1h, ii1h, ii2h, ii2h, ii1h])
        pax2.set_data(jj, ii)
        pv.canvas.draw()

        axts.cla()
        axts.grid(zorder=0)
        axts.set_axisbelow(True)
        axts.set_xlabel('Time')
        axts.set_ylabel('Displacement (mm)')

        ### Get values of noise indices and incidence angle
        noisetxt = ''
        for key in mapdict_data:
            val = mapdict_data[key][ii, jj]
            unit = mapdict_unit[key]
            if key.startswith('vel'): ## Not plotted here
                continue
            elif key.startswith('n_') or key=='mask':
                noisetxt = noisetxt+'{}: {:d} {}\n'.format(key, int(val), unit)
            else:
                noisetxt = noisetxt+'{}: {:.2f} {}\n'.format(key, val, unit)

        if LOSuflag:
            noisetxt = noisetxt+'Inc_agl: {:.1f} deg\n'.format(inc_agl_deg[ii, jj])
            noisetxt = noisetxt+'LOS u: {:.3f}\n'.format(LOSu[ii, jj])

        ### Get lat lon and show Ref info at side 
        if geocod_flag:
            lat, lon = tools_lib.xy2bl(jj, ii, lat1, dlat, lon1, dlon)
            axtref.set_text('Lat:{:.5f}\nLon:{:.5f}\n\nRef area:\n X {}:{}\n Y {}:{}\n (start from 0)\nRef date:\n {}\n\n{}'.format(lat, lon, refx1, refx2, refy1, refy2, imdates[ix_m], noisetxt))
        else: 
            axtref.set_text('Ref area:\n X {}:{}\n Y {}:{}\n (start from 0)\nRef date:\n {}\n\n{}'.format(refx1, refx2, refy1, refy2, imdates[ix_m], noisetxt))

        ### If masked
        if np.isnan(mask[ii, jj]):
            axts.set_title('NaN @({}, {})'.format(jj, ii), fontsize=10)
            pts.canvas.draw()
            return

        try: # Only support from Matplotlib 3.1!
            axts.xaxis.set_major_formatter(mdates.ConciseDateFormatter(loc_ts))
        except:
            axts.xaxis.set_major_formatter(mdates.DateFormatter('%Y/%m/%d'))
            for label in axts.get_xticklabels():
                label.set_rotation(20)
                label.set_horizontalalignment('right')


        ### If not masked
        ### cumfile
        vel1p = vel[ii, jj]-np.nanmean((vel*mask)[refy1:refy2, refx1:refx2])
        
        dcum_ref = cum_ref[ii, jj]-np.nanmean(cum_ref[refy1:refy2, refx1:refx2]*mask[refy1:refy2, refx1:refx2])
#        dcum_ref = 0
        dph = cum[:, ii, jj]-np.nanmean(cum[:, refy1:refy2, refx1:refx2]*mask[refy1:refy2, refx1:refx2], axis=(1, 2)) - dcum_ref

        ## fit function
        lines1 = [0, 0, 0, 0]
        xvalues = np.arange(imdates_ordinal[0], imdates_ordinal[-1], 10)
        for model, vis in enumerate(visibilities):
            yvalues = calc_model(dph, imdates_ordinal, xvalues, model)
            lines1[model], = axts.plot(xvalues, yvalues, 'b-', visible=vis, alpha=0.6, zorder=3)

        axts.scatter(imdates_dt, dph, label=label1, c='b', alpha=0.6, zorder=5)
        axts.set_title('vel = {:.1f} mm/yr @({}, {})'.format(vel1p, jj, ii), fontsize=10)

        ### cumfile2
        if cumfile2:
            vel2p = vel2[ii, jj]-np.nanmean((vel2*mask)[refy1:refy2, refx1:refx2])
            dcum2_ref = cum2_ref[ii, jj]-np.nanmean(cum2_ref[refy1:refy2, refx1:refx2]*mask[refy1:refy2, refx1:refx2])
            dphf = cum2[:, ii, jj]-np.nanmean(cum2[:, refy1:refy2, refx1:refx2]*mask[refy1:refy2, refx1:refx2], axis=(1, 2)) - dcum2_ref

            ## fit function
            lines2 = [0, 0, 0, 0]
            for model, vis in enumerate(visibilities):
                yvalues = calc_model(dphf, imdates_ordinal, xvalues, model)
                lines2[model], = axts.plot(xvalues, yvalues, 'r-', visible=vis, alpha=0.6, zorder=2)
                
            axts.scatter(imdates_dt, dphf, c='r', label=label2, alpha=0.6, zorder=4)
            axts.set_title('vel(1) = {:.1f} mm/yr, vel(2) = {:.1f} mm/yr @({}, {})'.format(vel1p, vel2p, jj, ii), fontsize=10)

        ## gap
        if gap:
            gap1p = (gap[:, ii, jj]==1) # n_im-1, bool
            if not np.all(~gap1p): ## Plot only if there is a gap
                gap_ordinal = (imdates_ordinal[1:][gap1p]+imdates_ordinal[0:-1][gap1p])/2
                axts.vlines(gap_ordinal, 0, 1, transform=axts.get_xaxis_transform(), zorder=1, label=label_gap, alpha=0.6, colors='k')
        
        ### Y axis
        if ylen:
            vlim = [np.nanmedian(dph)-ylen/2, np.nanmedian(dph)+ylen/2]
            axts.set_ylim(vlim)

        ### Legend
        axts.legend()

        pts.canvas.draw()
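The displacement series plotted above is referenced by subtracting, at every epoch, the masked mean over the reference window; a toy, self-contained version of that step (shapes and values are made up):

import numpy as np

n_im, length, width = 5, 10, 10
cum = np.random.rand(n_im, length, width).astype(np.float32)   # toy cumulative displacement
mask = np.ones((length, width), dtype=np.float32)              # 1 = keep, nan = masked out
refy1, refy2, refx1, refx2 = 4, 6, 4, 6                        # reference window
ii, jj = 2, 3                                                  # clicked pixel

dph = cum[:, ii, jj] - np.nanmean(
    cum[:, refy1:refy2, refx1:refx2] * mask[refy1:refy2, refx1:refx2],
    axis=(1, 2))
print(dph.shape)   # (5,): one referenced displacement per epoch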
Example no. 27
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = 1.3
    date = 20200907
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    global Aloop, ifgdates, ifgdir, length, width, loop_pngdir  ## for parallel processing

    #%% Set default
    ifgdir = []
    tsadir = []
    loop_thre = 1.5
    n_para = len(os.sched_getaffinity(0))

    cmap_noise = 'viridis'
    cmap_noise_r = 'viridis_r'

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hd:t:l:",
                                       ["help", "n_para="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-d':
                ifgdir = a
            elif o == '-t':
                tsadir = a
            elif o == '-l':
                loop_thre = float(a)
            elif o == '--n_para':
                n_para = int(a)

        if not ifgdir:
            raise Usage('No data directory given, -d is not optional!')
        elif not os.path.isdir(ifgdir):
            raise Usage('No {} dir exists!'.format(ifgdir))
        elif not os.path.exists(os.path.join(ifgdir, 'slc.mli.par')):
            raise Usage('No slc.mli.par file exists in {}!'.format(ifgdir))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    print("\nloop_thre : {} rad".format(loop_thre), flush=True)

    #%% Directory setting
    ifgdir = os.path.abspath(ifgdir)

    if not tsadir:
        tsadir = os.path.join(os.path.dirname(ifgdir),
                              'TS_' + os.path.basename(ifgdir))

    if not os.path.isdir(tsadir):
        print('\nNo {} exists!'.format(tsadir), file=sys.stderr)
        return 1

    tsadir = os.path.abspath(tsadir)

    loopdir = os.path.join(tsadir, '12loop')
    if not os.path.exists(loopdir): os.mkdir(loopdir)

    loop_pngdir = os.path.join(loopdir, 'good_loop_png')
    bad_loop_pngdir = os.path.join(loopdir, 'bad_loop_png')
    bad_loop_cand_pngdir = os.path.join(loopdir, 'bad_loop_cand_png')

    if os.path.exists(loop_pngdir):
        shutil.move(loop_pngdir + '/', loop_pngdir + '_old')  #move to old dir
    if os.path.exists(bad_loop_pngdir):
        for png in glob.glob(bad_loop_pngdir + '/*.png'):
            shutil.move(png, loop_pngdir + '_old')  #move to old dir
        shutil.rmtree(bad_loop_pngdir)
    if os.path.exists(bad_loop_cand_pngdir):
        for png in glob.glob(bad_loop_cand_pngdir + '/*.png'):
            shutil.move(png, loop_pngdir + '_old')  #move to old dir
        shutil.rmtree(bad_loop_cand_pngdir)

    os.mkdir(loop_pngdir)
    os.mkdir(bad_loop_pngdir)
    os.mkdir(bad_loop_cand_pngdir)

    ifg_rasdir = os.path.join(tsadir, '12ifg_ras')
    if os.path.isdir(ifg_rasdir): shutil.rmtree(ifg_rasdir)
    os.mkdir(ifg_rasdir)

    bad_ifgrasdir = os.path.join(tsadir, '12bad_ifg_ras')
    if os.path.isdir(bad_ifgrasdir): shutil.rmtree(bad_ifgrasdir)
    os.mkdir(bad_ifgrasdir)

    bad_ifg_candrasdir = os.path.join(tsadir, '12bad_ifg_cand_ras')
    if os.path.isdir(bad_ifg_candrasdir): shutil.rmtree(bad_ifg_candrasdir)
    os.mkdir(bad_ifg_candrasdir)

    no_loop_ifgrasdir = os.path.join(tsadir, '12no_loop_ifg_ras')
    if os.path.isdir(no_loop_ifgrasdir): shutil.rmtree(no_loop_ifgrasdir)
    os.mkdir(no_loop_ifgrasdir)

    infodir = os.path.join(tsadir, 'info')
    if not os.path.exists(infodir): os.mkdir(infodir)

    resultsdir = os.path.join(tsadir, 'results')
    if not os.path.exists(resultsdir): os.mkdir(resultsdir)

    netdir = os.path.join(tsadir, 'network')

    #%% Read date, network information and size
    ### Get dates
    ifgdates = tools_lib.get_ifgdates(ifgdir)

    ### Read bad_ifg11
    bad_ifg11file = os.path.join(infodir, '11bad_ifg.txt')
    bad_ifg11 = io_lib.read_ifg_list(bad_ifg11file)

    ### Remove bad ifgs and images from list
    ifgdates = list(set(ifgdates) - set(bad_ifg11))
    ifgdates.sort()

    imdates = tools_lib.ifgdates2imdates(ifgdates)

    n_ifg = len(ifgdates)
    n_im = len(imdates)

    ### Get size
    mlipar = os.path.join(ifgdir, 'slc.mli.par')
    width = int(io_lib.get_param_par(mlipar, 'range_samples'))
    length = int(io_lib.get_param_par(mlipar, 'azimuth_lines'))

    ### Get loop matrix
    Aloop = loop_lib.make_loop_matrix(ifgdates)
    n_loop = Aloop.shape[0]

    ### Extract no loop ifgs
    ns_loop4ifg = np.abs(Aloop).sum(axis=0)
    ixs_ifg_no_loop = np.where(ns_loop4ifg == 0)[0]
    no_loop_ifg = [ifgdates[ix] for ix in ixs_ifg_no_loop]

    #%% 1st loop closure check. First without reference
    print('\n1st Loop closure check and make png for all possible {} loops,'.
          format(n_loop),
          flush=True)
    print('with {} parallel processing...'.format(n_para), flush=True)

    bad_ifg_cand = []
    good_ifg = []

    ### Parallel processing
    p = multi.Pool(n_para)
    loop_ph_rms_ifg = np.array(p.map(loop_closure_1st_wrapper, range(n_loop)),
                               dtype=object)
    p.close()

    for i in range(n_loop):
        ### Find index of ifg
        ix_ifg12, ix_ifg23 = np.where(Aloop[i, :] == 1)[0]
        ix_ifg13 = np.where(Aloop[i, :] == -1)[0][0]
        ifgd12 = ifgdates[ix_ifg12]
        ifgd23 = ifgdates[ix_ifg23]
        ifgd13 = ifgdates[ix_ifg13]

        ### List as good or bad candidate
        if loop_ph_rms_ifg[i] >= loop_thre:  #Bad loop including bad ifg.
            bad_ifg_cand.extend([ifgd12, ifgd23, ifgd13])
        else:
            good_ifg.extend([ifgd12, ifgd23, ifgd13])

    if os.path.exists(loop_pngdir + '_old/'):
        shutil.rmtree(loop_pngdir + '_old/')

    #%% Identify bad ifgs and output text
    bad_ifg = loop_lib.identify_bad_ifg(bad_ifg_cand, good_ifg)

    bad_ifgfile = os.path.join(loopdir, 'bad_ifg_loop.txt')
    with open(bad_ifgfile, 'w') as f:
        for i in bad_ifg:
            print('{}'.format(i), file=f)

    ### Compute n_unw without bad_ifg11 and bad_ifg
    n_unw = np.zeros((length, width), dtype=np.int16)
    for ifgd in ifgdates:
        if ifgd in bad_ifg:
            continue

        unwfile = os.path.join(ifgdir, ifgd, ifgd + '.unw')
        unw = io_lib.read_img(unwfile, length, width)

        unw[unw == 0] = np.nan  # Fill 0 with nan
        n_unw += ~np.isnan(unw)  # Count non-NaN unw pixels

    #%% 2nd loop closure check without bad ifgs to define stable ref area
    print('\n2nd Loop closure check without bad ifgs to define ref area...',
          flush=True)
    ns_loop_ph = np.zeros((length, width), dtype=np.float32)
    ns_bad_loop = np.zeros((length, width), dtype=np.int16)
    loop_ph_rms_points = np.zeros((length, width), dtype=np.float32)

    for i in range(n_loop):
        if np.mod(i, 100) == 0:
            print("  {0:3}/{1:3}th loop...".format(i, n_loop), flush=True)

        ### Read unw
        unw12, unw23, unw13, ifgd12, ifgd23, ifgd13 = loop_lib.read_unw_loop_ph(
            Aloop[i, :], ifgdates, ifgdir, length, width)

        ### Skip if bad ifg is included
        if ifgd12 in bad_ifg or ifgd23 in bad_ifg or ifgd13 in bad_ifg:
            continue

        ## Calculate loop phase and rms at points
        loop_ph = unw12 + unw23 - unw13
        loop_2pin = int(np.round(np.nanmedian(loop_ph) /
                                 (2 * np.pi))) * 2 * np.pi
        loop_ph = loop_ph - loop_2pin  # remove integer multiple of 2pi (unbias)
        ns_loop_ph = ns_loop_ph + ~np.isnan(loop_ph)

        loop_ph_sq = loop_ph**2
        loop_ph_sq[np.isnan(loop_ph_sq)] = 0
        loop_ph_rms_points = loop_ph_rms_points + loop_ph_sq

        ns_bad_loop = ns_bad_loop + (loop_ph_sq > np.pi**2)  # suspected unw error
#        ns_bad_loop = ns_bad_loop+(np.abs(loop_ph)>loop_thre)
## multiple NaNs seem to generate a RuntimeWarning

    ns_loop_ph[ns_loop_ph == 0] = np.nan  # To avoid 0 division
    loop_ph_rms_points = np.sqrt(loop_ph_rms_points / ns_loop_ph)

    ### Find stable ref area which has full n_unw and minimum ns_bad_loop and loop_ph_rms_points
    mask1 = (n_unw == np.nanmax(n_unw))
    min_ns_bad_loop = np.nanmin(ns_bad_loop)
    while True:
        mask2 = (ns_bad_loop == min_ns_bad_loop)
        if np.all(~(mask1 * mask2)):  ## All masked
            min_ns_bad_loop = min_ns_bad_loop + 1  ## Make mask2 again
        else:
            break
    loop_ph_rms_points_masked = loop_ph_rms_points * mask1 * mask2
    loop_ph_rms_points_masked[loop_ph_rms_points_masked == 0] = np.nan
    refyx = np.where(
        loop_ph_rms_points_masked == np.nanmin(loop_ph_rms_points_masked))
    refy1 = refyx[0][0]  # 0-based index
    refy2 = refyx[0][0] + 1  # +1 because Python slicing excludes the end
    refx1 = refyx[1][0]
    refx2 = refyx[1][0] + 1
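
    ### Illustrative sketch (defined here but never called) of the same idea:
    ### pick the most stable pixel as the argmin of a masked RMS map. `rms` is
    ### assumed to be a float map and `m1`/`m2` boolean masks of the same shape.
    def _pick_ref_pixel_sketch(rms, m1, m2):
        rms_masked = rms * m1 * m2            # 0 where either mask is False
        rms_masked[rms_masked == 0] = np.nan  # exclude masked-out pixels
        iy, ix = np.unravel_index(np.nanargmin(rms_masked), rms_masked.shape)
        return iy, ix                         # 0-based row/col of the ref pixel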

    ### Save 12ref.txt
    reffile = os.path.join(infodir, '12ref.txt')
    with open(reffile, 'w') as f:
        print('{0}:{1}/{2}:{3}'.format(refx1, refx2, refy1, refy2), file=f)

    ### Save loop_ph_rms_masked and png
    loop_ph_rms_maskedfile = os.path.join(loopdir, 'loop_ph_rms_masked')
    loop_ph_rms_points_masked.tofile(loop_ph_rms_maskedfile)

    cmax = np.nanpercentile(loop_ph_rms_points_masked, 95)
    pngfile = loop_ph_rms_maskedfile + '.png'
    title = 'RMS of loop phase (rad)'
    plot_lib.make_im_png(loop_ph_rms_points_masked, pngfile, cmap_noise_r,
                         title, None, cmax)

    ### Check that the ref area has data in each unw; if not, list as noref_ifg
    noref_ifg = []
    for ifgd in ifgdates:
        if ifgd in bad_ifg:
            continue

        unwfile = os.path.join(ifgdir, ifgd, ifgd + '.unw')
        unw_ref = io_lib.read_img(unwfile, length, width)[refy1:refy2,
                                                          refx1:refx2]

        unw_ref[unw_ref == 0] = np.nan  # Fill 0 with nan
        if np.all(np.isnan(unw_ref)):
            noref_ifg.append(ifgd)

    bad_ifgfile = os.path.join(loopdir, 'bad_ifg_noref.txt')
    with open(bad_ifgfile, 'w') as f:
        for i in noref_ifg:
            print('{}'.format(i), file=f)

    #%% 3rd loop closure check without bad ifgs wrt ref point
    print('\n3rd loop closure check taking into account ref phase...',
          flush=True)
    bad_ifg_cand2 = []
    good_ifg2 = []
    loop_ph_rms_ifg2 = []

    for i in range(n_loop):
        if np.mod(i, 100) == 0:
            print("  {0:3}/{1:3}th loop...".format(i, n_loop), flush=True)

        ### Read unw
        unw12, unw23, unw13, ifgd12, ifgd23, ifgd13 = loop_lib.read_unw_loop_ph(
            Aloop[i, :], ifgdates, ifgdir, length, width)

        ### Skip if bad ifg is included
        if ifgd12 in bad_ifg or ifgd23 in bad_ifg or ifgd13 in bad_ifg:
            loop_ph_rms_ifg2.append('--')
            continue

        ### Skip if noref ifg is included
        if ifgd12 in noref_ifg or ifgd23 in noref_ifg or ifgd13 in noref_ifg:
            loop_ph_rms_ifg2.append('--')
            continue

        ## Ref phase of each unw; if the ref area has no data, the NaN propagates into loop_ph (bad data)
        ref_unw12 = np.nanmean(unw12[refy1:refy2, refx1:refx2])
        ref_unw23 = np.nanmean(unw23[refy1:refy2, refx1:refx2])
        ref_unw13 = np.nanmean(unw13[refy1:refy2, refx1:refx2])

        ## Calculate loop phase taking into account ref phase
        loop_ph = unw12 + unw23 - unw13 - (ref_unw12 + ref_unw23 - ref_unw13)
        loop_ph_rms_ifg2.append(np.sqrt(np.nanmean((loop_ph)**2)))

        ### List as good or bad candidate
        if loop_ph_rms_ifg2[i] >= loop_thre:  #Bad loop including bad ifg.
            bad_ifg_cand2.extend([ifgd12, ifgd23, ifgd13])
        else:
            good_ifg2.extend([ifgd12, ifgd23, ifgd13])

    #%% Identify additional bad ifgs and output text
    bad_ifg2 = loop_lib.identify_bad_ifg(bad_ifg_cand2, good_ifg2)

    bad_ifgfile = os.path.join(loopdir, 'bad_ifg_loopref.txt')
    with open(bad_ifgfile, 'w') as f:
        for i in bad_ifg2:
            print('{}'.format(i), file=f)

    #%% Output all bad ifg list and identify remaining candidate of bad ifgs
    ### Merge bad ifg, bad_ifg2, noref_ifg
    bad_ifg_all = list(set(bad_ifg + bad_ifg2 + noref_ifg))  # Remove multiple
    bad_ifg_all.sort()

    ifgdates_good = list(set(ifgdates) - set(bad_ifg_all))
    ifgdates_good.sort()

    bad_ifgfile = os.path.join(infodir, '12bad_ifg.txt')
    with open(bad_ifgfile, 'w') as f:
        for i in bad_ifg_all:
            print('{}'.format(i), file=f)

    ### Identify removed image and output file
    imdates_good = tools_lib.ifgdates2imdates(ifgdates_good)
    imdates_bad = list(set(imdates) - set(imdates_good))
    imdates_bad.sort()

    bad_imfile = os.path.join(infodir, '12removed_image.txt')
    with open(bad_imfile, 'w') as f:
        for i in imdates_bad:
            print('{}'.format(i), file=f)

    ### Remaining candidate of bad ifg
    bad_ifg_cand_res = list(set(bad_ifg_cand2) - set(bad_ifg_all))
    bad_ifg_cand_res.sort()

    bad_ifg_candfile = os.path.join(infodir, '12bad_ifg_cand.txt')
    with open(bad_ifg_candfile, 'w') as f:
        for i in bad_ifg_cand_res:
            print('{}'.format(i), file=f)

    #%% 4th loop to be used to calc n_loop_err and n_ifg_noloop
    print('\n4th loop to compute statistics...', flush=True)
    ns_loop_err = np.zeros((length, width), dtype=np.int16)

    for i in range(n_loop):
        if np.mod(i, 100) == 0:
            print("  {0:3}/{1:3}th loop...".format(i, n_loop), flush=True)

        ### Read unw
        unw12, unw23, unw13, ifgd12, ifgd23, ifgd13 = loop_lib.read_unw_loop_ph(
            Aloop[i, :], ifgdates, ifgdir, length, width)

        ### Skip if bad ifg is included
        if ifgd12 in bad_ifg_all or ifgd23 in bad_ifg_all or ifgd13 in bad_ifg_all:
            continue

        ## Compute ref
        ref_unw12 = np.nanmean(unw12[refy1:refy2, refx1:refx2])
        ref_unw23 = np.nanmean(unw23[refy1:refy2, refx1:refx2])
        ref_unw13 = np.nanmean(unw13[refy1:refy2, refx1:refx2])

        ## Calculate loop phase taking into account ref phase
        loop_ph = unw12 + unw23 - unw13 - (ref_unw12 + ref_unw23 - ref_unw13)

        ## Count number of loops with suspected unwrap error (>pi)
        loop_ph[np.isnan(loop_ph)] = 0  #to avoid warning
        ns_loop_err = ns_loop_err + (np.abs(loop_ph) > np.pi)  #suspected unw error

    #%% Output loop info, move bad_loop_png
    loop_info_file = os.path.join(loopdir, 'loop_info.txt')
    f = open(loop_info_file, 'w')
    print('# loop_thre: {} rad. *: Removed w/o ref, **: Removed w/ ref'.format(
        loop_thre),
          file=f)
    print('# /: Candidates of bad loops but causative ifgs unidentified',
          file=f)
    print('# image1   image2   image3 RMS w/oref  w/ref', file=f)

    for i in range(n_loop):
        ### Find index of ifg
        ix_ifg12, ix_ifg23 = np.where(Aloop[i, :] == 1)[0]
        ix_ifg13 = np.where(Aloop[i, :] == -1)[0][0]
        ifgd12 = ifgdates[ix_ifg12]
        ifgd23 = ifgdates[ix_ifg23]
        ifgd13 = ifgdates[ix_ifg13]
        imd1 = ifgd12[:8]
        imd2 = ifgd23[:8]
        imd3 = ifgd23[-8:]

        ## Move loop_png if bad ifg or bad ifg_cand is included
        looppngfile = os.path.join(
            loop_pngdir, '{0}_{1}_{2}_loop.png'.format(imd1, imd2, imd3))
        badlooppngfile = os.path.join(
            bad_loop_pngdir, '{0}_{1}_{2}_loop.png'.format(imd1, imd2, imd3))
        badloopcandpngfile = os.path.join(
            bad_loop_cand_pngdir,
            '{0}_{1}_{2}_loop.png'.format(imd1, imd2, imd3))

        badloopflag1 = ' '
        badloopflag2 = '  '
        if ifgd12 in bad_ifg or ifgd23 in bad_ifg or ifgd13 in bad_ifg:
            badloopflag1 = '*'
            shutil.move(looppngfile, badlooppngfile)
        elif ifgd12 in bad_ifg2 or ifgd23 in bad_ifg2 or ifgd13 in bad_ifg2:
            badloopflag2 = '**'
            shutil.move(looppngfile, badlooppngfile)
        elif ifgd12 in bad_ifg_cand_res or ifgd23 in bad_ifg_cand_res or ifgd13 in bad_ifg_cand_res:
            badloopflag1 = '/'
            if os.path.exists(looppngfile):
                shutil.move(looppngfile, badloopcandpngfile)

        if isinstance(loop_ph_rms_ifg2[i], (float, np.floating)):
            str_loop_ph_rms_ifg2 = "{:.2f}".format(loop_ph_rms_ifg2[i])
        else:  ## '--' for loops skipped above
            str_loop_ph_rms_ifg2 = loop_ph_rms_ifg2[i]

        print('{0} {1} {2}    {3:5.2f} {4}  {5:5s} {6}'.format(
            imd1, imd2, imd3, loop_ph_rms_ifg[i], badloopflag1,
            str_loop_ph_rms_ifg2, badloopflag2),
              file=f)

    f.close()

    #%% Saving coh_avg, n_unw, and n_loop_err only for good ifgs
    print('\nSaving coh_avg, n_unw, and n_loop_err...', flush=True)
    ### Calc coh avg and n_unw
    coh_avg = np.zeros((length, width), dtype=np.float32)
    n_coh = np.zeros((length, width), dtype=np.int16)
    n_unw = np.zeros((length, width), dtype=np.int16)
    for ifgd in ifgdates_good:
        ccfile = os.path.join(ifgdir, ifgd, ifgd + '.cc')
        if os.path.getsize(ccfile) == length * width:
            coh = io_lib.read_img(ccfile, length, width, np.uint8)
            coh = coh.astype(np.float32) / 255
        else:
            coh = io_lib.read_img(ccfile, length, width)
            coh[np.isnan(coh)] = 0  # Fill nan with 0

        coh_avg += coh
        n_coh += (coh != 0)

        unwfile = os.path.join(ifgdir, ifgd, ifgd + '.unw')
        unw = io_lib.read_img(unwfile, length, width)

        unw[unw == 0] = np.nan  # Fill 0 with nan
        n_unw += ~np.isnan(unw)  # Count non-NaN unw pixels

    coh_avg[n_coh == 0] = np.nan
    n_coh[n_coh == 0] = 1  #to avoid zero division
    coh_avg = coh_avg / n_coh

    ### Save files
    n_unwfile = os.path.join(resultsdir, 'n_unw')
    np.float32(n_unw).tofile(n_unwfile)

    coh_avgfile = os.path.join(resultsdir, 'coh_avg')
    coh_avg.tofile(coh_avgfile)

    n_loop_errfile = os.path.join(resultsdir, 'n_loop_err')
    np.float32(ns_loop_err).tofile(n_loop_errfile)

    ### Save png
    title = 'Average coherence'
    plot_lib.make_im_png(coh_avg, coh_avgfile + '.png', cmap_noise, title)
    title = 'Number of used unw data'
    plot_lib.make_im_png(n_unw, n_unwfile + '.png', cmap_noise, title, n_im)

    title = 'Number of unclosed loops'
    plot_lib.make_im_png(ns_loop_err, n_loop_errfile + '.png', cmap_noise_r,
                         title)

    #%% Link ras
    ### First, identify suffix of raster image (ras, bmp, or png?)
    unwfile = os.path.join(ifgdir, ifgdates[0], ifgdates[0] + '.unw')
    if os.path.exists(unwfile + '.ras'):
        suffix = '.ras'
    elif os.path.exists(unwfile + '.bmp'):
        suffix = '.bmp'
    elif os.path.exists(unwfile + '.png'):
        suffix = '.png'

    for ifgd in ifgdates:
        rasname = ifgd + '.unw' + suffix
        rasorg = os.path.join(ifgdir, ifgd, rasname)
        ### Bad ifgs
        if ifgd in bad_ifg_all:
            os.symlink(os.path.relpath(rasorg, bad_ifgrasdir),
                       os.path.join(bad_ifgrasdir, rasname))
        ### Remaining bad ifg candidates
        elif ifgd in bad_ifg_cand_res:
            os.symlink(os.path.relpath(rasorg, bad_ifg_candrasdir),
                       os.path.join(bad_ifg_candrasdir, rasname))
        ### Good ifgs
        else:
            os.symlink(os.path.relpath(rasorg, ifg_rasdir),
                       os.path.join(ifg_rasdir, rasname))

        if ifgd in no_loop_ifg:
            os.symlink(os.path.relpath(rasorg, no_loop_ifgrasdir),
                       os.path.join(no_loop_ifgrasdir, rasname))

    #%% Plot network
    ## Read bperp data or dummy
    bperp_file = os.path.join(ifgdir, 'baselines')
    if os.path.exists(bperp_file):
        bperp = io_lib.read_bperp_file(bperp_file, imdates)
    else:  #dummy
        bperp = np.random.random(n_im).tolist()

    pngfile = os.path.join(netdir, 'network12_all.png')
    plot_lib.plot_network(ifgdates, bperp, [], pngfile)

    pngfile = os.path.join(netdir, 'network12.png')
    plot_lib.plot_network(ifgdates, bperp, bad_ifg_all, pngfile)

    pngfile = os.path.join(netdir, 'network12_nobad.png')
    plot_lib.plot_network(ifgdates,
                          bperp,
                          bad_ifg_all,
                          pngfile,
                          plot_bad=False)

    ### Network info
    ## Identify gaps
    G = inv_lib.make_sb_matrix(ifgdates_good)
    ixs_inc_gap = np.where(G.sum(axis=0) == 0)[0]
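
    ### Tiny illustration (not used below): with an incremental SB design matrix
    ### of the form make_sb_matrix is assumed to return (one row per ifg, ones
    ### over the epoch intervals it spans), an all-zero column means no ifg
    ### crosses that interval, i.e. a network gap.
    _G_example = np.array([[1, 1, 0, 0],   # ifg spanning epochs 0-2
                           [0, 1, 0, 0],   # ifg spanning epochs 1-2
                           [0, 0, 0, 1]])  # ifg spanning epochs 3-4
    _gap_example = np.where(_G_example.sum(axis=0) == 0)[0]  # -> [2]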

    ## Connected network
    ix1 = 0
    connected_list = []
    for ix2 in np.append(ixs_inc_gap,
                         len(imdates_good) - 1):  #append for last image
        imd1 = imdates_good[ix1]
        imd2 = imdates_good[ix2]
        dyear = (dt.datetime.strptime(imd2, '%Y%m%d').toordinal() -
                 dt.datetime.strptime(imd1, '%Y%m%d').toordinal()) / 365.25
        n_im_connect = ix2 - ix1 + 1
        connected_list.append(
            [imdates_good[ix1], imdates_good[ix2], dyear, n_im_connect])
        ix1 = ix2 + 1  # Next connection

    #%% Caution about no_loop ifg, remaining large RMS loop and gap
    ### no_loop ifg
    if len(no_loop_ifg) != 0:
        no_loop_ifgfile = os.path.join(infodir, '12no_loop_ifg.txt')
        with open(no_loop_ifgfile, 'w') as f:
            print(
                "\nThere are {} ifgs with no loop; we recommend checking them manually in 12no_loop_ifg_ras"
                .format(len(no_loop_ifg)),
                flush=True)
            for ifgd in no_loop_ifg:
                print('{}'.format(ifgd), flush=True)
                print('{}'.format(ifgd), file=f)

    ### Remaining candidates of bad ifgs
    if len(bad_ifg_cand_res) != 0:
        print(
            "\nThere are {} remaining candidate bad ifgs that could not be identified as bad."
            .format(len(bad_ifg_cand_res)),
            flush=True)
        print("Check 12bad_ifg_cand_ras and loop/bad_loop_cand_png.",
              flush=True)
#        for ifgd in bad_ifg_cand_res:
#            print('{}'.format(ifgd))

    print('\n{0}/{1} ifgs are discarded from further processing.'.format(
        len(bad_ifg_all), n_ifg),
          flush=True)
    for ifgd in bad_ifg_all:
        print('{}'.format(ifgd), flush=True)

    ### Gap
    gap_infofile = os.path.join(infodir, '12network_gap_info.txt')
    with open(gap_infofile, 'w') as f:
        if ixs_inc_gap.size != 0:
            print("Gaps between:", file=f)
            print("\nGaps in network between:", flush=True)
            for ix in ixs_inc_gap:
                print("{} {}".format(imdates_good[ix], imdates_good[ix + 1]),
                      file=f)
                print("{} {}".format(imdates_good[ix], imdates_good[ix + 1]),
                      flush=True)

        print("\nConnected network (year, n_image):", file=f)
        print("\nConnected network (year, n_image):", flush=True)
        for list1 in connected_list:
            print("{0}-{1} ({2:.2f}, {3})".format(list1[0], list1[1], list1[2],
                                                  list1[3]),
                  file=f)
            print("{0}-{1} ({2:.2f}, {3})".format(list1[0], list1[1], list1[2],
                                                  list1[3]),
                  flush=True)

    print(
        '\nIf you want to change the bad ifgs to be discarded, re-run with different thresholds before next step.',
        flush=True)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(os.path.relpath(tsadir)))
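
The loop-closure test above rests on one small computation: for an interferogram triplet 1-2, 2-3, 1-3, the sum unw12 + unw23 - unw13 should reduce to (a multiple of 2*pi plus) noise wherever unwrapping succeeded. A minimal, self-contained sketch of that check follows; the toy arrays and the loop_thre value are illustrative placeholders, not values taken from the script above.

import numpy as np

def loop_closure_rms(unw12, unw23, unw13):
    """RMS of the de-biased loop phase for one triplet (inputs in radians)."""
    loop_ph = unw12 + unw23 - unw13
    # Remove the integer multiple of 2*pi common to the whole loop
    bias = int(np.round(np.nanmedian(loop_ph) / (2 * np.pi))) * 2 * np.pi
    return np.sqrt(np.nanmean((loop_ph - bias)**2))

rng = np.random.default_rng(0)
unw12 = rng.normal(0, 0.3, (100, 100)).astype(np.float32)
unw23 = rng.normal(0, 0.3, (100, 100)).astype(np.float32)
unw13 = unw12 + unw23 + rng.normal(0, 0.1, (100, 100))  # nearly consistent triplet
loop_thre = 1.5  # rad, placeholder threshold
rms = loop_closure_rms(unw12, unw23, unw13)
print('loop RMS = {:.2f} rad -> {}'.format(
    rms, 'bad candidate' if rms >= loop_thre else 'good'))
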
Example no. 28
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = 1.2
    date = 20200227
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    frameID = []
    startdate = 20141001
    enddate = int(dt.date.today().strftime("%Y%m%d"))
    get_gacos = False

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hf:s:e:",
                                       ["help", "get_gacos"])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-f':
                frameID = a
            elif o == '-s':
                startdate = int(a)
            elif o == '-e':
                enddate = int(a)
            elif o == '--get_gacos':
                get_gacos = True

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Determine frameID
    wd = os.getcwd()
    if not frameID:  ## if frameID not indicated
        _tmp = re.findall(r'\d{3}[AD]_\d{5}_\d{6}', wd)
        ##e.g., 021D_04972_131213
        if len(_tmp) == 0:
            print('\nFrame ID cannot be identified from dir name!',
                  file=sys.stderr)
            print('Use -f option', file=sys.stderr)
            return
        else:
            frameID = _tmp[0]
            print('\nFrame ID is {}\n'.format(frameID), flush=True)
    trackID = str(int(frameID[0:3]))

    #%% Directory and file setting
    outdir = os.path.join(wd, 'GEOC')
    if not os.path.exists(outdir): os.mkdir(outdir)
    os.chdir(outdir)

    LiCSARweb = 'http://gws-access.ceda.ac.uk/public/nceo_geohazards/LiCSAR_products/'

    #%% ENU and hgt
    for ENU in ['E', 'N', 'U', 'hgt']:
        enutif = '{}.geo.{}.tif'.format(frameID, ENU)
        if os.path.exists(enutif):
            print('{} already exists. Skip download.'.format(enutif),
                  flush=True)
            continue

        print('Download {}'.format(enutif), flush=True)

        url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', enutif)
        if not tools_lib.download_data(url, enutif):
            print('  Error while downloading from {}'.format(url),
                  file=sys.stderr,
                  flush=True)
            continue

    #%% baselines and metadata.txt
    print('Download baselines', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', 'baselines')
    if not tools_lib.download_data(url, 'baselines'):
        print('  Error while downloading from {}'.format(url),
              file=sys.stderr,
              flush=True)

    print('Download metadata.txt', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'metadata', 'metadata.txt')
    if not tools_lib.download_data(url, 'metadata.txt'):
        print('  Error while downloading from {}'.format(url),
              file=sys.stderr,
              flush=True)

    #%% mli
    ### Get available dates
    url = os.path.join(LiCSARweb, trackID, frameID, 'products', 'epochs')
    response = requests.get(url)
    if not response.ok:  ## Try new structure
        url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
        response = requests.get(url)

    response.encoding = response.apparent_encoding  # avoid garbled characters
    html_doc = response.text
    soup = BeautifulSoup(html_doc, "html.parser")
    tags = soup.find_all(href=re.compile(r"\d{8}"))
    imdates_all = [tag.get("href")[0:8] for tag in tags]
    _imdates = np.int32(np.array(imdates_all))
    _imdates = (_imdates[(_imdates >= startdate) *
                         (_imdates <= enddate)]).astype('str').tolist()

    ## Find earliest date in which mli is available
    imd1 = []
    for imd in _imdates:
        url_mli = os.path.join(url, imd, imd + '.geo.mli.tif')
        response = requests.get(url_mli)
        if response.ok:
            imd1 = imd
            break

    ### Download
    if imd1:
        print('Downloading {}.geo.mli.tif as {}.geo.mli.tif...'.format(
            imd1, frameID),
              flush=True)
        url_mli = os.path.join(url, imd1, imd1 + '.geo.mli.tif')
        mlitif = frameID + '.geo.mli.tif'
        if os.path.exists(mlitif):
            print('    {} already exists. Skip.'.format(mlitif), flush=True)
        elif not tools_lib.download_data(url_mli, mlitif):
            print('    Error while downloading from {}'.format(url_mli),
                  file=sys.stderr,
                  flush=True)
    else:
        print('No mli available on {}'.format(url),
              file=sys.stderr,
              flush=True)

    #%% GACOS if specified
    if get_gacos:
        gacosdir = os.path.join(wd, 'GACOS')
        if not os.path.exists(gacosdir): os.mkdir(gacosdir)

        ### Get available dates
        print('\nDownload GACOS data', flush=True)
        url = os.path.join(LiCSARweb, trackID, frameID, 'epochs')
        response = requests.get(url)
        response.encoding = response.apparent_encoding  # avoid garbled characters
        html_doc = response.text
        soup = BeautifulSoup(html_doc, "html.parser")
        tags = soup.find_all(href=re.compile(r"\d{8}"))
        imdates_all = [tag.get("href")[0:8] for tag in tags]
        _imdates = np.int32(np.array(imdates_all))
        _imdates = (_imdates[(_imdates >= startdate) *
                             (_imdates <= enddate)]).astype('str').tolist()

        ### Extract available dates
        imdates = []
        for imd in _imdates:
            url_sltd = os.path.join(url, imd, imd + '.sltd.geo.tif')
            response = requests.get(url_sltd)
            if response.ok:
                imdates.append(imd)

        n_im = len(imdates)
        if n_im > 0:
            print('{} GACOS data available from {} to {}'.format(
                n_im, imdates[0], imdates[-1]),
                  flush=True)
        else:
            print('No GACOS data available from {} to {}'.format(
                startdate, enddate),
                  flush=True)

        ### Download
        for i, imd in enumerate(imdates):
            print('  Downloading {} ({}/{})...'.format(imd, i + 1, n_im),
                  flush=True)
            url_sltd = os.path.join(url, imd, imd + '.sltd.geo.tif')
            path_sltd = os.path.join(gacosdir, imd + '.sltd.geo.tif')
            if os.path.exists(path_sltd):
                print('    {}.sltd.geo.tif already exists. Skip.'.format(imd),
                      flush=True)
            elif not tools_lib.download_data(url_sltd, path_sltd):
                print('    Error while downloading from {}'.format(url_sltd),
                      file=sys.stderr,
                      flush=True)

    #%% unw and cc
    ### Get available dates
    print('\nDownload geotiff of unw and cc', flush=True)
    url = os.path.join(LiCSARweb, trackID, frameID, 'products')
    response = requests.get(url)
    if not response.ok:  ## Try new structure
        url = os.path.join(LiCSARweb, trackID, frameID, 'interferograms')
        response = requests.get(url)

    response.encoding = response.apparent_encoding  # avoid garbled characters
    html_doc = response.text
    soup = BeautifulSoup(html_doc, "html.parser")
    tags = soup.find_all(href=re.compile(r"\d{8}_\d{8}"))
    ifgdates_all = [tag.get("href")[0:17] for tag in tags]

    ### Extract during start_date to end_date
    ifgdates = []
    for ifgd in ifgdates_all:
        mimd = int(ifgd[:8])
        simd = int(ifgd[-8:])
        if mimd >= startdate and simd <= enddate:
            ifgdates.append(ifgd)

    n_ifg = len(ifgdates)
    imdates = tools_lib.ifgdates2imdates(ifgdates)
    print('{} IFGs available from {} to {}'.format(n_ifg, imdates[0],
                                                   imdates[-1]),
          flush=True)

    ### Download
    for i, ifgd in enumerate(ifgdates):
        print('  Downloading {} ({}/{})...'.format(ifgd, i + 1, n_ifg),
              flush=True)
        url_unw = os.path.join(url, ifgd, ifgd + '.geo.unw.tif')
        path_unw = os.path.join(ifgd, ifgd + '.geo.unw.tif')
        if not os.path.exists(ifgd): os.mkdir(ifgd)
        if os.path.exists(path_unw):
            print('    {}.geo.unw.tif already exists. Skip.'.format(ifgd),
                  flush=True)
        elif not tools_lib.download_data(url_unw, path_unw):
            print('    Error while downloading from {}'.format(url_unw),
                  file=sys.stderr,
                  flush=True)

        url_cc = os.path.join(url, ifgd, ifgd + '.geo.cc.tif')
        path_cc = os.path.join(ifgd, ifgd + '.geo.cc.tif')
        if os.path.exists(path_cc):
            print('    {}.geo.cc.tif already exists. Skip.'.format(ifgd),
                  flush=True)
        elif not tools_lib.download_data(url_cc, path_cc):
            print('    Error while downloading from {}'.format(url_cc),
                  file=sys.stderr,
                  flush=True)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output directory: {}\n'.format(outdir))
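
The script above keeps repeating one pattern: fetch a LiCSAR directory listing, scrape the 8-digit date links with BeautifulSoup, and keep only the epochs inside a start/end window. A condensed sketch of that pattern is below; the commented-out URL is only an example of the same path structure, and error handling is deliberately minimal.

import re
import numpy as np
import requests
from bs4 import BeautifulSoup

def get_dates_in_range(url, startdate, enddate):
    """Scrape YYYYMMDD hrefs from a directory listing and filter by date."""
    response = requests.get(url)
    if not response.ok:
        return []
    response.encoding = response.apparent_encoding  # avoid garbled characters
    soup = BeautifulSoup(response.text, "html.parser")
    tags = soup.find_all(href=re.compile(r"\d{8}"))
    dates = np.array([int(tag.get("href")[0:8]) for tag in tags], dtype=np.int32)
    dates = dates[(dates >= startdate) & (dates <= enddate)]
    return dates.astype(str).tolist()

# Hypothetical usage, mirroring the track/frame layout used above:
# epochs = get_dates_in_range(
#     'http://gws-access.ceda.ac.uk/public/nceo_geohazards/LiCSAR_products/'
#     '21/021D_04972_131213/epochs', 20141001, 20221231)
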
Example no. 29
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = 1.2
    date = 20200703
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    #%% Set default
    xy_str = []
    lonlat_str = []
    cumfile = 'cum_filt.h5'
    tsfile = []
    refarea = []
    refarea_geo = []
    maskfile = []

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "hp:g:i:o:r:",
                                       ["help", "ref_geo=", "mask="])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-p':
                xy_str = a
            elif o == '-g':
                lonlat_str = a
            elif o == '-i':
                cumfile = a
            elif o == '-o':
                tsfile = a
            elif o == '-r':
                refarea = a
            elif o == '--ref_geo':
                refarea_geo = a
            elif o == '--mask':
                maskfile = a

        if not xy_str and not lonlat_str:
            raise Usage('No point location given, use either -p or -g!')
        elif not os.path.exists(cumfile):
            raise Usage('No {} exists! Use -i option.'.format(cumfile))

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Read info
    ### Read cumfile
    cumh5 = h5.File(cumfile, 'r')
    cum = cumh5['cum']
    gap = cumh5['gap']
    imdates = cumh5['imdates'][()].astype(str).tolist()
    n_im, length, width = cum.shape

    if 'corner_lat' in list(cumh5.keys()):
        geocod_flag = True
        lat1 = float(cumh5['corner_lat'][()])
        lon1 = float(cumh5['corner_lon'][()])
        dlat = float(cumh5['post_lat'][()])
        dlon = float(cumh5['post_lon'][()])
    else:
        geocod_flag = False

    if 'deramp_flag' in list(cumh5.keys()):
        deramp_flag = cumh5['deramp_flag'][()]
    else:
        deramp_flag = None

    if 'hgt_linear_flag' in list(cumh5.keys()):
        hgt_linear_flag = cumh5['hgt_linear_flag'][()]
    else:
        hgt_linear_flag = None

    if 'filtwidth_km' in list(cumh5.keys()):
        filtwidth_km = float(cumh5['filtwidth_km'][()])
        filtwidth_yr = float(cumh5['filtwidth_yr'][()])
    else:
        filtwidth_km = filtwidth_yr = None

    #%% Set info
    ###Set ref area
    if refarea:
        if not tools_lib.read_range(refarea, width, length):
            print('\nERROR in {}\n'.format(refarea), file=sys.stderr)
            return 2
        else:
            refx1, refx2, refy1, refy2 = tools_lib.read_range(
                refarea, width, length)
    elif refarea_geo and geocod_flag:
        lat1 = float(cumh5['corner_lat'][()])
        lon1 = float(cumh5['corner_lon'][()])
        dlat = float(cumh5['post_lat'][()])
        dlon = float(cumh5['post_lon'][()])
        if not tools_lib.read_range_geo(refarea_geo, width, length, lat1, dlat,
                                        lon1, dlon):
            print('\nERROR in {}\n'.format(refarea_geo), file=sys.stderr)
            return 2
        else:
            refx1, refx2, refy1, refy2 = tools_lib.read_range_geo(
                refarea_geo, width, length, lat1, dlat, lon1, dlon)
    else:
        refarea = cumh5['refarea'][()]
        refx1, refx2, refy1, refy2 = [
            int(s) for s in re.split('[:/]', refarea)
        ]

    if geocod_flag:
        reflat2, reflon1 = tools_lib.xy2bl(refx1, refy1, lat1, dlat, lon1,
                                           dlon)
        reflat1, reflon2 = tools_lib.xy2bl(refx2 - 1, refy2 - 1, lat1, dlat,
                                           lon1, dlon)
    else:
        reflat1 = reflon1 = reflat2 = reflon2 = None

    ### Set point
    if xy_str:  ## -p option
        x, y = [int(s) for s in xy_str.split('/')]
        ## x/y are used as 0-based indices below (cum[:, y, x])
        if not 0 <= x < width:
            print("\nERROR: {} is out of range ({}-{})".format(
                x, 0, width - 1),
                  file=sys.stderr)
            return 2
        elif not 0 <= y < length:
            print("\nERROR: {} is out of range ({}-{})".format(
                y, 0, length - 1),
                  file=sys.stderr)
            return 2

        if geocod_flag:
            lat, lon = tools_lib.xy2bl(x, y, lat1, dlat, lon1, dlon)
        else:
            lat = lon = None

    else:  ## -g option
        if not geocod_flag:
            print('\nERROR: not geocoded, -g option unavailable\n',
                  file=sys.stderr)
            return 2

        lat2 = lat1 + dlat * (length - 1)
        lon2 = lon1 + dlon * (width - 1)
        lon, lat = [float(s) for s in lonlat_str.split('/')]
        if not lon1 <= lon <= lon2:
            print("\nERROR: {} is out of range ({}-{})".format(
                lon, lon1, lon2),
                  file=sys.stderr)
            return 2
        elif not lat2 <= lat <= lat1:
            print("\nERROR: {} is out of range ({}-{})".format(
                lat, lat2, lat1),
                  file=sys.stderr)
            return 2

        x, y = tools_lib.bl2xy(lon, lat, width, length, lat1, dlat, lon1, dlon)
        ## update latlon
        lat, lon = tools_lib.xy2bl(x, y, lat1, dlat, lon1, dlon)

    if geocod_flag:
        print('Location: {:.5f}/{:.5f}'.format(lon, lat))

    if not tsfile:
        tsfile = 'ts_{}_{}.txt'.format(x, y)

    ### Gaps
    gap1 = gap[:, y, x]

    ### mask
    if maskfile:
        mask = io_lib.read_img(maskfile, length, width)
        mask[mask == 0] = np.nan
    else:
        mask = np.ones((length, width), dtype=np.float32)

    #%% Read cum data
    ts = cum[:, y, x] * mask[y, x]
    if np.all(np.isnan(ts)):
        print('\nERROR: All cum data are Nan at {}/{}!\n'.format(x, y),
              file=sys.stderr)
        return 2

    ts_ref = np.nanmean(cum[:, refy1:refy2, refx1:refx2] *
                        mask[refy1:refy2, refx1:refx2],
                        axis=(1, 2))
    if np.all(np.isnan(ts_ref)):
        print('\nERROR: Ref area has only NaN value!\n', file=sys.stderr)
        return 2

    ts_dif = ts - ts_ref
    ts_dif = ts_dif - ts_dif[0]  ## Make first date zero

    ### Make txt
    io_lib.make_tstxt(x,
                      y,
                      imdates,
                      ts_dif,
                      tsfile,
                      refx1,
                      refx2,
                      refy1,
                      refy2,
                      gap1,
                      lat=lat,
                      lon=lon,
                      reflat1=reflat1,
                      reflat2=reflat2,
                      reflon1=reflon1,
                      reflon2=reflon2,
                      deramp_flag=deramp_flag,
                      hgt_linear_flag=hgt_linear_flag,
                      filtwidth_km=filtwidth_km,
                      filtwidth_yr=filtwidth_yr)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output: {}\n'.format(tsfile), flush=True)
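
The -g branch above relies on two small coordinate conversions (tools_lib.xy2bl and tools_lib.bl2xy, whose code is not shown in this snippet). A minimal sketch of what they are assumed to do is given below: pixel indices map to latitude/longitude through the corner coordinate and the posting, and the inverse rounds a lon/lat to the nearest pixel. The clipping to the image extent is an assumption of this sketch, not necessarily the library's behaviour.

import numpy as np

def xy2bl(x, y, lat1, dlat, lon1, dlon):
    """Pixel (x, y) -> (lat, lon); lat1/lon1 is the corner, dlat/dlon the posting."""
    return lat1 + dlat * y, lon1 + dlon * x  # dlat is negative for north-up grids

def bl2xy(lon, lat, width, length, lat1, dlat, lon1, dlon):
    """(lon, lat) -> nearest pixel (x, y), clipped to the image extent."""
    x = int(np.round((lon - lon1) / dlon))
    y = int(np.round((lat - lat1) / dlat))
    return min(max(x, 0), width - 1), min(max(y, 0), length - 1)

# Example: a 0.001-deg grid whose top-left corner is at 40.0N, 20.0E
x, y = bl2xy(20.0123, 39.9876, 1000, 1000, 40.0, -0.001, 20.0, 0.001)
print(x, y, xy2bl(x, y, 40.0, -0.001, 20.0, 0.001))  # -> 12 12 (39.988, 20.012)
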
Example no. 30
0
def main(argv=None):

    #%% Check argv
    if argv is None:
        argv = sys.argv

    start = time.time()
    ver = "1.4.5"
    date = 20201124
    author = "Y. Morishita"
    print("\n{} ver{} {} {}".format(os.path.basename(argv[0]), ver, date,
                                    author),
          flush=True)
    print("{} {}".format(os.path.basename(argv[0]), ' '.join(argv[1:])),
          flush=True)

    ## for parallel processing
    global cum, mask, deg_ramp, hgt_linearflag, hgt, hgt_min, hgt_max,\
    filtcumdir, filtincdir, imdates, cycle, coef_r2m, models, \
    filtwidth_yr, filtwidth_km, dt_cum, x_stddev, y_stddev, mask2, cmap_wrap
    ## cum_org read from HDF5 seems to get corrupted when shared as a global with the parallel wrapper, so pass it by argument instead.

    #%% Set default
    tsadir = []
    filtwidth_km = 2
    filtwidth_yr = []
    deg_ramp = []
    hgt_linearflag = False
    hgt_min = 200  ## meter
    hgt_max = 10000  ## meter
    maskflag = True
    try:
        n_para = len(os.sched_getaffinity(0))
    except:
        n_para = multi.cpu_count()

    range_str = []
    range_geo_str = []
    ex_range_str = []
    ex_range_geo_str = []

    cumname = 'cum.h5'

    cmap_vel = SCM.roma.reversed()
    cmap_noise_r = 'viridis_r'
    cmap_wrap = SCM.romaO
    # q = multi.get_context('fork')
    q = multi.get_context('spawn')
    compress = 'gzip'

    #%% Read options
    try:
        try:
            opts, args = getopt.getopt(argv[1:], "ht:s:y:r:", [
                "help", "hgt_linear", "hgt_min=", "hgt_max=", "nomask",
                "n_para=", "range=", "range_geo=", "ex_range=", "ex_range_geo="
            ])
        except getopt.error as msg:
            raise Usage(msg)
        for o, a in opts:
            if o == '-h' or o == '--help':
                print(__doc__)
                return 0
            elif o == '-t':
                tsadir = a
            elif o == '-s':
                filtwidth_km = float(a)
            elif o == '-y':
                filtwidth_yr = float(a)
            elif o == '-r':
                deg_ramp = a
            elif o == '--hgt_linear':
                hgt_linearflag = True
            elif o == '--hgt_min':
                hgt_min = int(a)
            elif o == '--hgt_max':
                hgt_max = int(a)
            elif o == '--nomask':
                maskflag = False
            elif o == '--n_para':
                n_para = int(a)
            elif o == '--range':
                range_str = a
            elif o == '--range_geo':
                range_geo_str = a
            elif o == '--ex_range':
                ex_range_str = a
            elif o == '--ex_range_geo':
                ex_range_geo_str = a

        if not tsadir:
            raise Usage('No tsa directory given, -t is not optional!')
        elif not os.path.isdir(tsadir):
            raise Usage('No {} dir exists!'.format(tsadir))
        elif not os.path.exists(os.path.join(tsadir, cumname)):
            raise Usage('No {} exists in {}!'.format(cumname, tsadir))
        if range_str and range_geo_str:
            raise Usage(
                'Both --range and --range_geo given, use either one not both!')
        if ex_range_str and ex_range_geo_str:
            raise Usage(
                'Both --ex_range and --ex_range_geo given, use either one not both!'
            )

    except Usage as err:
        print("\nERROR:", file=sys.stderr, end='')
        print("  " + str(err.msg), file=sys.stderr)
        print("\nFor help, use -h or --help.\n", file=sys.stderr)
        return 2

    #%% Directory and file setting
    tsadir = os.path.abspath(tsadir)
    cumfile = os.path.join(tsadir, cumname)
    resultsdir = os.path.join(tsadir, 'results')
    infodir = os.path.join(tsadir, 'info')
    inparmfile = os.path.join(infodir, '13parameters.txt')
    if not os.path.exists(inparmfile):  ## for old LiCSBAS13 <v1.2
        inparmfile = os.path.join(infodir, 'parameters.txt')
    outparmfile = os.path.join(infodir, '16parameters.txt')

    pixsp_r = float(io_lib.get_param_par(inparmfile, 'pixel_spacing_r'))
    pixsp_a = float(io_lib.get_param_par(inparmfile, 'pixel_spacing_a'))
    x_stddev = filtwidth_km * 1000 / pixsp_r
    y_stddev = filtwidth_km * 1000 / pixsp_a

    wavelength = float(io_lib.get_param_par(inparmfile, 'wavelength'))  #meter
    coef_r2m = -wavelength / 4 / np.pi * 1000  #rad -> mm, positive is -LOS

    if wavelength > 0.2:  ## L-band
        cycle = 1.5  # 2pi/cycle for comparison png
    elif wavelength <= 0.2:  ## C-band
        cycle = 3  # 3*2pi/cycle for comparison png
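
    # Quick numeric illustration (not used below): for a typical C-band
    # wavelength of ~0.0555 m, coef_r2m = -0.0555 / 4 / np.pi * 1000,
    # i.e. about -4.4 mm per radian of phase.
    _coef_r2m_cband_example = -0.0555 / 4 / np.pi * 1000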

    filtincdir = os.path.join(tsadir, '16filt_increment')
    if os.path.exists(filtincdir): shutil.rmtree(filtincdir)
    os.mkdir(filtincdir)
    filtcumdir = os.path.join(tsadir, '16filt_cum')
    if os.path.exists(filtcumdir): shutil.rmtree(filtcumdir)
    os.mkdir(filtcumdir)

    cumffile = os.path.join(tsadir, 'cum_filt.h5')

    vconstfile = os.path.join(resultsdir, 'vintercept.filt')
    velfile = os.path.join(resultsdir, 'vel.filt')

    cumh5 = h5.File(cumfile, 'r')

    if os.path.exists(cumffile): os.remove(cumffile)
    cumfh5 = h5.File(cumffile, 'w')

    #%% Dates
    imdates = cumh5['imdates'][()].astype(str).tolist()
    cum_org = cumh5['cum']
    n_im, length, width = cum_org.shape

    if n_para > n_im:
        n_para = n_im

    ### Calc dt in year
    imdates_dt = ([
        dt.datetime.strptime(imd, '%Y%m%d').toordinal() for imd in imdates
    ])
    dt_cum = np.float32((np.array(imdates_dt) - imdates_dt[0]) / 365.25)

    ### Save dates and other info into cumf
    cumfh5.create_dataset('imdates', data=cumh5['imdates'])
    cumfh5.create_dataset('gap', data=cumh5['gap'], compression=compress)
    if 'bperp' in list(cumh5.keys()):  ## if dummy, no bperp field
        cumfh5.create_dataset('bperp', data=cumh5['bperp'])
    else:
        print('No bperp field found in {}. Skip.'.format(cumname))

    if 'corner_lat' in list(cumh5.keys()):
        lat1 = float(cumh5['corner_lat'][()])
        lon1 = float(cumh5['corner_lon'][()])
        dlat = float(cumh5['post_lat'][()])
        dlon = float(cumh5['post_lon'][()])
        cumfh5.create_dataset('corner_lat', data=cumh5['corner_lat'])
        cumfh5.create_dataset('corner_lon', data=cumh5['corner_lon'])
        cumfh5.create_dataset('post_lat', data=cumh5['post_lat'])
        cumfh5.create_dataset('post_lon', data=cumh5['post_lon'])
    else:  ## not geocoded
        print('No latlon field found in {}. Skip.'.format(cumname))

    ### temporal filter width
    if not filtwidth_yr and filtwidth_yr != 0:
        filtwidth_yr = dt_cum[-1] / (n_im - 1) * 3  ## avg interval*3

    ### hgt_linear
    if hgt_linearflag:
        hgtfile = os.path.join(resultsdir, 'hgt')
        if not os.path.exists(hgtfile):
            print('\nERROR: No hgt file exists in results dir!',
                  file=sys.stderr)
            print('--hgt_linear option cannot be used.', file=sys.stderr)
            return 2
        hgt = io_lib.read_img(hgtfile, length, width)
        hgt[np.isnan(hgt)] = 0
    else:
        hgt = []

    #%% --range[_geo] and --ex_range[_geo]
    if range_str:  ## --range
        if not tools_lib.read_range(range_str, width, length):
            print('\nERROR in {}\n'.format(range_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range(range_str, width, length)
            range_str = '{}:{}/{}:{}'.format(x1, x2, y1, y2)
    elif range_geo_str:  ## --range_geo
        if not tools_lib.read_range_geo(range_geo_str, width, length, lat1,
                                        dlat, lon1, dlon):
            print('\nERROR in {}\n'.format(range_geo_str), file=sys.stderr)
            return 1
        else:
            x1, x2, y1, y2 = tools_lib.read_range_geo(range_geo_str, width,
                                                      length, lat1, dlat, lon1,
                                                      dlon)
            range_str = '{}:{}/{}:{}'.format(x1, x2, y1, y2)

    if ex_range_str:  ## --ex_range
        if not tools_lib.read_range(ex_range_str, width, length):
            print('\nERROR in {}\n'.format(ex_range_str), file=sys.stderr)
            return 1
        else:
            ex_x1, ex_x2, ex_y1, ex_y2 = tools_lib.read_range(
                ex_range_str, width, length)
            ex_range_str = '{}:{}/{}:{}'.format(ex_x1, ex_x2, ex_y1, ex_y2)
    elif ex_range_geo_str:  ## --ex_range_geo
        if not tools_lib.read_range_geo(ex_range_geo_str, width, length, lat1,
                                        dlat, lon1, dlon):
            print('\nERROR in {}\n'.format(ex_range_geo_str), file=sys.stderr)
            return 1
        else:
            ex_x1, ex_x2, ex_y1, ex_y2 = tools_lib.read_range_geo(
                ex_range_geo_str, width, length, lat1, dlat, lon1, dlon)
            ex_range_str = '{}:{}/{}:{}'.format(ex_x1, ex_x2, ex_y1, ex_y2)

    ### Make range mask
    mask2 = np.ones((length, width), dtype=np.float32)
    if range_str:
        mask2 = mask2 * np.nan
        mask2[y1:y2, x1:x2] = 1
    if ex_range_str:
        mask2[ex_y1:ex_y2, ex_x1:ex_x2] = np.nan

    #%% Display settings
    print('')
    print('Size of image (w,l)      : {0}, {1}'.format(width, length))
    print('Number of images         : {}'.format(n_im))
    print('Width of filter in space : {} km ({:.1f}x{:.1f} pixel)'.format(
        filtwidth_km, x_stddev, y_stddev))
    print('Width of filter in time  : {:.3f} yr ({} days)'.format(
        filtwidth_yr, int(filtwidth_yr * 365.25)))
    print('Deramp flag              : {}'.format(deg_ramp), flush=True)
    print('hgt-linear flag          : {}'.format(hgt_linearflag), flush=True)
    if hgt_linearflag:
        print('Minimum hgt              : {} m'.format(hgt_min), flush=True)
        print('Maximum hgt              : {} m'.format(hgt_max), flush=True)
    if range_str:
        print('Range                    : {}'.format(range_str), flush=True)
    if ex_range_str:
        print('Excluded range           : {}'.format(ex_range_str), flush=True)
    with open(outparmfile, "w") as f:
        print('filtwidth_km:  {}'.format(filtwidth_km), file=f)
        print('filtwidth_xpixels:  {:.1f}'.format(x_stddev), file=f)
        print('filtwidth_ypixels:  {:.1f}'.format(y_stddev), file=f)
        print('filtwidth_yr:  {:.3f}'.format(filtwidth_yr), file=f)
        print('filtwidth_day:  {}'.format(int(filtwidth_yr * 365.25)), file=f)
        print('deg_ramp:  {}'.format(deg_ramp), file=f)
        print('hgt_linear:  {}'.format(hgt_linearflag * 1), file=f)
        print('hgt_min: {}'.format(hgt_min), file=f)
        print('hgt_max: {}'.format(hgt_max), file=f)
        print('range: {}'.format(range_str), file=f)
        print('ex_range: {}'.format(ex_range_str), file=f)

    #%% Load Mask (1: unmask, 0: mask, nan: no cum data)
    if maskflag:
        maskfile = os.path.join(resultsdir, 'mask')
        mask = io_lib.read_img(maskfile, length, width)
        mask[mask == 0] = np.nan  ## 0->nan
    else:
        mask = np.ones((length, width), dtype=np.float32)
        mask[np.isnan(cum_org[0, :, :])] = np.nan

    #%% First, deramp and hgt-linear if indicated
    cum = np.zeros((cum_org.shape), dtype=np.float32) * np.nan
    if not deg_ramp and not hgt_linearflag:
        cum = cum_org[()]

    else:
        if not deg_ramp:
            print('\nEstimate hgt-linear component,', flush=True)
        elif not hgt_linearflag:
            print('\nDeramp ifgs with the degree of {},'.format(deg_ramp),
                  flush=True)
        else:
            print('\nDeramp ifgs with the degree of {} and hgt-linear,'.format(
                deg_ramp),
                  flush=True)
        print('with {} parallel processing...'.format(n_para), flush=True)

        args = [(i, cum_org[i, :, :]) for i in range(n_im)]

        ### Parallel processing
        p = q.Pool(n_para)
        _result = np.array(p.map(deramp_wrapper, args), dtype=object)
        p.close()
        del args

        models = _result[:, 1]
        for i in range(n_im):
            cum[i, :, :] = _result[i, 0]
        del _result

        ### Only for output increment png files
        print(
            '\nCreate png for increment with {} parallel processing...'.format(
                n_para),
            flush=True)
        args = [(i, cum_org[i, :, :], cum_org[i - 1, :, :])
                for i in range(1, n_im)]
        p = q.Pool(n_para)
        p.map(deramp_wrapper2, args)
        p.close()
        del args

    #%% Filter each image
    cum_filt = cumfh5.require_dataset('cum', (n_im, length, width),
                                      dtype=np.float32,
                                      compression=compress)

    print('\nHP filter in time, LP filter in space,', flush=True)
    print('with {} parallel processing...'.format(n_para), flush=True)

    ### Parallel processing
    p = q.Pool(n_para)
    # cum_filt[:, :, :] = np.array(p.map(filter_wrapper, range(n_im)), dtype=np.float32)
    cum_filt[:, :, :] = np.array(
        p.map(filter_wrapper,
              [(i, cum, filtwidth_yr, dt_cum, x_stddev, y_stddev, mask,
                coef_r2m, cycle, filtcumdir, imdates, cmap_wrap, filtwidth_km)
               for i in range(n_im)]),
        dtype=np.float32)
    p.close()

    ### Only for output increment png files
    print('\nCreate png for increment with {} parallel processing...'.format(
        n_para),
          flush=True)
    args = [(i, cum_filt[i, :, :] - cum_filt[i - 1, :, :])
            for i in range(1, n_im)]  # each element: (index, filtered increment)
    p = q.Pool(n_para)
    p.map(filter_wrapper2,
          [(a, cum, coef_r2m, cycle, mask, filtincdir, imdates, cmap_wrap)
           for a in args])  # a: the (index, increment) pair built above
    p.close()
    del args

    #%% Find stable ref point
    print('\nFind stable reference point...', flush=True)
    ### Compute RMS of time series with reference to all points
    sumsq_cum_wrt_med = np.zeros((length, width), dtype=np.float32)
    for i in range(n_im):
        sumsq_cum_wrt_med = sumsq_cum_wrt_med + (
            cum_filt[i, :, :] - np.nanmedian(cum_filt[i, :, :]))**2
    rms_cum_wrt_med = np.sqrt(sumsq_cum_wrt_med / n_im) * mask

    ### Mask by minimum n_gap
    n_gap = io_lib.read_img(os.path.join(resultsdir, 'n_gap'), length, width)
    min_n_gap = np.nanmin(n_gap)
    mask_n_gap = np.float32(n_gap == min_n_gap)
    mask_n_gap[mask_n_gap == 0] = np.nan
    rms_cum_wrt_med = rms_cum_wrt_med * mask_n_gap

    ### Find stable reference
    min_rms = np.nanmin(rms_cum_wrt_med)
    refy1s, refx1s = np.where(rms_cum_wrt_med == min_rms)
    refy1s, refx1s = refy1s[0], refx1s[0]  ## Only first index
    refy2s, refx2s = refy1s + 1, refx1s + 1
    print('Selected ref: {}:{}/{}:{}'.format(refx1s, refx2s, refy1s, refy2s),
          flush=True)

    ### Referencing cumulative displacement to new stable ref
    for i in range(n_im):
        cum_filt[i, :, :] = cum_filt[i, :, :] - cum[i, refy1s, refx1s]

    ### Save image
    rms_cum_wrt_med_file = os.path.join(infodir, '16rms_cum_wrt_med')
    rms_cum_wrt_med.tofile(rms_cum_wrt_med_file)  # write directly in binary mode

    pngfile = os.path.join(infodir, '16rms_cum_wrt_med.png')
    plot_lib.make_im_png(rms_cum_wrt_med, pngfile, cmap_noise_r,
                         'RMS of cum wrt median (mm)',
                         np.nanpercentile(rms_cum_wrt_med, 1),
                         np.nanpercentile(rms_cum_wrt_med, 99))

    ### Save ref
    cumfh5.create_dataset('refarea',
                          data='{}:{}/{}:{}'.format(refx1s, refx2s, refy1s,
                                                    refy2s))
    refsfile = os.path.join(infodir, '16ref.txt')
    with open(refsfile, 'w') as f:
        print('{}:{}/{}:{}'.format(refx1s, refx2s, refy1s, refy2s), file=f)

    if 'corner_lat' in list(cumh5.keys()):  ## Geocoded
        ### Make ref_stable.kml
        reflat = lat1 + dlat * refy1s
        reflon = lon1 + dlon * refx1s
        io_lib.make_point_kml(reflat, reflon,
                              os.path.join(infodir, '16ref.kml'))

    #%% Calc filtered velocity
    print('\nCalculate velocity of filtered time series...', flush=True)
    G = np.stack((np.ones_like(dt_cum), dt_cum), axis=1)
    vconst = np.zeros((length, width), dtype=np.float32) * np.nan
    vel = np.zeros((length, width), dtype=np.float32) * np.nan

    bool_unnan = ~np.isnan(cum_filt[0, :, :]).reshape(
        length, width)  ## True where the first-epoch cum is not NaN
    cum_pt = cum_filt[()].reshape(n_im, length *
                                  width)[:, bool_unnan.ravel()]  #n_im x n_pt
    n_pt_unnan = bool_unnan.sum()
    vconst_tmp = np.zeros((n_pt_unnan), dtype=np.float32) * np.nan
    vel_tmp = np.zeros((n_pt_unnan), dtype=np.float32) * np.nan

    bool_nonan_pt = np.all(~np.isnan(cum_pt), axis=0)

    ### First, calc vel point without nan
    print('  First, solving {0:6}/{1:6}th points with full cum...'.format(
        bool_nonan_pt.sum(), n_pt_unnan),
          flush=True)
    vconst_tmp[bool_nonan_pt], vel_tmp[bool_nonan_pt] = np.linalg.lstsq(
        G, cum_pt[:, bool_nonan_pt], rcond=None)[0]

    ### Next, calc vel point with nan
    print('  Next, solving {0:6}/{1:6}th points with nan in cum...'.format(
        (~bool_nonan_pt).sum(), n_pt_unnan),
          flush=True)

    mask_cum = ~np.isnan(cum_pt[:, ~bool_nonan_pt])
    vconst_tmp[~bool_nonan_pt], vel_tmp[
        ~bool_nonan_pt] = inv_lib.censored_lstsq_slow(
            G, cum_pt[:, ~bool_nonan_pt], mask_cum)
    vconst[bool_unnan], vel[bool_unnan] = vconst_tmp, vel_tmp

    vconst.tofile(vconstfile)
    vel.tofile(velfile)

    if maskflag:
        vel_mskd = vel * mask
        vconst_mskd = vconst * mask
        vconst_mskd.tofile(vconstfile + '.mskd')
        vel_mskd.tofile(velfile + '.mskd')

    cumfh5.create_dataset('vel',
                          data=vel.reshape(length, width),
                          compression=compress)
    cumfh5.create_dataset('vintercept',
                          data=vconst.reshape(length, width),
                          compression=compress)

    #%% Add info and close
    cumfh5.create_dataset('filtwidth_yr', data=filtwidth_yr)
    cumfh5.create_dataset('filtwidth_km', data=filtwidth_km)
    cumfh5.create_dataset('deramp_flag', data=deg_ramp)
    cumfh5.create_dataset('hgt_linear_flag', data=hgt_linearflag * 1)

    cumh5.close()
    cumfh5.close()

    #%% Output image
    pngfile = os.path.join(resultsdir, 'vel.filt.png')
    title = 'Filtered velocity (mm/yr)'
    vmin = np.nanpercentile(vel, 1)
    vmax = np.nanpercentile(vel, 99)
    plot_lib.make_im_png(vel, pngfile, cmap_vel, title, vmin, vmax)

    ## vintercept
    pngfile = os.path.join(resultsdir, 'vintercept.filt.png')
    title = 'Intercept of filtered velocity (mm)'
    vmin = np.nanpercentile(vconst, 1)
    vmax = np.nanpercentile(vconst, 99)
    plot_lib.make_im_png(vconst, pngfile, cmap_vel, title, vmin, vmax)

    if maskflag:
        pngfile = os.path.join(resultsdir, 'vel.filt.mskd.png')
        title = 'Masked filtered velocity (mm/yr)'
        vmin = np.nanpercentile(vel_mskd, 1)
        vmax = np.nanpercentile(vel_mskd, 99)
        plot_lib.make_im_png(vel_mskd, pngfile, cmap_vel, title, vmin, vmax)

        ## vintercept
        pngfile = os.path.join(resultsdir, 'vintercept.filt.mskd.png')
        title = 'Masked intercept of filtered velocity (mm)'
        vmin = np.nanpercentile(vconst_mskd, 1)
        vmax = np.nanpercentile(vconst_mskd, 99)
        plot_lib.make_im_png(vconst_mskd, pngfile, cmap_vel, title, vmin, vmax)

    #%% Finish
    elapsed_time = time.time() - start
    hour = int(elapsed_time / 3600)
    minute = int(np.mod((elapsed_time / 60), 60))
    sec = int(np.mod(elapsed_time, 60))
    print("\nElapsed time: {0:02}h {1:02}m {2:02}s".format(hour, minute, sec))

    print('\n{} Successfully finished!!\n'.format(os.path.basename(argv[0])))
    print('Output: {}\n'.format(os.path.relpath(cumffile)), flush=True)

    print('To plot the time-series:')
    print('LiCSBAS_plot_ts.py -i "{}" &\n'.format(os.path.relpath(cumffile)))
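
The filtered-velocity step above fits each pixel's time series with a constant-plus-linear model, cum(t) ~ vintercept + vel * t, by stacking G = [1, dt] and solving with ordinary least squares; pixels containing NaNs are passed to inv_lib.censored_lstsq_slow instead, which is not shown here. A self-contained sketch of the fully-observed case, with synthetic data standing in for cum:

import numpy as np

# Synthetic example: 20 acquisitions over ~3 years, a handful of pixels
rng = np.random.default_rng(1)
dt_cum = np.sort(rng.uniform(0, 3, 20)).astype(np.float32)  # years since first image
n_pt = 5
true_vel = np.linspace(-5, 12, n_pt)                        # mm/yr
cum = dt_cum[:, None] * true_vel[None, :] + rng.normal(0, 1, (20, n_pt))

# Design matrix: a column of ones (intercept) and the acquisition times
G = np.stack((np.ones_like(dt_cum), dt_cum), axis=1)        # shape (n_im, 2)
vconst, vel = np.linalg.lstsq(G, cum, rcond=None)[0]        # each of shape (n_pt,)

print('estimated velocities (mm/yr):', np.round(vel, 1))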