# Assumed imports for this snippet: os, glob (from glob import glob), osgeo.gdal, and
# the pyddem/pymmaster tile helpers (SRTMGL1_naming_to_latlon, latlon_to_UTM,
# niceextent_utm_latlontile, create_mmaster_stack). `res` (output resolution) and `y0`
# are module-level globals defined elsewhere in the original script.
def stack_tile(tile, tmp_ref_dir, tmp_aster_dir, tmp_setsm_dir, out_dir):

        lat, lon = SRTMGL1_naming_to_latlon(tile)
        epsg, utm = latlon_to_UTM(lat, lon)

        outfile = os.path.join(out_dir, utm, tile + '.nc')

        if not os.path.exists(outfile):

            print('Stacking tile: ' + tile + ' in UTM zone ' + utm)

            # reference DEM
            ref_utm_dir = os.path.join(tmp_ref_dir, utm)
            ref_vrt = os.path.join(ref_utm_dir, 'tmp_' + utm + '.vrt')
            ref_list = glob(os.path.join(ref_utm_dir, '**/*.tif'),
                            recursive=True)
            if not os.path.exists(ref_vrt):
                gdal.BuildVRT(ref_vrt, ref_list, resampleAlg='bilinear')

            # DEMs to stack
            flist1 = glob(os.path.join(tmp_aster_dir, '**/*_final.zip'),
                          recursive=True)
            if os.path.exists(tmp_setsm_dir):
                flist2 = glob(os.path.join(tmp_setsm_dir, '**/*.tif'),
                              recursive=True)
            else:
                flist2 = []

            flist = flist1 + flist2

            extent = niceextent_utm_latlontile(tile, utm, res)
            # swap the two middle values of the extent into the ordering expected by create_mmaster_stack
            bobformat_extent = [extent[0], extent[2], extent[1], extent[3]]

            print('Nice extent is:')
            print(extent)
            if len(flist) > 0:
                nco = create_mmaster_stack(flist,
                                           extent=bobformat_extent,
                                           epsg=int(epsg),
                                           mst_tiles=ref_vrt,
                                           res=res,
                                           outfile=outfile,
                                           coreg=False,
                                           uncert=True,
                                           clobber=True,
                                           add_ref=True,
                                           add_corr=True,
                                           latlontile_nodata=tile,
                                           filt_mm_corr=False,
                                           l1a_zipped=True,
                                           y0=y0,
                                           tmptag=tile)
                nco.close()
            else:
                print('No DEM intersecting tile found. Skipping...')

        else:
            print('Tile ' + tile + ' already exists.')
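# A minimal usage sketch for the function above; the tile name follows the SRTMGL1
# convention, the paths are hypothetical, and the values given to the module-level
# globals `res` and `y0` below are placeholders, not the original study's settings:
res = 30
y0 = 1900
stack_tile('N46E007', '/tmp/ref', '/tmp/aster', '/tmp/setsm', '/tmp/stacks')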
Example No. 2
# Assumed imports for this snippet: numpy as np, osgeo.gdal, pybob's GeoImg, the pyddem
# helper module aliased as `ot`, and point_to_lonlat_trans. `out_res` is a module-level
# global giving the target resolution of the reprojection.
def raster_to_point(fn_dem):

    extent, proj_wkt = ot.extent_rast(fn_dem)
    poly = ot.poly_from_extent(extent)
    transform = ot.coord_trans(True, proj_wkt, False, 4326)
    poly.Transform(transform)
    center_lon, center_lat = ot.get_poly_centroid(poly)

    epsg, utm_zone = ot.latlon_to_UTM(center_lat, center_lon)

    print('Reprojecting to EPSG:' + str(epsg))

    img_vhr = GeoImg(fn_dem)

    dest = gdal.Warp('',
                     img_vhr.gd,
                     format='MEM',
                     dstSRS='EPSG:{}'.format(epsg),
                     xRes=out_res,
                     yRes=out_res,
                     resampleAlg=gdal.GRA_Bilinear,
                     dstNodata=-9999)

    img_lr = GeoImg(dest)

    print('Extracting coords...')

    elevs = img_lr.img.flatten()
    x, y = img_lr.xy(ctype='center')
    coords = list(zip(x.flatten(), y.flatten()))
    coords_latlon = point_to_lonlat_trans(int(epsg), coords)
    lon, lat = zip(*coords_latlon)
    lon = np.array(lon)
    lat = np.array(lat)

    keep = ~np.isnan(elevs)
    h = elevs[keep]
    lat = lat[keep]
    lon = lon[keep]

    print('Done for this DEM')

    return h, lat, lon
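# A minimal usage sketch for the function above (the DEM path is hypothetical and
# `out_res` must be set as a module-level global beforehand):
out_res = 100
h, lat, lon = raster_to_point('/tmp/dem.tif')
print(str(len(h)) + ' valid elevation points extracted')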
Example No. 3
# Assumed imports for this snippet: os, numpy as np, pandas as pd, glob (from glob
# import glob), osgeo.gdal, and the pyddem tile helpers SRTMGL1_naming_to_latlon
# and latlon_to_UTM.
filt_ls = False
conf_filt_ls = 0.99
# specify the exact temporal extent needed to be able to merge neighbouring stacks properly
tlim = [np.datetime64('2000-01-01'), np.datetime64('2019-01-01')]

# dir_stacks='/data/icesat/travail_en_cours/romain/data/stacks/06_rgi60/'
dir_stacks = '/calcul/santo/hugonnet/worldwide/18_rgi60/stacks'
ref_dir = '/calcul/santo/hugonnet/worldwide/18_rgi60/ref'
ref_gla_csv = '/data/icesat/travail_en_cours/romain/ww_tvol_study/worldwide/18_rgi60/cov/list_glacierized_tiles_06_rgi60.csv'
df = pd.read_csv(ref_gla_csv)
tilelist = df['Tile_name'].tolist()

for tile in tilelist:

    lat, lon = SRTMGL1_naming_to_latlon(tile)
    epsg, utm = latlon_to_UTM(lat, lon)

    print('Fitting tile: ' + tile + ' in UTM zone ' + utm)

    # reference DEM
    ref_utm_dir = os.path.join(ref_dir, utm)
    ref_vrt = os.path.join(ref_utm_dir, 'tmp_' + utm + '.vrt')
    ref_list = glob(os.path.join(ref_utm_dir, '**/*.tif'), recursive=True)
    if not os.path.exists(ref_vrt):
        gdal.BuildVRT(ref_vrt, ref_list, resampleAlg='bilinear')

    dir_utm_stacks = os.path.join(dir_stacks, utm)

    fn_stack = os.path.join(dir_utm_stacks, tile + '.nc')
    outfile = os.path.join(dir_utm_stacks, tile + '_final.nc')
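    # The example is truncated here; a minimal continuation sketch, assuming
    # pyddem.fit_tools is imported as ft and using the fit_stack keywords shown in the
    # later examples (the values below are placeholders, not the study's settings):
    if os.path.exists(fn_stack):
        ft.fit_stack(fn_stack, fn_ref_dem=ref_vrt, tlim=tlim, outfile=outfile,
                     method='gpr', nproc=1, clobber=True)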
Example No. 4
# Assumed imports for this snippet: os, numpy as np, xarray as xr, pyddem.fit_tools as
# ft, the pyddem tile helpers (SRTMGL1_naming_to_latlon, latlon_to_UTM), and the
# scikit-learn kernels, presumably aliased from sklearn.gaussian_process.kernels as
# PairwiseKernel, ConstantKernel as C, ExpSineSquared as ESS, RBF, RationalQuadratic
# as RQ. The kernel parameters (period_var, base_var, base_length, nonlin_var,
# nonlin_length, nonlin_alpha) and nproc are globals set at the entry of the script.
def fit_tile(tile, tmp_ref_dir, base_dir, out_dir):

    method = 'gpr'
    subspat = None
    ref_dem_date = np.datetime64('2013-01-01')
    gla_mask = '/calcul/santo/hugonnet/outlines/rgi60_merge.shp'
    inc_mask = '/calcul/santo/hugonnet/outlines/rgi60_buff_10.shp'
    write_filt = True
    clobber = True
    tstep = 1. / 12.
    time_filt_thresh = [-50, 50]
    opt_gpr = False
    filt_ref = 'both'
    filt_ls = False
    conf_filt_ls = 0.99
    # specify the exact temporal extent needed to be able to merge neighbouring stacks properly
    tlim = [np.datetime64('2000-01-01'), np.datetime64('2020-01-01')]

    # for sensitivity test: force final fit only, and change kernel parameters at the entry of the script
    force_final_fit = True
    k1 = PairwiseKernel(1, metric='linear')  # linear kernel
    k2 = C(period_var) * ESS(length_scale=1, periodicity=1)  # periodic kernel
    # sum of RBF kernels at three increasing length scales
    k3 = (C(base_var * 0.6) * RBF(base_length * 0.75)
          + C(base_var * 0.3) * RBF(base_length * 1.5)
          + C(base_var * 0.1) * RBF(base_length * 3))
    # linear times rational quadratic kernel for non-linear trends
    k4 = PairwiseKernel(1, metric='linear') * C(nonlin_var) * RQ(nonlin_length, nonlin_alpha)
    kernel = k1 + k2 + k3 + k4

    lat, lon = SRTMGL1_naming_to_latlon(tile)
    epsg, utm = latlon_to_UTM(lat, lon)
    print('Fitting tile: ' + tile + ' in UTM zone ' + utm)

    # reference DEM
    ref_utm_dir = os.path.join(tmp_ref_dir, utm)
    ref_vrt = os.path.join(ref_utm_dir, 'tmp_' + utm + '.vrt')
    infile = os.path.join(base_dir, utm, tile + '.nc')
    outfile = os.path.join(out_dir, utm, tile + '_final.nc')

    fn_filt = os.path.join(base_dir, utm, tile + '_filtered.nc')

    if True:  # not os.path.exists(outfile):
        ft.fit_stack(infile,
                     fn_filt=fn_filt,
                     fit_extent=subspat,
                     fn_ref_dem=ref_vrt,
                     ref_dem_date=ref_dem_date,
                     exc_mask=gla_mask,
                     tstep=tstep,
                     tlim=tlim,
                     inc_mask=inc_mask,
                     filt_ref=filt_ref,
                     time_filt_thresh=time_filt_thresh,
                     write_filt=write_filt,
                     outfile=outfile,
                     method=method,
                     filt_ls=filt_ls,
                     conf_filt_ls=conf_filt_ls,
                     nproc=nproc,
                     clobber=clobber,
                     kernel=kernel,
                     force_final_fit=force_final_fit)

        # write dh/dts for visualisation
        ds = xr.open_dataset(outfile)

        t0 = np.datetime64('2000-01-01')
        t1 = np.datetime64('2020-01-01')

        ft.get_full_dh(ds,
                       os.path.join(os.path.dirname(outfile),
                                    os.path.splitext(os.path.basename(outfile))[0]),
                       t0=t0,
                       t1=t1)

    else:
        print('Tile already processed.')
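# A minimal usage sketch for the function above (paths are hypothetical; the kernel
# hyperparameters and core count are placeholder assumptions, not the study's values):
period_var, base_var, base_length = 10., 50., 1.
nonlin_var, nonlin_length, nonlin_alpha = 100., 2., 1.
nproc = 4
fit_tile('N46E007', '/tmp/ref', '/tmp/stacks', '/tmp/fits')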
Example No. 5
# Assumed imports for this snippet: numpy as np, pandas as pd, multiprocessing as mp,
# and the pyddem helpers latlon_to_UTM, point_lonlat_trans and get_spatial_corr.
def get_tinterpcorr(df,
                    outfile,
                    cutoffs=[10000, 100000, 1000000],
                    nlags=100,
                    nproc=1,
                    nmax=10000):
    """
    Sample empirical spatial variograms with time lags to observation

    :param df: DataFrame of differences between ICESat and GP data aggregated for all regions
    :param outfile: Filename of output csv
    :param cutoffs: Maximum successive ranges for sampling variogram
    :param nlags: Number of lags to sample up to cutoff
    :param nproc: Number of cores to use for multiprocessing [1]
    :param nmax: Maximum number of observations to use for pairwise sampling (drawn randomly)

    :returns: None; the aggregated variogram estimates are written to the output csv
    """

    # df is a subset DataFrame of standardized differences for the points of interest,
    # with an attribute .reg for regions; each region must presumably be compact enough
    # for the coordinates of a single UTM zone to be relevant

    list_reg = list(set(list(df.reg.values)))
    df_out = pd.DataFrame()

    for k in range(len(list_reg)):

        print('Working on region: ' + str(list_reg[k]))

        df_reg = df[df.reg == list_reg[k]]
        #this works for ICESat campaigns, might have to put into close groups of similar dates for IceBridge
        list_dates = list(set(list(df_reg.t.values)))

        list_ns = []
        list_dt = []
        list_camp = []
        list_vals = []
        list_coords = []

        bin_dt = [
            0, 5, 30, 60, 90, 120, 150, 200, 260, 460, 620, 820, 980, 1180,
            1500, 2000, 2500
        ]

        for i in range(len(list_dates)):

            print('Pack of dates number ' + str(i + 1) + ' out of ' +
                  str(len(list_dates)) + ':' + str(list_dates[i]))

            for j in range(len(bin_dt) - 1):
                print('Day spacing number ' + str(j + 1) + ' out of ' +
                      str(len(bin_dt) - 1) + ': ' + str(bin_dt[j]) + ' to ' +
                      str(bin_dt[j + 1]))

                ind = np.logical_and.reduce(
                    (df_reg.t == list_dates[i], np.abs(df_reg.dt) >= bin_dt[j],
                     np.abs(df_reg.dt) < bin_dt[j + 1]))
                df_tmp = df_reg[ind]

                print('Found ' + str(len(df_tmp)) + ' observations')
                vals = df_tmp.dh.values

                if len(vals) > 10:

                    lat = df_tmp.lat
                    lon = df_tmp.lon
                    list_tup = list(zip(lon, lat))
                    med_lat = np.median(lat)
                    med_lon = np.median(lon)

                    print('Median latitude is: ' + str(med_lat))
                    print('Median longitude is: ' + str(med_lon))
                    print('Transforming coordinates...')

                    epsg, _ = latlon_to_UTM(med_lat, med_lon)
                    list_tup_out = point_lonlat_trans(int(epsg), list_tup)

                    print('Estimating spatial correlation...')

                    list_coords.append(np.array(list_tup_out))
                    list_vals.append(vals)
                    list_dt.append(bin_dt[j] + 0.5 *
                                   (bin_dt[j + 1] - bin_dt[j]))
                    list_ns.append(len(df_tmp))
                    list_camp.append(list_dates[i])

        if len(list_coords) > 0:
            if nproc == 1:
                print('Processing with 1 core...')
                list_arr_exps, list_arr_bins, list_arr_counts = (
                    [] for i in range(3))
                for i in range(len(list_coords)):
                    exps, bins, counts = get_spatial_corr(
                        (list_coords[i], list_vals[i], i, cutoffs, nlags,
                         nmax))

                    list_arr_exps.append(exps)
                    list_arr_bins.append(bins)
                    list_arr_counts.append(counts)
            else:
                print('Processing with ' + str(nproc) + ' cores...')
                arglist = [(list_coords[i], list_vals[i], i, cutoffs, nlags,
                            nmax) for i in range(len(list_coords))]
                pool = mp.Pool(nproc, maxtasksperchild=1)
                outputs = pool.map(get_spatial_corr, arglist, chunksize=1)
                pool.close()
                pool.join()

                print('Finished processing, compiling results...')

                zipped = list(zip(*outputs))

                list_arr_exps = zipped[0]
                list_arr_bins = zipped[1]
                list_arr_counts = zipped[2]

            for l in range(len(cutoffs)):
                for c in range(len(list_camp)):
                    df_var = pd.DataFrame()
                    df_var = df_var.assign(reg=[list_reg[k]] * nlags,
                                           nb_dt=[list_dt[c]] * nlags,
                                           bins=list_arr_bins[c][l, :],
                                           exp=list_arr_exps[c][l, :],
                                           count=list_arr_counts[c][l, :],
                                           cutoff=cutoffs[l],
                                           t=list_camp[c])
                    # DataFrame.append was removed in pandas 2.0; concat is the equivalent
                    df_out = pd.concat([df_out, df_var], ignore_index=True)

    df_out.to_csv(outfile)
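# A minimal usage sketch for the function above, with a small synthetic DataFrame whose
# columns follow those accessed in the function body (reg, t, dt, dh, lat, lon); the
# output path is hypothetical:
rng = np.random.default_rng(42)
n = 500
df_demo = pd.DataFrame({'reg': ['06_rgi60'] * n,
                        't': [np.datetime64('2005-03-01')] * n,
                        'dt': rng.uniform(0, 100, n),
                        'dh': rng.normal(0, 1, n),
                        'lat': 64. + rng.uniform(0, 0.5, n),
                        'lon': -19. + rng.uniform(0, 0.5, n)})
get_tinterpcorr(df_demo, '/tmp/tinterp_corr.csv', nproc=1)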
Example No. 6
# Assumed imports as in Example No. 4 (os, numpy as np, xarray as xr,
# pyddem.fit_tools as ft, and the pyddem tile helpers); nproc is a module-level global.
def fit_tile(tile, tmp_ref_dir, out_dir):

    method = 'gpr'
    # subspat = [383000,400000,5106200,5094000]
    subspat = None
    ref_dem_date = np.datetime64('2013-01-01')
    gla_mask = '/calcul/santo/hugonnet/outlines/rgi60_merge.shp'
    inc_mask = '/calcul/santo/hugonnet/outlines/rgi60_buff_10.shp'
    write_filt = True
    clobber = True
    tstep = 1. / 12.
    time_filt_thresh = [-50, 50]
    opt_gpr = False
    kernel = None
    filt_ref = 'both'
    filt_ls = False
    conf_filt_ls = 0.99
    # specify the exact temporal extent needed to be able to merge neighbouring stacks properly
    tlim = [np.datetime64('2000-01-01'), np.datetime64('2020-01-01')]

    lat, lon = SRTMGL1_naming_to_latlon(tile)
    epsg, utm = latlon_to_UTM(lat, lon)
    print('Fitting tile: ' + tile + ' in UTM zone ' + utm)

    # reference DEM
    ref_utm_dir = os.path.join(tmp_ref_dir, utm)
    ref_vrt = os.path.join(ref_utm_dir, 'tmp_' + utm + '.vrt')
    infile = os.path.join(out_dir, utm, tile + '.nc')
    outfile = os.path.join(out_dir, utm, tile + '_final.nc')

    if True:  # not os.path.exists(outfile):
        ft.fit_stack(infile,
                     fit_extent=subspat,
                     fn_ref_dem=ref_vrt,
                     ref_dem_date=ref_dem_date,
                     gla_mask=gla_mask,
                     tstep=tstep,
                     tlim=tlim,
                     inc_mask=inc_mask,
                     filt_ref=filt_ref,
                     time_filt_thresh=time_filt_thresh,
                     write_filt=True,
                     outfile=outfile,
                     method=method,
                     filt_ls=filt_ls,
                     conf_filt_ls=conf_filt_ls,
                     nproc=nproc,
                     clobber=True)

        # write dh/dts for visualisation
        ds = xr.open_dataset(outfile)

        t0 = np.datetime64('2000-01-01')
        t1 = np.datetime64('2020-01-01')

        ft.get_full_dh(ds,
                       os.path.join(
                           os.path.dirname(outfile),
                           os.path.splitext(os.path.basename(outfile))[0]),
                       t0=t0,
                       t1=t1)

    else:
        print('Tile already processed.')
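# A minimal sketch of inspecting the fitted output written above (the path is
# hypothetical; print the dataset to discover the variable names it stores):
import xarray as xr
ds_check = xr.open_dataset('/tmp/fits/32N/N46E007_final.nc')
print(ds_check)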
Example No. 7
# Assumed context for this snippet: osgeo's osr and ogr are imported, ds_shp_in and
# ds_shp_out are OGR datasets opened earlier, buffer_km is the buffer distance in
# kilometres, and inter_poly_coords densifies polygon edges (all defined in the
# truncated part of the original script).
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
layer_out = ds_shp_out.CreateLayer('buff', srs=srs, geom_type=ogr.wkbPolygon)

layer_in = ds_shp_in.GetLayer()

multipoly = ogr.Geometry(ogr.wkbMultiPolygon)

for feature in layer_in:
    geom = feature.GetGeometryRef()

    print('Working on: ' + feature.GetField('RGIId'))

    centroid = geom.Centroid()
    center_lon, center_lat, _ = centroid.GetPoint()
    epsg, utm = ot.latlon_to_UTM(center_lat, center_lon)

    trans = ot.coord_trans(False, 4326, False, int(epsg))

    geom.Transform(trans)
    geom = geom.Buffer(buffer_km * 1000)

    # UTM zones 1 and 60 border the antimeridian: clip the buffered geometry on that
    # side to avoid longitude wrap-around artifacts
    if utm == '01N' or utm == '01S':
        utm_ext = [(-179.999, -85), (-179.999, 85), (-168, 85), (-168, -85),
                   (-179.999, -85)]
        utm_poly = ot.poly_from_coords(inter_poly_coords(utm_ext))

        utm_poly.Transform(trans)

        geom = geom.Intersection(utm_poly)
    elif utm == '60N' or utm == '60S':