예제 #1
0
def ATL06_crossovers(xy0, tile_dir, different_cycles=False):
    """Find track-to-track crossovers among ATL06 data in one tile.

    Parameters
    ----------
    xy0 : tile-center coordinates, passed through to read_tile.
    tile_dir : directory containing the tile files.
    different_cycles : if True, skip track pairs from the same cycle.

    Returns
    -------
    list of dicts, one per crossover, with keys:
        'xyC'    : crossover location returned by cross_tracks
        'data_0' : data from the first track near the crossover
        'data_1' : data from the second track near the crossover
        'L0','L1': along-track interpolation parameters for each track
    """
    D = read_tile(xy0, tile_dir)
    for Di in D:
        # edit out segments whose along-track differences exceed tolerance
        segDifferenceFilter(Di, tol=2, setValid=True, toNaN=True, subset=False)
        Di.assign({'time': Di.delta_time})
        Di.index(np.isfinite(Di.h_li))
    xover_list = []
    n_tracks = len(D)
    # unique unordered pairs: jj starts at ii+1, replacing the original
    # full double loop with an ii >= jj skip
    for ii in range(n_tracks):
        if D[ii].size < 2:
            continue
        for jj in range(ii + 1, n_tracks):
            if D[jj].size < 2:
                continue
            if D[ii].rgt[0] == D[jj].rgt[0]:
                # same reference ground track: cannot cross itself
                continue
            if different_cycles and D[ii].cycle[0] == D[jj].cycle[0]:
                continue
            xyC, inds, L = cross_tracks([D[ii], D[jj]],
                                        delta=20,
                                        delta_coarse=1000)
            if xyC is None:
                continue
            try:
                xover_list.append({
                    'xyC': xyC,
                    'data_0': D[ii].subset(inds[0]),
                    'data_1': D[jj].subset(inds[1]),
                    'L0': L[0],
                    'L1': L[1]
                })
            except Exception as e:
                # report the failure instead of a bare "HERE" placeholder
                print("ATL06_crossovers: failed to subset crossover data: %s"
                      % e)
    return xover_list
예제 #2
0
def read_ICESat2(xy0,
                 W,
                 gI_file,
                 sensor=2,
                 SRS_proj4=None,
                 tiled=True,
                 seg_diff_tol=2):
    """Read ICESat-2 ATL06 data for a W['x'] x W['y'] box centered on xy0.

    Parameters
    ----------
    xy0 : (x, y) center of the query region, projected coordinates.
    W : dict with 'x' and 'y' giving the region width and height.
    gI_file : geo_index file to query.
    sensor : integer sensor code assigned to every returned point.
    SRS_proj4 : proj4 string used to compute x/y when not already stored.
    tiled : if True, the index points at tiled files (flat field list,
        x/y precomputed) and tracks are reconstructed after the query.
    seg_diff_tol : tolerance passed to segDifferenceFilter.

    Returns
    -------
    list of per-track point-data objects with derived fields
    (z, time, sigma, sigma_corr, cycle, year, sensor), thinned to 40 m
    and subset to finite h_li; [None] if the query returns nothing.
    """
    field_dict = {
        None: [
            'delta_time', 'h_li', 'h_li_sigma', 'latitude', 'longitude',
            'atl06_quality_summary', 'segment_id', 'sigma_geo_h'
        ],
        'fit_statistics': ['dh_fit_dx'],
        'ground_track': ['x_atc'],
        'geophysical': ['dac'],
        'orbit_info': ['rgt', 'cycle_number'],
        'derived': ['valid', 'matlab_time', 'BP', 'LR']
    }
    if tiled:
        # tiled files store flat datasets: flatten the group mapping and
        # include the precomputed projected coordinates
        fields = [field for group in field_dict.values() for field in group]
        fields += ['x', 'y']
    else:
        fields = field_dict
    # query points on a 10-km grid covering the requested box
    px, py = np.meshgrid(
        np.arange(xy0[0] - W['x'] / 2, xy0[0] + W['x'] / 2 + 1.e4, 1.e4),
        np.arange(xy0[1] - W['y'] / 2, xy0[1] + W['y'] / 2 + 1.e4, 1.e4))
    D0 = geo_index().from_file(gI_file).query_xy((px.ravel(), py.ravel()),
                                                 fields=fields)
    if D0 is None or len(D0) == 0:
        return [None]
    if tiled:
        # regroup tile-query results into per-track segments
        D0 = reconstruct_tracks(
            point_data(list_of_fields=fields).from_list(D0))
    # check the D6 filenames against the blacklist of bad files
    blacklist = None
    D1 = list()
    for D in D0:
        if D.size < 2:
            continue
        delete_file, blacklist = check_rgt_cycle_blacklist(
            rgt_cycle=[D.rgt[0], D.cycle_number[0]], blacklist=blacklist)
        if delete_file == 0:
            D1.append(D)

    for ind, D in enumerate(D1):
        try:
            segDifferenceFilter(D,
                                setValid=False,
                                toNaN=True,
                                tol=seg_diff_tol)
        except TypeError as e:
            # report the failure but keep the (unfiltered) track
            print("read_ICESat2: segDifferenceFilter failed: %s" % e)
        # blank flagged segments (np.nan spelling survives NumPy 2.0,
        # where the np.NaN alias was removed)
        D.h_li[D.atl06_quality_summary == 1] = np.nan
        # rename the h_li field (plus dac correction) to 'z', and rename
        # 'matlab_time' to 'time'
        D.assign({
            'z': D.h_li + D.dac,
            'time': D.matlab_time,
            'sigma': D.h_li_sigma,
            'sigma_corr': D.sigma_geo_h,
            'cycle': D.cycle_number
        })
        # seconds -> decimal years, offset to 2018 (presumed ATLAS epoch)
        D.assign({'year': D.delta_time / 24. / 3600. / 365.25 + 2018})
        # thin to 40 m by keeping every other 20-m segment
        D.index(np.mod(D.segment_id, 2) == 0)
        if 'x' not in D.list_of_fields:
            D.get_xy(SRS_proj4)
        D.assign({'sensor': np.zeros_like(D.x) + sensor})
        D1[ind] = D.subset(np.isfinite(D.h_li),
                           datasets=[
                               'x', 'y', 'z', 'time', 'delta_time', 'year',
                               'sigma', 'sigma_corr', 'rgt', 'cycle', 'sensor',
                               'BP', 'LR'
                           ])
    return D1
예제 #3
0
def make_tile(args):
    """Assemble one spatially-binned HDF5 tile of indexed point data.

    All inputs arrive through the *args* dict:
        xy0               : tile center (x, y), projected coordinates
        SRS_proj4         : proj4 string used to project data to x/y
        tile_spacing      : tile width/height
        bin_W             : width of the square bins within the tile
        GI_file           : geo_index file queried for the source data
        out_dir           : directory in which the output file is written
        field_dict        : {group: [field, ...]} datasets to read
        seg_diff_scale    : if not None, apply quality/segment-diff edits
        blockmedian_scale : if not None, thin each file to block medians

    Side effects: writes <out_dir>/E<x>_N<y>.h5 (removing any existing
    copy), one group per occupied bin plus a 'source_files' group whose
    attributes map file numbers to source filenames.  Returns None.
    """

    xy0 = args['xy0']
    SRS_proj4 = args['SRS_proj4']
    tile_spacing = args['tile_spacing']
    bin_W = args['bin_W']
    GI_file = args['GI_file']
    out_dir = args['out_dir']
    field_dict = args['field_dict']
    seg_diff_scale = args['seg_diff_scale']
    blockmedian_scale = args['blockmedian_scale']
    # bin-center offsets covering the tile at bin_W spacing
    dxb, dyb = np.meshgrid(
        np.arange(-tile_spacing / 2, tile_spacing / 2 + bin_W, bin_W),
        np.arange(-tile_spacing / 2, tile_spacing / 2 + bin_W, bin_W))
    dxb = dxb.ravel()
    dyb = dyb.ravel()

    # flatten the {group: [field, ...]} mapping into one field list
    list_of_fields = []
    for group in field_dict:
        for ds in field_dict[group]:
            list_of_fields.append(ds)

    gI = geo_index().from_file(GI_file, read_file=False)
    # output name encodes the tile center in km, e.g. E-200_N1500.h5
    out_file = out_dir + ('/E%d_N%d.h5' % (xy0[0] / 1.e3, xy0[1] / 1.e3))
    if os.path.isfile(out_file):
        os.remove(out_file)

    D = gI.query_xy((xy0[0] + dxb, xy0[1] + dyb), fields=field_dict)
    if D is None:
        return
    file_dict = {}
    # NOTE(review): delete_list collects failed block-median inputs but is
    # never used after this loop -- possibly a debugging leftover; confirm.
    delete_list = []
    for file_num, Di in enumerate(D):
        Di.get_xy(SRS_proj4)
        # tag every point with its source-file index for provenance
        Di.assign(
            {'source_file_num': np.zeros_like(Di.x, dtype=int) + file_num})
        if seg_diff_scale is not None:
            # NOTE(review): np.NaN alias is removed in NumPy 2.0; np.nan
            # is the stable spelling -- confirm before upgrading numpy.
            Di.h_li[Di.atl06_quality_summary == 1] = np.NaN
            segDifferenceFilter(Di, setValid=False, toNaN=True)
        Di.ravel_fields()
        if blockmedian_scale is not None:
            Di.index(np.isfinite(Di.h_li) & (Di.atl06_quality_summary == 0))
            try:
                # keep one representative point per blockmedian_scale cell
                ind = pt_blockmedian(Di.x,
                                     Di.y,
                                     Di.h_li,
                                     blockmedian_scale,
                                     return_index=True)[3]
            except Exception:
                delete_list.append(Di)
                continue
            Di.index(ind[:, 0])
        else:
            Di.index(np.isfinite(Di.h_li))
        file_dict[file_num] = Di.filename
    # concatenate all per-file data into one point collection
    D_all = point_data(list_of_fields=list_of_fields +
                       ['x', 'y', 'source_file_num']).from_list(D)

    # integer bin coordinates for each point
    y_bin_function = np.round(D_all.y / bin_W)
    x_bin_function = np.round(D_all.x / bin_W)
    x_scale = np.nanmax(x_bin_function) - np.nanmin(x_bin_function)
    t_scale = np.nanmax(D_all.delta_time) - np.nanmin(D_all.delta_time)

    # single scalar key per spatial bin (row-major over x within y)
    xy_bin_function = (y_bin_function -
                       np.nanmin(y_bin_function)) * x_scale + (
                           x_bin_function - np.nanmin(x_bin_function))
    # add a sub-unit time fraction so points sort by time within a bin
    xyt_bin_function = xy_bin_function + (
        D_all.delta_time - np.nanmin(D_all.delta_time)) / t_scale
    ind = np.argsort(xyt_bin_function)

    # walk the sorted keys; each run of equal xy keys is one bin's points
    bin_dict = {}
    xy_bin_fn_sort = xy_bin_function[ind]
    fn_delta = np.concatenate([[-1],
                               np.where(np.diff(xy_bin_fn_sort))[0],
                               [xy_bin_fn_sort.size]])
    for ii in range(len(fn_delta) - 1):
        this_ind = ind[(fn_delta[ii] + 1):(fn_delta[ii + 1] + 1)]
        bin_dict[(x_bin_function[this_ind[0]],
                  y_bin_function[this_ind[0]])] = this_ind
    key_arr = np.array([key for key in bin_dict.keys()])
    # NOTE(review): precedence here multiplies only np.min(key_arr[:, 1])
    # by x_scale; (key_arr[:, 1] - np.min(...)) * x_scale may have been
    # intended.  Affects only the order groups are written -- confirm.
    key_order = np.argsort(key_arr[:, 1] - np.min(key_arr[:, 1]) * x_scale +
                           (key_arr[:, 0] - np.min(key_arr[:, 0])))
    key_arr = key_arr[key_order, :]
    for key in key_arr:
        # group name encodes the bin's lower-left corner in meters
        this_group = '%dE_%dN' % tuple(key * bin_W)
        D_all.subset(bin_dict[tuple(key)]).to_file(out_file,
                                                   replace=False,
                                                   group=this_group)

    # record the source filenames as attributes for provenance
    with h5py.File(out_file, 'r+') as h5f:
        grp = h5f.create_group("source_files")
        for key in file_dict:
            grp.attrs['file_%d' % key] = file_dict[key]