Code Example #1
from glob import glob
from PointDatabase.geo_index import geo_index

def index_cycle_indices(tile_dir_root, hemisphere):
    """
    Combine the per-cycle GeoIndex files under tile_dir_root into one top-level GeoIndex
    """
    # pick a polar-stereographic projection appropriate to the hemisphere
    if hemisphere==1:
        SRS_proj4='+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
    else:
        SRS_proj4='+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'

    files=glob(tile_dir_root+'/cycle*/GeoIndex.h5')
    index_list=list()
    for sub_index_file in files:
        # read each per-cycle index, and build a 10-km index that points into it
        temp=geo_index().from_file(sub_index_file)
        xy=temp.bins_as_array()
        index_list.append(geo_index(delta=[1.e4, 1.e4]).from_xy(xy, filename=sub_index_file, file_type='h5_geoindex', fake_offset_val=-1))
        temp=None
    # collect the per-cycle indices and write the combined index
    geo_index(delta=[1.e4, 1.e4], SRS_proj4=SRS_proj4).from_list(index_list).to_file(tile_dir_root+'/GeoIndex.h5')
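A minimal usage sketch for the function above; the directory path and the per-cycle layout (cycle_01/GeoIndex.h5, cycle_02/GeoIndex.h5, ...) are placeholder assumptions:

# hypothetical tile directory containing per-cycle GeoIndex.h5 files
index_cycle_indices('/Data/ATL06_tiles', hemisphere=1)
# the combined index is written to /Data/ATL06_tiles/GeoIndex.h5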
Code Example #2
def queryIndex(index_file, demFile, verbose=False):
    import os
    import numpy as np
    from PointDatabase.geo_index import geo_index
    from IS2_calval.demBounds import demBounds
    # f06, the ATL06 editing-filter module used below, is assumed to be imported at module level

    pointData = dict()
    beam_names = ['l', 'r']
    field_dict = {
        None: ['delta_time', 'h_li', 'h_li_sigma', 'latitude', 'longitude'],
        'geophysical': ['dac'],
        'fit_statistics': [
            'dh_fit_dx', 'h_rms_misfit', 'h_robust_sprd', 'n_fit_photons',
            'h_mean', 'signal_selection_source', 'snr_significance',
            'w_surface_window_final'
        ],
        'derived': ['valid']
    }
    gI = geo_index().from_file(index_file)
    xr, yr = demBounds(demFile, proj4=gI.attrs['SRS_proj4'])
    xr += np.array([-1e4, 1e4])
    yr += np.array([-1e4, 1e4])
    xy = gI.bins_as_array()
    #plt.figure(); plt.plot(xy[0], xy[1],'k.')
    #plt.plot(xr[[0, 1, 1, 0, 0]], yr[[0, 0, 1, 1, 0]],'r')
    D6es = gI.query_xy_box(xr, yr, get_data=True, fields=field_dict)
    if verbose:
        temp = gI.query_xy_box(xr + np.array([-1e4, 1e4]),
                               yr + np.array([-1e4, 1e4]),
                               get_data=False)
        print("list of datasets:")
        for key in set(temp.keys()):
            print("\t%s" % key)
    for D6 in D6es:
        f06.phDensityFilter(D6, toNaN=True, subset=True)
        if D6.h_li.size == 0:
            continue
        f06.segDifferenceFilter(D6, tol=2, toNaN=True, subset=True)
        if D6.h_li.size == 0:
            continue
        for beam in [0, 1]:
            if np.sum(np.isfinite(D6.h_li[:, beam])) > 100:
                # ascending/descending flag from the sign of the along-track latitude change
                first = np.min(np.where(np.isfinite(D6.latitude[:, beam]))[0])
                last = np.max(np.where(np.isfinite(D6.latitude[:, beam]))[0])
                AD = np.sign(D6.latitude[last, beam] -
                             D6.latitude[first, beam])
                this_name = "%s:gt%d%s" % (os.path.basename(
                    D6.file), D6.beam_pair, beam_names[beam])

                pointData[this_name] = {
                    'latitude': D6.latitude[:, beam],
                    'longitude': D6.longitude[:, beam],
                    'h': D6.h_mean[:, beam] + D6.dac[:, beam],
                    'delta_time': D6.delta_time[:, beam],
                    'AD': AD + np.zeros_like(D6.delta_time),
                    'orbit': D6.orbit + np.zeros_like(D6.delta_time)
                }

    return pointData
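A hedged usage sketch for queryIndex; both file names are placeholders:

pointData = queryIndex('ATL06_GeoIndex.h5', 'dem.tif', verbose=True)
for name in pointData:
    print(name, pointData[name]['h'].size)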
Code Example #3
def index_tiles(tile_dir_root, cycle, hemisphere):
    """
    Generate a geo_index for the tile files

    Arguments:
        tile_dir_root: directory under which to search for the tile files
        cycle: cycle number; tiles are read from tile_dir_root/cycle_NN
        hemisphere: north(1) or south(-1), used to set the projection
    """
    if hemisphere==1:
        SRS_proj4='+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
    else:
        SRS_proj4='+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
    cycle_root=tile_dir_root+('/cycle_%02d' % cycle)
    files=glob(cycle_root+'/E*.h5')
    print("found files:")
    print(files)
    # make one index entry per tile file, then collect them into a cycle-level GeoIndex
    iList = index_list_for_files(files, "indexed_h5", [1.e4, 1.e4], SRS_proj4, dir_root=cycle_root)
    geo_index(SRS_proj4=SRS_proj4, delta=[1.e4, 1.e4]).from_list(iList, dir_root=tile_dir_root).to_file(cycle_root+'/GeoIndex.h5')
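A sketch of how index_tiles pairs with index_cycle_indices from Code Example #1; the root path and cycle numbers are placeholders:

for cycle in (1, 2, 3):
    index_tiles('/Data/ATL06_tiles', cycle, hemisphere=1)   # writes cycle_NN/GeoIndex.h5
index_cycle_indices('/Data/ATL06_tiles', hemisphere=1)      # merges them into one GeoIndex.h5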
Code Example #4
def test_plot(gi_file):
    import numpy as np
    import matplotlib.pyplot as plt
    index=geo_index().from_file(gi_file)
    # query a 3x3 block of 10-km bins centered on the first bin in the index
    dx,dy=np.meshgrid(np.array([-1, 0, 1])*1.e4, np.array([-1, 0, 1])*1.e4)
    XX=index.bins_as_array()

    D_list=index.query_xy((np.array(XX[0][0])+dx, np.array(XX[1][0])+dy), fields=['x','y'])
    plt.figure()
    for D in D_list:
        plt.plot(D.x, D.y,'.')
    print('Use control-C to close the figure.')
    plt.show()
    plt.close()
Code Example #5
def make_tile(args):
    # assumes module-level imports: numpy as np, os, h5py, and the geo_index,
    # point_data, pt_blockmedian, and segDifferenceFilter helpers used below

    # unpack the job parameters
    xy0 = args['xy0']
    SRS_proj4 = args['SRS_proj4']
    tile_spacing = args['tile_spacing']
    bin_W = args['bin_W']
    GI_file = args['GI_file']
    out_dir = args['out_dir']
    field_dict = args['field_dict']
    seg_diff_scale = args['seg_diff_scale']
    blockmedian_scale = args['blockmedian_scale']
    dxb, dyb = np.meshgrid(
        np.arange(-tile_spacing / 2, tile_spacing / 2 + bin_W, bin_W),
        np.arange(-tile_spacing / 2, tile_spacing / 2 + bin_W, bin_W))
    dxb = dxb.ravel()
    dyb = dyb.ravel()

    list_of_fields = []
    for group in field_dict:
        for ds in field_dict[group]:
            list_of_fields.append(ds)

    gI = geo_index().from_file(GI_file, read_file=False)
    out_file = out_dir + ('/E%d_N%d.h5' % (xy0[0] / 1.e3, xy0[1] / 1.e3))
    if os.path.isfile(out_file):
        os.remove(out_file)

    D = gI.query_xy((xy0[0] + dxb, xy0[1] + dyb), fields=field_dict)
    if D is None:
        return
    file_dict = {}
    delete_list = []
    for file_num, Di in enumerate(D):
        Di.get_xy(SRS_proj4)
        Di.assign(
            {'source_file_num': np.zeros_like(Di.x, dtype=int) + file_num})
        if seg_diff_scale is not None:
            # blank out low-quality segments, then apply the along-track segment-difference filter
            Di.h_li[Di.atl06_quality_summary == 1] = np.nan
            segDifferenceFilter(Di, setValid=False, toNaN=True)
        Di.ravel_fields()
        if blockmedian_scale is not None:
            Di.index(np.isfinite(Di.h_li) & (Di.atl06_quality_summary == 0))
            try:
                ind = pt_blockmedian(Di.x,
                                     Di.y,
                                     Di.h_li,
                                     blockmedian_scale,
                                     return_index=True)[3]
            except Exception:
                delete_list.append(Di)
                continue
            Di.index(ind[:, 0])
        else:
            Di.index(np.isfinite(Di.h_li))
        file_dict[file_num] = Di.filename
    D_all = point_data(list_of_fields=list_of_fields +
                       ['x', 'y', 'source_file_num']).from_list(D)

    # assign each point to a bin_W x bin_W bin, then build a sort key that
    # orders points by bin (and by time within each bin)
    y_bin_function = np.round(D_all.y / bin_W)
    x_bin_function = np.round(D_all.x / bin_W)
    x_scale = np.nanmax(x_bin_function) - np.nanmin(x_bin_function)
    t_scale = np.nanmax(D_all.delta_time) - np.nanmin(D_all.delta_time)

    xy_bin_function = (y_bin_function -
                       np.nanmin(y_bin_function)) * x_scale + (
                           x_bin_function - np.nanmin(x_bin_function))
    xyt_bin_function = xy_bin_function + (
        D_all.delta_time - np.nanmin(D_all.delta_time)) / t_scale
    ind = np.argsort(xyt_bin_function)

    # map each (x, y) bin to the indices of the points it contains, using the
    # boundaries between runs of equal bin values in the sorted order
    bin_dict = {}
    xy_bin_fn_sort = xy_bin_function[ind]
    fn_delta = np.concatenate([[-1],
                               np.where(np.diff(xy_bin_fn_sort))[0],
                               [xy_bin_fn_sort.size]])
    for ii in range(len(fn_delta) - 1):
        this_ind = ind[(fn_delta[ii] + 1):(fn_delta[ii + 1] + 1)]
        bin_dict[(x_bin_function[this_ind[0]],
                  y_bin_function[this_ind[0]])] = this_ind
    key_arr = np.array([key for key in bin_dict.keys()])
    key_order = np.argsort(key_arr[:, 1] - np.min(key_arr[:, 1]) * x_scale +
                           (key_arr[:, 0] - np.min(key_arr[:, 0])))
    key_arr = key_arr[key_order, :]
    for key in key_arr:
        this_group = '%dE_%dN' % tuple(key * bin_W)
        D_all.subset(bin_dict[tuple(key)]).to_file(out_file,
                                                   replace=False,
                                                   group=this_group)

    # record each source file as an attribute of the output file
    with h5py.File(out_file, 'r+') as h5f:
        grp = h5f.create_group("source_files")
        for key in file_dict:
            grp.attrs['file_%d' % key] = file_dict[key]
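A sketch of the args dictionary that make_tile expects; every value below is an illustrative assumption (paths, spacings, and filter scales are placeholders):

args = {
    'xy0': (-100000., -2000000.),         # tile center, m
    'SRS_proj4': SRS_proj4,               # projection string, as in Code Example #1
    'tile_spacing': 1.e5,                 # tile width, m
    'bin_W': 1.e4,                        # bin width, m
    'GI_file': '/Data/ATL06/GeoIndex.h5',
    'out_dir': '/Data/ATL06/tiles',
    'field_dict': field_dict,             # field dictionary, as in Code Example #2
    'seg_diff_scale': 5,                  # None skips the segment-difference filter
    'blockmedian_scale': None,            # or a scale in m to thin data by block median
}
make_tile(args)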
Code Example #6
Qfit_fields = [
    'latitude', 'longitude', 'elevation', 'seconds_of_day', 'days_J2K'
]
# build forward and inverse transforms between geographic coordinates (EPSG:4326)
# and the polar-stereographic projection given by SRS_proj4 (defined earlier in the
# script); osr is GDAL's osgeo.osr module
ps_srs = osr.SpatialReference()
ps_srs.ImportFromProj4(SRS_proj4)
ll_srs = osr.SpatialReference()
ll_srs.ImportFromEPSG(4326)
ll2ps = osr.CoordinateTransformation(ll_srs, ps_srs).TransformPoint
ps2ll = osr.CoordinateTransformation(ps_srs, ll_srs).TransformPoint
WGS84a = 6378137.0
WGS84b = 6356752.31424
d2r = np.pi / 180.
delta = [10000., 10000.]

Q_GI = geo_index(SRS_proj4=SRS_proj4).from_file(Qfit_index)
D6_GI = geo_index(SRS_proj4=SRS_proj4).from_file(ATL06_index)
# the Qfit index is at 1 km, the ATL06 index is at 10 km; find the overlap between
# the two by intersecting the ATL06 index with the Qfit index
xy_10km_Q = unique_points(Q_GI.bins_as_array())
D6_GI = D6_GI.copy_subset(xyBin=xy_10km_Q)

out_fields = [
    'segment_id', 'x', 'y', 'beam', 'beam_pair', 'h_li', 'h_li_sigma',
    'atl06_quality_summary', 'dh_fit_dx', 'N_50m', 'N_seg', 'h_qfit_seg',
    'dh_qfit_dx', 'dh_qfit_dy', 'h_robust_sprd', 'snr_significance',
    'h_qfit_50m', 'sigma_qfit_50m', 'sigma_seg', 'dz_50m', 'E_seg', 'RDE_seg',
    'RDE_50m', 't_seg', 't_qfit', 'y_atc', 'x_seg_mean', 'y_seg_mean'
]
out_template = {f: np.nan for f in out_fields}
out = list()
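For reference, a minimal sketch of calling the transforms defined above; the point is a placeholder, and note that GDAL 3+ interprets EPSG:4326 coordinates as (latitude, longitude) unless the axis-mapping strategy is changed:

x, y, _ = ll2ps(70.0, -45.0)    # hypothetical point at 70 N, 45 W
lat, lon, _ = ps2ll(x, y)       # round-trip back to geographic coordinates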
Code Example #7
File: glas_dhdt.py Project: whigg/LSsurf
def glas_fit(W0, xy0=np.array((-150000, -2000000)), D=None, E_RMS=None, gI=None, giFile='/Data/glas/GL/rel_634/GeoIndex.h5'):
    # W0 comes first: a non-default argument may not follow default arguments;
    # relies on module-level imports in glas_dhdt.py (e.g. numpy as np,
    # scipy.sparse as sp, sparseqr, time, and the fd_grid, lin_op, and RDE helpers)
    if gI is None:
        gI=geo_index().from_file(giFile)

    timing=dict()
    if E_RMS is None:
        E_RMS={'d2z0_dx2':20000./3000/3000, 'd3z_dx2dt':10./3000/3000, 'd2z_dxdt':100/3000, 'd2z_dt2':1}

    W={'x':W0, 'y':W0,'t':6}
    spacing={'z0':5.e2, 'dzdt':5.e3}
    # center of the fit: xy0 in space, the midpoint of the 2003-2009 ICESat period in time
    ctr={'x':xy0[0], 'y':xy0[1], 't':(2003+2009)/2.}

    args={'W':W, 'ctr':ctr, 'spacing':spacing, 'E_RMS':E_RMS, 'max_iterations':25}

    if D is None:
        fields=['IceSVar', 'deltaEllip', 'numPk', 'ocElv', 'reflctUC', 'satElevCorr', 'time', 'x', 'y', 'z']

        D=gI.query_xy_box(xy0[0]+np.array([-W['x']/2, W['x']/2]), xy0[1]+np.array([-W['y']/2, W['y']/2]), fields=fields)

        #plt.plot(xy[0], xy[1],'.')
        #plt.plot(xy0[0], xy0[1],'r*')

        D.assign({'year': matlabToYear(D.time)})
        good=(D.IceSVar < 0.035) & (D.reflctUC >0.05) & (D.satElevCorr < 1) & (D.numPk==1)
        D.subset(good, datasets=['x','y','z','year'])

        D.assign({'sigma':np.zeros_like(D.x)+0.2, 'time':D.year})
        plt.plot(D.x, D.y,'m.')

    bds={coord:args['ctr'][coord]+np.array([-0.5, 0.5])*args['W'][coord] for coord in ('x','y')}
    grids=dict()
    grids['z0']=fd_grid( [bds['y'], bds['x']], args['spacing']['z0']*np.ones(2), name='z0')
    grids['dzdt']=fd_grid( [bds['y'], bds['x']],  args['spacing']['dzdt']*np.ones(2), \
         col_0=grids['z0'].col_N+1, name='dzdt')

    valid_z0=grids['z0'].validate_pts((D.coords()[0:2]))
    valid_dz=grids['dzdt'].validate_pts((D.coords()))
    valid_data=valid_dz & valid_z0
    D=D.subset(valid_data)

    G_data=lin_op(grids['z0'], name='interp_z').interp_mtx(D.coords()[0:2])
    G_dzdt=lin_op(grids['dzdt'], name='dzdt').interp_mtx(D.coords()[0:2])
    G_dzdt.v *= (D.year[G_dzdt.r.astype(int)]-ctr['t'])
    G_data.add(G_dzdt)

    # constraint operators: curvature (grad2) and slope (grad) penalties on z0 and dzdt
    grad2_z0=lin_op(grids['z0'], name='grad2_z0').grad2(DOF='z0')
    grad_z0=lin_op(grids['z0'], name='grad_z0').grad(DOF='z0')
    grad2_dzdt=lin_op(grids['dzdt'], name='grad2_dzdt').grad2(DOF='dzdt')
    grad_dzdt=lin_op(grids['dzdt'], name='grad_dzdt').grad(DOF='dzdt')
    Gc=lin_op(None, name='constraints').vstack((grad2_z0, grad_z0, grad2_dzdt, grad_dzdt))
    Ec=np.zeros(Gc.N_eq)
    root_delta_A_z0=np.sqrt(np.prod(grids['z0'].delta))
    Ec[Gc.TOC['rows']['grad2_z0']]=args['E_RMS']['d2z0_dx2']/root_delta_A_z0
    Ec[Gc.TOC['rows']['grad2_dzdt']]=args['E_RMS']['d3z_dx2dt']/root_delta_A_z0
    Ec[Gc.TOC['rows']['grad_z0']]=1.e4*args['E_RMS']['d2z0_dx2']/root_delta_A_z0
    Ec[Gc.TOC['rows']['grad_dzdt']]=1.e4*args['E_RMS']['d3z_dx2dt']/root_delta_A_z0

    Ed=D.sigma.ravel()

    N_eq=G_data.N_eq+Gc.N_eq

    # calculate the inverse square root of the data covariance matrix
    TCinv=sp.dia_matrix((1./np.concatenate((Ed, Ec)), 0), shape=(N_eq, N_eq))

    # define the right hand side of the equation
    rhs=np.zeros([N_eq])
    rhs[0:D.x.size]=D.z.ravel()

    # put the fit and constraint matrices together
    Gcoo=sp.vstack([G_data.toCSR(), Gc.toCSR()]).tocoo()
    cov_rows=G_data.N_eq+np.arange(Gc.N_eq)

    # initialize the book-keeping matrices for the inversion
    m0=np.zeros(Gcoo.shape[1])
    sigma_hat=np.nan  # defined up front in case the loop exits on the first convergence test
    inTSE=np.arange(G_data.N_eq, dtype=int)

    for iteration in range(args['max_iterations']):
        # build the parsing matrix that removes invalid rows
        Ip_r=sp.coo_matrix((np.ones(Gc.N_eq+inTSE.size), (np.arange(Gc.N_eq+inTSE.size), np.concatenate((inTSE, cov_rows)))), shape=(Gc.N_eq+inTSE.size, Gcoo.shape[0])).tocsc()

        m0_last=m0
        # solve the equations
        tic=time()
        m0=sparseqr.solve(Ip_r.dot(TCinv.dot(Gcoo)), Ip_r.dot(TCinv.dot(rhs)))
        timing['sparseqr_solve']=time()-tic

        # quit if the solution is too similar to the previous solution
        if np.max(np.abs((m0_last-m0)[Gc.TOC['cols']['dzdt']])) < 0.05:
            break

        # calculate the full data residual
        rs_data=(D.z-G_data.toCSR().dot(m0))/D.sigma
        # calculate the robust standard deviation of the scaled residuals for the selected data
        sigma_hat=RDE(rs_data[inTSE])
        inTSE_last=inTSE
        # select the data that are within 3*sigma of the solution
        inTSE=np.where(np.abs(rs_data)<3.0*sigma_hat)[0]
        print('found %d in TSE, sigma_hat=%3.3f' % (inTSE.size, sigma_hat))
        if sigma_hat <= 1 or( inTSE.size == inTSE_last.size and np.all( inTSE_last == inTSE )):
            break
    m=dict()
    m['z0']=m0[Gc.TOC['cols']['z0']].reshape(grids['z0'].shape)
    m['dzdt']=m0[Gc.TOC['cols']['dzdt']].reshape(grids['dzdt'].shape)
    if DOPLOT:  # DOPLOT: module-level plotting flag assumed by this script
        plt.subplot(121)
        plt.imshow(m['z0'])
        plt.colorbar()
        plt.subplot(122)
        plt.imshow(m['dzdt'])
        plt.colorbar()

    if False:
        plt.figure()
        Dfinal=D.subset(inTSE)
        ii=np.argsort(Dfinal.z)
        plt.scatter(Dfinal.x[ii], Dfinal.y[ii], c=Dfinal.z[ii]); plt.colorbar()


    return grids, m, D, inTSE, sigma_hat
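A hedged call sketch for glas_fit; the 40-km window width W0 is an illustrative value, and the geo-index path is the function's default:

grids, m, D, inTSE, sigma_hat = glas_fit(W0=4.e4)
print('robust spread of the selected residuals: %3.3f' % sigma_hat)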
Code Example #8
File: glas_dhdt.py Project: whigg/LSsurf
def fit_GL():
    giFile='/Data/glas/GL/rel_634/GeoIndex.h5'
    gI=geo_index().from_file(giFile, read_file=False)