Code Example #1
File: IndexPicker.py  Project: whigg/pointCollection
# __init__ method of the TrackPicker class; assumes numpy (np),
# matplotlib.pyplot (plt), pointCollection (pc), and a get_map() helper
# are imported at module level.
def __init__(self, D, map_file, index_file, datatype):
    self.files=[]
    self.datatype=datatype
    self.map_file=map_file
    self.index_file=index_file
    print(f"TrackPicker: index_file is {index_file}")
    srs_proj4=pc.geoIndex().from_file(index_file).attrs['SRS_proj4']
    if datatype == 'ATL06':
        for ii, Di in enumerate(D):
            Di.get_xy(srs_proj4)
            Di.assign({'file_num':np.zeros_like(Di.x)+ii})
            self.files += [Di.filename]
        self.D_all=pc.data().from_list(D)
    elif datatype == 'ATL11':
        D_list=[]
        for ii, Di in enumerate(D):
            self.files += [Di.filename]
            Di.get_xy(srs_proj4)
            D_list.append(pc.data().from_dict({
                'x':Di.x, 'y':Di.y, 'file_num':np.zeros_like(Di.x)+ii,
                'h_corr':Di.corrected_h.h_corr[:,-1].ravel()}))
        self.D_all=pc.data().from_list(D_list)
    else:
        self.D_all=pc.data().from_list(D)
    self.D=D
    XR=[np.nanmin(self.D_all.x), np.nanmax(self.D_all.x)]
    YR=[np.nanmin(self.D_all.y), np.nanmax(self.D_all.y)]
    self.fig=plt.figure()
    if map_file is not None:
        get_map(map_file, bounds=[XR, YR])
    #for Di in D:
    #    plt.plot(Di.x, Di.y,'.')
    if self.datatype=='ATL06':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=self.D_all.r_eff, linewidth=0, vmax=1, vmin=0)
    elif self.datatype=='ATL11':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=self.D_all.h_corr); plt.colorbar()
    elif self.datatype=='ATM_waveform_fit':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=np.log10(self.D_all.K0))
    elif self.datatype=='CS2':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=self.D_all.h); plt.colorbar()
        hax=plt.axes([0.7, 0.05, 0.25, 0.25])
        # 730486 is the MATLAB datenum for 1 Jan 2000: convert time to decimal years
        hax.hist((self.D_all.time-730486)/365.25+2000, 100)
    # the instance is callable, so it serves as its own click handler
    self.cid=self.fig.canvas.mpl_connect('button_press_event', self)
    plt.show()
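The last two lines rely on a matplotlib idiom: fig.canvas.mpl_connect() accepts any callable, so passing self registers the TrackPicker instance as its own click handler via its __call__ method (not shown in this snippet). A minimal, self-contained sketch of that pattern — the ClickPrinter class below is illustrative, not part of pointCollection:

import matplotlib.pyplot as plt

class ClickPrinter:
    # minimal callable event handler, mirroring TrackPicker's design
    def __init__(self):
        self.fig = plt.figure()
        plt.plot([0, 1], [0, 1])
        # mpl_connect will call self(event) on every button press
        self.cid = self.fig.canvas.mpl_connect('button_press_event', self)

    def __call__(self, event):
        # event.xdata/event.ydata are None for clicks outside the axes
        if event.xdata is not None:
            print(f"clicked at ({event.xdata:.2f}, {event.ydata:.2f})")

ClickPrinter()
plt.show()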
Code Example #2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 21 15:56:39 2020

@author: ben
"""

import pointCollection as pc
import numpy as np
import sys
import matplotlib.pyplot as plt

plt.figure()
for index_file in sys.argv[1:]:
    xy = pc.geoIndex().from_file(index_file).bins_as_array()
    plt.plot(xy[0], xy[1], 'o', label=index_file)
    D = pc.geoIndex().from_file(index_file).query_xy(
        [[xy[0][0]], [xy[1][0]]],
        get_data=True,
        fields=['x', 'y', 'h_li', 'pair', 'cycle_number'])
    D = pc.data().from_list(D)
    plt.plot(D.x, D.y, '.', label='first bin of ' + index_file)

plt.axis('equal')
plt.legend()
plt.show()
Code Example #3
            'geophysical':['dac'],
            'bias_correction':['fpb_n_corr'],
            'fit_statistics':['dh_fit_dx','dh_fit_dx_sigma','dh_fit_dy','h_mean', 'h_rms_misfit','h_robust_sprd','n_fit_photons','w_surface_window_final','snr_significance'],
            'orbit_info':['rgt','orbit_number'],
            'derived': ['BP','spot']}

ATL06_fields=list()
for key in ATL06_field_dict:   
    ATL06_fields+=ATL06_field_dict[key]
            
WGS84a=6378137.0
WGS84b=6356752.31424
d2r=np.pi/180.
delta=[10000., 10000.]
  
Q_GI=pc.geoIndex(SRS_proj4=SRS_proj4).from_file(Qfit_index, read_file=True)

D6_GI=pc.geoIndex(SRS_proj4=SRS_proj4).from_file(ATL06_index, read_file=True)
# the Qfit index is at 1 km, the ATL06 index is at 10 km: find the overlap between the two
# Intersect the ATL06 index with the Qfit index

# pack the (x, y) bin centers into complex values so np.unique de-duplicates pairs
xy_10km_Q=Q_GI.bins_as_array()
xy_10km_Q=np.unique(xy_10km_Q[0]+1j*xy_10km_Q[1])
D6_GI=D6_GI.copy_subset(xyBin=[np.real(xy_10km_Q), np.imag(xy_10km_Q)])

out_fields=[ 
    'segment_id','x','y','beam', 'BP', 'h_li', 'h_li_sigma', 'atl06_quality_summary', 
    'dac', 'rgt','orbit_number','spot',
    'dh_fit_dx','N_50m','N_seg','h_qfit_seg','dh_qfit_dx','dh_qfit_dy', 
    'h_robust_sprd', 'snr_significance',
    'h_qfit_50m','sigma_qfit_50m', 'sigma_seg','dz_50m','E_seg','RDE_seg',
    'hbar_20m','RDE_50m','t_seg','t_qfit','y_atc','x_seg_mean','y_seg_mean']
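The x + 1j*y construction above packs (x, y) bin centers into complex values so that np.unique can de-duplicate coordinate pairs in a single call; a small self-contained illustration of the trick:

import numpy as np

# four (x, y) bin centers, two of them duplicates
x = np.array([0., 10., 0., 20.])
y = np.array([0.,  5., 0.,  5.])

# pack each pair into one complex number, de-duplicate, then unpack
xy = np.unique(x + 1j * y)
x_u, y_u = np.real(xy), np.imag(xy)
print(x_u, y_u)   # [ 0. 10. 20.] [0. 5. 5.]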
Code Example #4
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--atl06',
                        '-I',
                        type=str,
                        help="ICESat-2 ATL06 directory to run")
    parser.add_argument('--atm', '-A', type=str, help='ATM directory to run')
    parser.add_argument('--hemisphere',
                        '-H',
                        type=int,
                        default=-1,
                        help='hemisphere, must be 1 or -1')
    parser.add_argument('--query',
                        '-Q',
                        type=float,
                        default=100,
                        help='KD-Tree query radius')
    parser.add_argument('--median',
                        '-M',
                        default=False,
                        action='store_true',
                        help='Run block median')
    parser.add_argument('--verbose',
                        '-v',
                        default=False,
                        action='store_true',
                        help='verbose output of run')
    args = parser.parse_args()

    if args.hemisphere == 1:
        SRS_proj4 = '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs '
    elif args.hemisphere == -1:
        SRS_proj4 = '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'

    # tilde expansion of file arguments
    ATM_dir = os.path.expanduser(args.atm)
    print("working on ATL06 dir {0}, ATM directory {1}".format(
        args.atl06, ATM_dir)) if args.verbose else None

    # find Qfit files within ATM_dir
    Qfit_regex = re.compile(
        r"ATM1B.*_(\d{4})(\d{2})(\d{2})_(\d{2})(\d{2})(\d{2}).*.h5")
    Qfit_files = [
        os.path.join(ATM_dir, f) for f in os.listdir(ATM_dir)
        if Qfit_regex.search(f)
    ]

    # output directory
    out_dir = os.path.join(ATM_dir, 'xovers')
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    out_file = 'vs_{0}.h5'.format(
        os.path.dirname(args.atl06).replace(os.sep, '_'))
    if os.path.isfile(os.path.join(out_dir, out_file)):
        print("found: {0}".format(os.path.join(
            out_dir, out_file))) if args.verbose else None

    ATL06_index = os.path.join(os.sep, 'Volumes', 'ice2', 'ben', 'scf',
                               'AA_06', args.atl06, 'GeoIndex.h5')

    # note: this hard-coded southern projection overrides the hemisphere choice above
    SRS_proj4 = '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
    ATL06_field_dict = {
        None: [
            'delta_time', 'h_li', 'h_li_sigma', 'latitude', 'longitude',
            'segment_id', 'sigma_geo_h', 'atl06_quality_summary'
        ],
        'ground_track':
        ['x_atc', 'y_atc', 'seg_azimuth', 'sigma_geo_at', 'sigma_geo_xt'],
        'geophysical': ['dac'],
        'bias_correction': ['fpb_n_corr'],
        'fit_statistics': [
            'dh_fit_dx', 'dh_fit_dx_sigma', 'dh_fit_dy', 'h_mean',
            'h_rms_misfit', 'h_robust_sprd', 'n_fit_photons',
            'w_surface_window_final', 'snr_significance'
        ],
        'orbit_info': ['rgt', 'orbit_number'],
        'derived': ['BP', 'spot']
    }

    ATL06_fields = list()
    for key in ATL06_field_dict:
        ATL06_fields += ATL06_field_dict[key]

    # read all Qfit files within ATM directory
    Qlist = list()
    for f in sorted(Qfit_files):
        Qlist.append(pc.ATM_Qfit.data().from_h5(f))
    # merge the list of ATM data and build the search tree
    Q_full = pc.data().from_list(Qlist).get_xy(SRS_proj4)
    # run block median for qsub
    if args.median:
        Q_full = blockmedian_for_qsub(Q_full, 5)
    # construct search tree from ATM Qfit coords
    Qtree = KDTree(np.c_[Q_full.x, Q_full.y])
    # could pickle Qtree here to save computational time for future runs

    # read 10 km ATL06 index
    D6_GI = pc.geoIndex(SRS_proj4=SRS_proj4).from_file(ATL06_index,
                                                       read_file=True)
    # Query the Qfit search tree to find intersecting ATL06 bins:
    # search within a radius equal to the half-diagonal of a 12 km square
    # (a 10 km bin plus a 1 km buffer on each side): 12 km/sqrt(2) ≈ 8485 m
    x_10km, y_10km = D6_GI.bins_as_array()
    D6ind, = np.nonzero(
        Qtree.query_radius(np.c_[x_10km, y_10km], 8485, count_only=True))
    # reduce ATL06 bins to valid
    D6_GI = D6_GI.copy_subset(xyBin=[x_10km[D6ind], y_10km[D6ind]])

    out_fields = [
        'segment_id', 'x', 'y', 'beam', 'BP', 'h_li', 'h_li_sigma',
        'atl06_quality_summary', 'dac', 'rgt', 'orbit_number', 'spot',
        'dh_fit_dx', 'N_50m', 'N_seg', 'h_qfit_seg', 'dh_qfit_dx',
        'dh_qfit_dy', 'h_robust_sprd', 'snr_significance', 'h_qfit_50m',
        'sigma_qfit_50m', 'sigma_seg', 'dz_50m', 'E_seg', 'RDE_seg',
        'hbar_20m', 'RDE_50m', 't_seg', 't_qfit', 'y_atc', 'x_seg_mean',
        'y_seg_mean'
    ]
    out_template = {f: np.NaN for f in out_fields}
    out = list()

    for bin_name in sorted(D6_GI.keys()):
        print(bin_name) if args.verbose else None
        bin_xy = [int(coord) for coord in bin_name.split('_')]

        # query ATL06 for the current bin, and index it
        D6list = D6_GI.query_xy([[bin_xy[0]], [bin_xy[1]]],
                                get_data=True,
                                fields=ATL06_field_dict)
        if not isinstance(D6list, list):
            D6list = [D6list]
        D6sub = pc.ATL06.data().from_list(D6list).get_xy(SRS_proj4)

        # query the search tree to find points within query radius
        D6xy = np.concatenate((D6sub.x[:, None], D6sub.y[:, None]), axis=1)
        Qquery = Qtree.query_radius(D6xy, args.query)
        # indices of ATL06 points within bin
        D6ind, = np.nonzero([np.any(i) for i in Qquery])
        # loop over queries in the ATL06 data
        for i_AT in D6ind:
            D6i = D6sub.copy_subset(np.array([i_AT]))
            # grab the Qfit bins around the ATL06 bin
            Qdata = Q_full.copy_subset(Qquery[i_AT], by_row=True)
            Qdata.index(
                np.isfinite(Qdata.elevation) & np.isfinite(Qdata.latitude)
                & np.isfinite(Qdata.longitude))
            # create output dictionary of ATL06 and plane-fit ATM comparison
            this_out = compare_seg_with_qfit(D6i, Qdata, 0, out_template)
            if this_out is not None:
                out.append(this_out)

    D = dict()
    with h5py.File(os.path.join(out_dir, out_file), 'w') as h5f:
        for field in out[0].keys():
            D[field] = np.array([ii[field] for ii in out])
            print(field, D[field].dtype) if args.verbose else None
            h5f.create_dataset(field, data=D[field])
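Both the coarse bin intersection and the per-segment matching above rest on sklearn.neighbors.KDTree.query_radius; a minimal sketch of the two call styles this script uses (count_only=True for the coarse pass, index arrays for the fine pass):

import numpy as np
from sklearn.neighbors import KDTree

rng = np.random.default_rng(0)
pts = rng.uniform(0., 100., (1000, 2))
tree = KDTree(pts)

centers = np.array([[50., 50.], [500., 500.]])
# coarse pass: count neighbors within r of each center
counts = tree.query_radius(centers, r=10., count_only=True)
keep, = np.nonzero(counts)              # centers with at least one neighbor

# fine pass: per-center arrays of neighbor indices
neighbors = tree.query_radius(centers[keep], r=10.)
for c, idx in zip(centers[keep], neighbors):
    print(c, len(idx), 'points within 10 units')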
Code Example #5
File: demo.py  Project: joeymartin888/pointCollection
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May  6 14:10:10 2020

@author: ben
"""

import pointCollection as pc

SRS_proj4 = '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
gI = pc.geoIndex(SRS_proj4=SRS_proj4).for_file(
    '/Volumes/ice2/ben/scf/AA_11/U07/ATL11_000110_0306_02_vU07.h5',
    file_type='ATL11')
Code Example #6
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Apr  4 08:32:55 2020

@author: ben
"""

import pointCollection as pc
import numpy as np
xy0 = (np.array(-360000.), np.array(-1140000.))
gi = pc.geoIndex().from_file(
    '/Volumes/ice2/ben/CS2_data/GL/retrack_BC/2019/index_POCA.h5')
D = gi.query_xy(xy0, fields=['x', 'y', 'h'])
gi = None  # drop the reference so the index can be garbage-collected
Code Example #7
File: get_xover_data.py  Project: cuihaotian/ATL11
def get_xover_data(x0, y0, rgt, GI_files, xover_cache, index_bin_size,
                   params_11):
    """
    Read the data from other tracks.

    Maintain a cache of data so that subsequent reads don't have to reload data from disk
    Inputs:
        x0, y0: bin centers
        rgt: current rgt
        GI_files: list of geographic index files
        xover_cache: data cache (dict)
        index_bin_size: size of the bins in the index
        params_11: default parameter values for the ATL11 fit

    """

    # identify the crossover centers
    x0_ctrs = buffered_bins(x0, y0, 2 * params_11.L_search_XT, index_bin_size)
    D_xover = []

    this_field_dict = params_11.ATL06_field_dict.copy()
    this_field_dict.pop('dem')

    for x0_ctr in x0_ctrs:
        this_key = (np.real(x0_ctr), np.imag(x0_ctr))
        # check if we have already read in the data for this bin
        if this_key not in xover_cache:
            # if we haven't already read in the data, read it in.  These data will be in xover_cache[this_key]
            temp = []
            for GI_file in GI_files:
                new_data = pc.geoIndex().from_file(GI_file).query_xy(
                    this_key, fields=this_field_dict)
                if new_data is not None:
                    temp += new_data
            if len(temp) == 0:
                xover_cache[this_key] = None
                continue
            xover_cache[this_key] = {
                'D':
                pc.data(
                    fields=params_11.ATL06_xover_field_list).from_list(temp)
            }
            # remove the current rgt from data in the cache
            xover_cache[this_key]['D'].index(
                ~np.in1d(xover_cache[this_key]['D'].rgt, [rgt]))
            if xover_cache[this_key]['D'].size == 0:
                continue
            xover_cache[this_key]['D'].get_xy(EPSG=params_11.EPSG)
            # index the cache at 100-m resolution
            xover_cache[this_key]['index'] = pc.geoIndex(
                delta=[100, 100], data=xover_cache[this_key]['D'])
        # now read the data from the crossover cache
        if (xover_cache[this_key] is not None) and (xover_cache[this_key]['D']
                                                    is not None):
            try:
                Q = xover_cache[this_key]['index'].query_xy([x0, y0],
                                                            pad=1,
                                                            get_data=False)
            except KeyError:
                Q = None
            if Q is None:
                continue
            # if we have read in any data for the current bin, subset it to the bins around the reference point
            for key in Q:
                for i0, i1 in zip(Q[key]['offset_start'],
                                  Q[key]['offset_end']):
                    D_xover.append(xover_cache[this_key]['D'][np.arange(
                        i0, i1 + 1, dtype=int)])
    if len(D_xover) > 0:
        D_xover = pc.data().from_list(D_xover)

    # cleanup the cache if it is too large
    if len(xover_cache.keys()) > 50:
        cleanup_xover_cache(xover_cache, x0, y0, 2e4)

    return D_xover
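cleanup_xover_cache is not shown in this snippet; a plausible sketch of it, under the obvious assumption that it evicts cached bins lying farther than W from the current point (hypothetical, not the ATL11 project's actual implementation):

import numpy as np

def cleanup_xover_cache(xover_cache, x0, y0, W):
    # hypothetical: drop cache entries whose (x, y) bin-center keys lie
    # more than W from (x0, y0)
    for key in list(xover_cache.keys()):
        if np.abs((key[0] - x0) + 1j * (key[1] - y0)) > W:
            del xover_cache[key]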
Code Example #8
File: IS2_vs_GPS.py  Project: Kevin2120/IS2-calval
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--gps', '-G', type=str, help="GPS file to run")
    parser.add_argument('--atl06',
                        '-I',
                        type=str,
                        help='ICESat-2 ATL06 directory to run')
    parser.add_argument('--hemisphere',
                        '-H',
                        type=int,
                        default=-1,
                        help='hemisphere, must be 1 or -1')
    parser.add_argument('--query',
                        '-Q',
                        type=float,
                        default=100,
                        help='KD-Tree query radius')
    parser.add_argument('--median',
                        '-M',
                        default=False,
                        action='store_true',
                        help='Run block median')
    parser.add_argument('--verbose',
                        '-v',
                        default=False,
                        action='store_true',
                        help='verbose output of run')
    args = parser.parse_args()

    if args.hemisphere == 1:
        SRS_proj4 = '+proj=stere +lat_0=90 +lat_ts=70 +lon_0=-45 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs '
    elif args.hemisphere == -1:
        SRS_proj4 = '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'

    # tilde expansion of file arguments
    GPS_file = os.path.expanduser(args.gps)
    GPS_dir = os.path.dirname(GPS_file)

    print("working on ATL06 dir {0}, GPS file {1}".format(
        args.atl06, GPS_file)) if args.verbose else None

    # output directory
    out_dir = os.path.join(GPS_dir, 'xovers')
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)

    out_file = 'vs_{0}.h5'.format(
        os.path.dirname(args.atl06).replace(os.sep, '_'))
    if os.path.isfile(os.path.join(out_dir, out_file)):
        print("found: {0}".format(os.path.join(
            out_dir, out_file))) if args.verbose else None

    ATL06_index = os.path.join(os.sep, 'Volumes', 'ice2', 'ben', 'scf',
                               'AA_06', args.atl06, 'GeoIndex.h5')

    # note: this hard-coded southern projection overrides the hemisphere choice above
    SRS_proj4 = '+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
    ATL06_field_dict = {
        None: [
            'delta_time', 'h_li', 'h_li_sigma', 'latitude', 'longitude',
            'segment_id', 'sigma_geo_h', 'atl06_quality_summary'
        ],
        'ground_track':
        ['x_atc', 'y_atc', 'seg_azimuth', 'sigma_geo_at', 'sigma_geo_xt'],
        'geophysical': ['dac'],
        'bias_correction': ['fpb_n_corr'],
        'fit_statistics': [
            'dh_fit_dx', 'dh_fit_dx_sigma', 'dh_fit_dy', 'h_mean',
            'h_rms_misfit', 'h_robust_sprd', 'n_fit_photons',
            'w_surface_window_final', 'snr_significance'
        ],
        'orbit_info': ['rgt', 'orbit_number'],
        'derived': ['BP', 'spot']
    }

    ATL06_fields = list()
    for key in ATL06_field_dict:
        ATL06_fields += ATL06_field_dict[key]

    # read GPS HDF5 file
    GPS_field_dict = {None: ['latitude', 'longitude', 'z']}
    GPS_full = pc.data().from_h5(GPS_file,
                                 field_dict=GPS_field_dict).get_xy(SRS_proj4)
    # run block median over GPS data
    if args.median:
        GPS_full = blockmedian_for_gps(GPS_full, 5)
    # construct search tree from GPS coordinates
    # pickle tree to save computational time for future runs
    if os.path.isfile(os.path.join(GPS_dir, 'tree.p')):
        tree = pickle.load(open(os.path.join(GPS_dir, 'tree.p'), 'rb'))
    else:
        tree = KDTree(np.c_[GPS_full.x, GPS_full.y])
        pickle.dump(tree, open(os.path.join(GPS_dir, 'tree.p'), 'wb'))

    # read 10 km ATL06 index
    D6_GI = pc.geoIndex(SRS_proj4=SRS_proj4).from_file(ATL06_index,
                                                       read_file=True)
    # Query the GPS search tree to find intersecting ATL06 bins:
    # search within a radius equal to the half-diagonal of a 12 km square
    # (a 10 km bin plus a 1 km buffer on each side): 12 km/sqrt(2) ≈ 8485 m
    x_10km, y_10km = D6_GI.bins_as_array()
    D6ind, = np.nonzero(
        tree.query_radius(np.c_[x_10km, y_10km], 8485, count_only=True))
    # reduce ATL06 bins to valid
    D6_GI = D6_GI.copy_subset(xyBin=[x_10km[D6ind], y_10km[D6ind]])

    out_fields = [
        'segment_id', 'x', 'y', 'BP', 'h_li', 'h_li_sigma',
        'atl06_quality_summary', 'dac', 'rgt', 'orbit_number', 'spot',
        'dh_fit_dx', 'N_50m', 'N_seg', 'h_gps_seg', 'dh_gps_dx', 'dh_gps_dy',
        'h_robust_sprd', 'snr_significance', 'h_gps_50m', 'sigma_gps_50m',
        'sigma_seg', 'dz_50m', 'E_seg', 'RDE_seg', 'hbar_20m', 'RDE_50m',
        't_seg', 'y_atc', 'x_seg_mean', 'y_seg_mean'
    ]
    out_template = {f: np.NaN for f in out_fields}
    out = list()

    for bin_name in sorted(D6_GI.keys()):
        print(bin_name) if args.verbose else None
        bin_xy = [int(coord) for coord in bin_name.split('_')]

        # query ATL06 for the current bin, and index it
        D6list = D6_GI.query_xy([[bin_xy[0]], [bin_xy[1]]],
                                get_data=True,
                                fields=ATL06_field_dict)
        if not isinstance(D6list, list):
            D6list = [D6list]
        D6sub = pc.ATL06.data().from_list(D6list).get_xy(SRS_proj4)
        D6sub.ravel_fields()
        # query the search tree to find points within query radius
        #D6xy = np.c_[(np.nanmean(D6sub.x, axis=1),np.nanmean(D6sub.y, axis=1))]
        D6sub.index(np.isfinite(D6sub.x) & np.isfinite(D6sub.h_li))
        D6xy = np.c_[D6sub.x, D6sub.y]
        query = tree.query_radius(D6xy, args.query)
        # indices of ATL06 points within bin
        D6ind, = np.nonzero([np.any(i) for i in query])
        # loop over queries in the ATL06 data
        for i_AT in D6ind:
            D6i = D6sub.copy_subset(np.array([i_AT]))
            # grab the gps bins around the ATL06 bin
            GPS = GPS_full.copy_subset(query[i_AT], by_row=True)
            GPS.index(
                np.isfinite(GPS.z) & np.isfinite(GPS.latitude)
                & np.isfinite(GPS.longitude))
            # create output dictionary of ATL06 and GPS comparison
            this_out = compare_seg_with_gps(D6i, GPS, out_template)
            if this_out is not None:
                out.append(this_out)

    D = dict()
    with h5py.File(os.path.join(out_dir, out_file), 'w') as h5f:
        for field in out[0].keys():
            D[field] = np.array([ii[field] for ii in out])
            print(field, D[field].dtype) if args.verbose else None
            h5f.create_dataset(field, data=D[field])
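blockmedian_for_gps is defined elsewhere; a minimal sketch of the block-median idea it presumably implements — one median surface height per scale-sized spatial block — hypothetical and for illustration only:

import numpy as np

def blockmedian_sketch(x, y, z, scale):
    # assign each point to a block by rounding its coordinates to the
    # block grid, packing each (row, col) pair into one complex number
    bins = np.round(x / scale) + 1j * np.round(y / scale)
    xm, ym, zm = [], [], []
    for b in np.unique(bins):
        ii = np.flatnonzero(bins == b)
        xm.append(np.median(x[ii]))
        ym.append(np.median(y[ii]))
        zm.append(np.median(z[ii]))
    return np.array(xm), np.array(ym), np.array(zm)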
Code Example #9
def read_ATL11(xy0, Wxy, index_file, SRS_proj4, sigma_geo=6.5):
    '''
    read ATL11 data from an index file

    inputs:
        xy0 : 2-element iterable specifying the domain center
        Wxy : Width of the domain
        index_file : file made by pointCollection.geoindex pointing at ATL11 data
        SRS_proj4: projection information for the data

    output:
        D: data structure
        file_list: list of ATL11 files read
    '''

    field_dict_11={None:['latitude','longitude','delta_time',
                        'h_corr','h_corr_sigma','h_corr_sigma_systematic', 'ref_pt'],
                        '__calc_internal__':['rgt'],
                        'cycle_stats':['tide_ocean','dac'],
                        'ref_surf':['e_slope','n_slope', 'x_atc', 'fit_quality']}
    try:
        # catch empty data
        D11_list=pc.geoIndex().from_file(index_file).query_xy_box(
            xy0[0]+np.array([-Wxy/2, Wxy/2]), \
            xy0[1]+np.array([-Wxy/2, Wxy/2]), fields=field_dict_11)
    except ValueError:
        return None, []
    if D11_list is None:
        return None, []
    D_list=[]
    XO_list=[]
    file_list= [ Di.filename for Di in D11_list ]
    for D11 in D11_list:
        D11.get_xy(proj4_string=SRS_proj4)
        # select the subset of the data within the domain
        D11.index((np.abs(D11.x[:,0]-xy0[0]) <= Wxy/2) &\
                 (np.abs(D11.y[:,0]-xy0[1]) <= Wxy/2))
        if D11.size==0:
            continue
        sigma_corr=np.sqrt((sigma_geo*np.abs(np.median(D11.n_slope)))**2+\
                           (sigma_geo*np.abs(np.median(D11.e_slope)))**2+0.03**2)
        # fix for early ATL11 versions that had some zero error estimates.
        bad=np.any(D11.h_corr_sigma==0, axis=1)
        D11.h_corr_sigma[bad,:]=np.NaN

        n_cycles=np.sum(np.isfinite(D11.h_corr), axis=1)
        n_cycles=np.reshape(n_cycles, (D11.shape[0],1))
        n_cycles=np.tile(n_cycles, [1, D11.shape[1]])

        D_list += [pc.data().from_dict({'z':D11.h_corr,
           'sigma_corr':sigma_corr+np.zeros_like(D11.h_corr),
           'sigma':D11.h_corr_sigma,
           'x':D11.x,
           'y':D11.y,
           'x_atc': D11.x_atc,
           'latitude':D11.latitude,
           'longitude':D11.longitude,
           'rgt':D11.rgt,
           'pair':np.zeros_like(D11.x)+D11.pair,
           'ref_pt':D11.ref_pt,
           'cycle':D11.cycle_number,
           'n_cycles': n_cycles,
           'fit_quality': D11.fit_quality,
           'tide_ocean': D11.tide_ocean,
           'dac': D11.dac,
           'delta_time': D11.delta_time,
           'time':D11.delta_time/24/3600/365.25+2018,
           'n_slope':D11.n_slope,
           'e_slope':D11.e_slope,
           'along_track':np.ones_like(D11.x, dtype=bool)})]

        if len(D11.ref_pt) == 0:
            continue
        # N.B.  D11 is getting indexed in this step, and it's leading to the warning in
        # line 76.  Can fix by making crossover_data.from_h5 copy D11 on input
        D_x = pc.ATL11.crossover_data().from_h5(D11.filename, pair=D11.pair, D_at=D11)
        if D_x is None:
            continue
        # fit_quality in D11 applies to all cycles, but is stored against only one
        # specific cycle: broadcast the worst (max) value to every cycle
        temp=np.nanmax(D_x.fit_quality[:,:,0], axis=1)
        for cycle in range(D_x.shape[1]):
            D_x.fit_quality[:,cycle,1]=temp

        D_x.get_xy(proj4_string=SRS_proj4)

        good=np.isfinite(D_x.h_corr)[:,0:2,1].ravel()
        for field in D_x.fields:
            # Pull out only cycles 1 and 2
            temp=getattr(D_x, field)[:,0:2,1]
            setattr(D_x, field, temp.ravel()[good])
        # select the subset of the data within the domain
        D_x.index((np.abs(D_x.x-xy0[0]) <= Wxy/2) &\
                 (np.abs(D_x.y-xy0[1]) <= Wxy/2))
        if D_x.size==0:
            continue

        #N.B.  Check whether n_slope and e_slope are set correctly.
        zero = np.zeros_like(D_x.h_corr)
        blank = zero+np.NaN
        XO_list += [pc.data().from_dict({'z':D_x.h_corr,
            'sigma':D_x.h_corr_sigma,
            'sigma_corr': sigma_corr+np.zeros_like(D_x.h_corr),
            'x':D_x.x,
            'y':D_x.y,
            'latitude':D_x.latitude,
            'longitude':D_x.longitude,
            'rgt':D_x.rgt,
            'pair':np.zeros_like(D_x.x)+D_x.pair,
            'ref_pt':blank,
            'cycle':D_x.cycle_number,
            'n_cycles':blank,
            'fit_quality':D_x.fit_quality,
            'tide_ocean':D_x.tide_ocean,
            'dac':D_x.dac,
            'delta_time':D_x.delta_time,
            'n_slope':D_x.n_slope,
            'e_slope':D_x.e_slope,
            'time':D_x.delta_time/24/3600/365.25+2018,
            'along_track':np.zeros_like(D_x.x, dtype=bool)})]
    try:
        D=pc.data().from_list(D_list+XO_list).ravel_fields()
        D.index(np.isfinite(D.z))
        
    except ValueError:
        # catch empty data
        return None, file_list

    D.index(( D.fit_quality ==0 ) | ( D.fit_quality == 2 ))

    return D, file_list
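The delta_time/24/3600/365.25 + 2018 expression treats delta_time as seconds since the ATLAS 2018-01-01 epoch and converts it to approximate decimal years (ignoring leap days and leap seconds); a quick check of the arithmetic:

import numpy as np

delta_time = np.array([0.0, 365.25 * 24 * 3600])   # the epoch, and one Julian year later
year = delta_time / 24 / 3600 / 365.25 + 2018
print(year)   # [2018. 2019.]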
Code Example #10
def read_ICESat2(xy0, W, gI_file, sensor=2, SRS_proj4=None, tiled=True, seg_diff_tol=2, blockmedian_scale=None, cplx_accept_threshold=0.):
    field_dict={None:['delta_time','h_li','h_li_sigma','latitude','longitude','atl06_quality_summary','segment_id','sigma_geo_h'], 
                'fit_statistics':['dh_fit_dx', 'dh_fit_dy', 'n_fit_photons','w_surface_window_final','snr_significance'],
                'geophysical':['tide_ocean'],
                'ground_track':['x_atc'],
                'orbit_info':['rgt','cycle_number'],
                'derived':['valid', 'BP','LR']}
    if tiled:
        fields=[]
        for key in field_dict:
            fields += field_dict[key]
        fields += ['x','y']
    else:
        fields=field_dict
    px, py=np.meshgrid(np.arange(xy0[0]-W['x']/2, xy0[0]+W['x']/2+1.e4, 1.e4),np.arange(xy0[1]-W['y']/2, xy0[1]+W['y']/2+1.e4, 1.e4) )  
    D0=pc.geoIndex().from_file(gI_file).query_xy((px.ravel(), py.ravel()), fields=fields)
    if D0 is None or len(D0)==0:
        return [None]
    # check the D6 filenames against the blacklist of bad files
    if tiled:
        D0=pc.reconstruct_ATL06_tracks(pc.data(fields=fields).from_list(D0))
    hold_list = read_hold_files()
    hold_list = {(item[0], item[1]) for item in hold_list}
    D1=list()
    for ind, D in enumerate(D0):
        if D.size<2:
            continue
        # skip granules that appear on the hold (bad-file) list
        on_hold = (D.cycle_number[0], D.rgt[0]) in hold_list
        if not on_hold:
            D1.append(D)

    # D1 is now a filtered version of D0
    D_pt=pc.data().from_list(D1)
    bin_xy=1.e4*np.round((D_pt.x+1j*D_pt.y)/1.e4)
    cplx_bins=[]
    # use a name other than xy0 to avoid shadowing the function argument
    for bin0 in np.unique(bin_xy):
        ii=np.flatnonzero(bin_xy==bin0)
        if np.mean(D_pt.atl06_quality_summary[ii]==0) < cplx_accept_threshold:
            cplx_bins+=[bin0]
    cplx_bins=np.array(cplx_bins)
    sigma_corr=np.zeros(len(D1))
    for ind, D in enumerate(D1):
        valid=segDifferenceFilter(D, setValid=False, toNaN=False, tol=seg_diff_tol)
        
        D.assign({'quality':D.atl06_quality_summary})
        
        cplx_data=np.in1d(1.e4*np.round((D.x+1j*D.y)/1.e4), cplx_bins)
        if np.any(cplx_data):
            D.quality[cplx_data] = (D.snr_significance[cplx_data] > 0.02) | \
                (D.n_fit_photons[cplx_data]/D.w_surface_window_final[cplx_data] < 5)
            valid[cplx_data] |= segDifferenceFilter(D, setValid=False, toNaN=False, tol=2*seg_diff_tol)[cplx_data]
        
        D.h_li[valid==0] = np.NaN
        D.h_li[D.quality==1] = np.NaN
        if blockmedian_scale is not None:
            D.blockmedian(blockmedian_scale, field='h_li')
              
        # rename the h_li field to 'z', and set time to the year
        # note that the extra half day is needed because 2018 is in between two leap years
        D.assign({'z': D.h_li, 'time':D.delta_time/24/3600/365.25+2018+0.5/365.25,
                  'sigma':D.h_li_sigma,'cycle':D.cycle_number})
        # thin to 40 m
        D.index(np.mod(D.segment_id, 2)==0)  
        if 'x' not in D.fields:
            D.get_xy(SRS_proj4)
        #D.index(np.isfinite(D.h_li), list_of_fields=['x','y','z','time','year','sigma','sigma_corr','rgt','cycle'])
                
        D.assign({'sensor':np.zeros_like(D.x)+sensor})
        if np.any(np.isfinite(D.dh_fit_dy)):
            dhdy_med=np.nanmedian(D.dh_fit_dy)
            dhdx_med=np.nanmedian(D.dh_fit_dx)
        else:
            dhdy_med=np.NaN
            dhdx_med=np.NaN
        sigma_geo_x=8
        sigma_corr[ind]=np.sqrt(0.03**2+sigma_geo_x**2*(dhdx_med**2+dhdy_med**2))
        if ~np.isfinite(sigma_corr[ind]):
            sigma_corr[ind]=0.1
        D.assign({'sigma_corr':np.zeros_like(D.h_li)+sigma_corr[ind]})
        D1[ind]=D.copy_subset(np.isfinite(D.h_li), datasets=['x','y','z','time',\
                 'delta_time','sigma','sigma_corr','rgt','cycle',\
                     'sensor', 'BP','LR'])
    return D1
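segDifferenceFilter is defined elsewhere in this project; its name and call pattern suggest an along-track difference edit that flags segments whose height jumps relative to their neighbors exceed tol. A hypothetical sketch of that idea, not the actual implementation:

import numpy as np

def seg_difference_sketch(h, tol):
    # invalidate both endpoints of any along-track height step >= tol
    valid = np.ones(h.shape, dtype=bool)
    dh = np.abs(np.diff(h))
    valid[1:] &= dh < tol
    valid[:-1] &= dh < tol
    return valid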
Code Example #11
File: ATL11_to_ATL15.py  Project: whigg/surfaceChange
def read_ATL11(xy0, Wxy, index_file, SRS_proj4):
    field_dict_11={None:['latitude','longitude','delta_time',
                        'h_corr','h_corr_sigma','h_corr_sigma_systematic', 'ref_pt'],
                        '__calc_internal__':['rgt'],
                        'cycle_stats':['tide_ocean','dac'],
                        'ref_surf':['e_slope','n_slope', 'x_atc', 'fit_quality']}
    D11_list=pc.geoIndex().from_file(index_file).query_xy_box(xy0[0]+\
                        np.array([-Wxy/2, Wxy/2]), xy0[1]+np.array([-Wxy/2, Wxy/2]), fields=field_dict_11)
    D_list = []
    XO_list = []
    for D11 in D11_list:
        D11.get_xy(proj4_string=SRS_proj4)
        # 7.5 m horizontal geolocation error scaled by surface slope, plus a 3 cm floor
        sigma_corr=np.sqrt((7.5*np.abs(np.median(D11.n_slope)))**2+
                           (7.5*np.abs(np.median(D11.e_slope)))**2+0.03**2)
        # fix for early ATL11 versions that had some zero error estimates.
        bad = np.any(D11.h_corr_sigma == 0, axis=1)
        D11.h_corr_sigma[bad, :] = np.NaN

        n_cycles = np.sum(np.isfinite(D11.h_corr), axis=1)
        n_cycles = np.reshape(n_cycles, (D11.shape[0], 1))
        n_cycles = np.tile(n_cycles, [1, D11.shape[1]])

        D_list += [pc.data().from_dict({
            'z': D11.h_corr,
            'sigma_corr': sigma_corr + np.zeros_like(D11.h_corr),
            'sigma': D11.h_corr_sigma,
            'x': D11.x,
            'y': D11.y,
            'x_atc': D11.x_atc,
            'latitude': D11.latitude,
            'longitude': D11.longitude,
            'rgt': D11.rgt,
            'ref_pt': D11.ref_pt,
            'cycle': D11.cycle_number,
            'n_cycles': n_cycles,
            'fit_quality': D11.fit_quality,
            'tide_ocean': D11.tide_ocean,
            'dac': D11.dac,
            'delta_time': D11.delta_time,
            'time': D11.delta_time / 24 / 3600 / 365.25 + 2018})]

        D_x = pc.ATL11.crossover_data().from_h5(D11.filename,
                                                pair=D11.pair,
                                                D_at=D11)

        # fit_quality in D11 applies to all cycles, but is stored against only one
        # specific cycle: broadcast the worst (max) value to every cycle
        temp = np.nanmax(D_x.fit_quality[:, :, 0], axis=1)
        for cycle in range(D_x.shape[1]):
            D_x.fit_quality[:, cycle, 1] = temp

        D_x.get_xy(proj4_string=SRS_proj4)

        good = np.isfinite(D_x.h_corr)[:, 0:2, 1].ravel()
        for field in D_x.fields:
            # Pull out only cycles 1 and 2
            temp = getattr(D_x, field)[:, 0:2, 1]
            setattr(D_x, field, temp.ravel()[good])

        zero = np.zeros_like(D_x.h_corr)
        blank = zero + np.NaN
        XO_list += [pc.data().from_dict({
            'z': D_x.h_corr,
            'sigma': D_x.h_corr_sigma,
            'sigma_corr': sigma_corr + np.zeros_like(D_x.h_corr),
            'x': D_x.x,
            'y': D_x.y,
            'latitude': D_x.latitude,
            'longitude': D_x.longitude,
            'rgt': D_x.rgt,
            'ref_pt': blank,
            'cycle': D_x.cycle_number,
            'n_cycles': blank,
            'fit_quality': D_x.fit_quality,
            'tide_ocean': D_x.tide_ocean,
            'dac': D_x.dac,
            'delta_time': D_x.delta_time,
            'time': D_x.delta_time / 24 / 3600 / 365.25 + 2018})]

    D = pc.data().from_list(D_list + XO_list).ravel_fields()
    print({field: getattr(D, field).shape for field in D.fields})
    D.index((D.fit_quality == 0) | (D.fit_quality == 2))
    return D