def reread_data_from_files(xy0, W, thedir, template='E%d_N%d.h5'):
    """
    Read data from a set of output (fit) files

    Inputs:
        xy0: 2-tuple box center
        W: box width
        thedir: directory to search
        template: template for the file names

    For a directory of files, find the files overlapping the requested box,
    and select the data closest to the box center.
    """
    # offsets of the nine candidate tiles around the box center
    d_i, d_j = np.meshgrid([-1., 0., 1.], [-1., 0., 1.])
    data_list = []
    index_list = []
    for ii, jj in zip(d_i.ravel(), d_j.ravel()):
        this_xy = [xy0[0] + W / 2 * ii, xy0[1] + W / 2 * jj]
        this_file = thedir + '/' + template % (this_xy[0] / 1000,
                                               this_xy[1] / 1000)
        if not os.path.isfile(this_file):
            continue
        with h5py.File(this_file, 'r') as h5f:
            this_data = {key: np.array(h5f['data'][key])
                         for key in h5f['data'].keys()}
        this_data = point_data(
            list_of_fields=list(this_data.keys())).from_dict(this_data)
        # keep only the points that fall inside the requested box
        these = (np.abs(this_data.x - xy0[0]) < W / 2) & \
                (np.abs(this_data.y - xy0[1]) < W / 2)
        this_data.index(these)
        data_list.append(this_data)
        # bin the points into Lb x Lb cells (locations encoded as x + 1j*y),
        # and record each cell's distance from the current file's center
        this_index = {}
        Lb = 1.e4
        this_index['xyb'] = np.round(
            (this_data.x + 1j * this_data.y - (1 + 1j) * Lb / 2) / Lb) * Lb
        this_index['xyb0'] = np.unique(this_index['xyb'])
        this_index['dist0'] = np.abs(this_index['xyb0'] -
                                     (this_xy[0] + 1j * this_xy[1]))
        this_index['N'] = len(data_list) - 1 + np.zeros_like(
            this_index['xyb0'], dtype=int)
        index_list.append(this_index)
    index = {key: np.concatenate([item[key] for item in index_list])
             for key in ['xyb0', 'dist0', 'N']}
    # for each bin, keep the data from the file whose center is closest
    bins = np.unique(index['xyb0'])
    data_sub_list = []
    for this_bin in bins:
        these = np.where(index['xyb0'] == this_bin)[0]
        this = index['N'][these[np.argmin(index['dist0'][these])]]
        data_sub_list.append(
            data_list[this].subset(index_list[this]['xyb'] == this_bin))
    fields = data_list[0].list_of_fields
    return point_data(list_of_fields=fields).from_list(data_sub_list)
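# A minimal usage sketch for reread_data_from_files, assuming a directory of
# fit tiles on disk named with the E%d_N%d.h5 convention; the path and box
# geometry below are hypothetical.
def demo_reread_data_from_files():
    xy0 = (-1600000., -440000.)   # box center, m
    W = 4.e4                      # box width, m
    D = reread_data_from_files(xy0, W, '/path/to/fit_output')
    print(D.list_of_fields, D.x.size)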
def __init__(self, D, map_file, index_file, datatype):
    self.files = []
    self.datatype = datatype
    srs_proj4 = geo_index().from_file(index_file).attrs['SRS_proj4']
    if datatype == 'ATL06':
        for ii, Di in enumerate(D):
            Di.get_xy(srs_proj4)
            Di.assign({'file_num': np.zeros_like(Di.x) + ii})
            self.files += [Di.filename]
        self.D_all = point_data().from_list(D)
    elif datatype == 'ATL11':
        D_list = []
        for ii, Di in enumerate(D):
            self.files += [Di.filename]
            Di.get_xy(srs_proj4)
            D_list.append(point_data().from_dict({
                'x': Di.x,
                'y': Di.y,
                'file_num': np.zeros_like(Di.x) + ii,
                'h_corr': Di.corrected_h.h_corr[:, -1].ravel()}))
        self.D_all = point_data().from_list(D_list)
    self.D = D
    XR = [np.nanmin(self.D_all.x), np.nanmax(self.D_all.x)]
    YR = [np.nanmin(self.D_all.y), np.nanmax(self.D_all.y)]
    self.fig = plt.figure()
    if map_file is not None:
        mapData().from_geotif(map_file, bounds=[XR, YR]).show(cmap='gray')
    if self.datatype == 'ATL06':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=self.D_all.r_eff,
                    linewidth=0, vmax=1, vmin=0)
    elif self.datatype == 'ATL11':
        plt.scatter(self.D_all.x, self.D_all.y, 6, c=self.D_all.h_corr)
    plt.colorbar()
    # button presses on the figure are dispatched to this object's __call__
    self.cid = self.fig.canvas.mpl_connect('button_press_event', self)
    plt.show()
def read_xovers(xover_dir):
    tiles = glob.glob(xover_dir + '/*.h5')
    # get the field list from the first tile
    with h5py.File(tiles[0], 'r') as h5f:
        fields = [key for key in h5f['D0'].keys()]
    D = []
    X = []
    for tile in tiles:
        # read the two crossing datasets (D0, D1) and the crossover locations
        D.append([point_data(list_of_fields=fields).from_file(
            tile, field_dict={gr: fields}) for gr in ['D0', 'D1']])
        X.append(point_data(list_of_fields=['x', 'y']).from_file(
            tile, field_dict={None: ['x', 'y']}))
    return D, X
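# A sketch of how read_xovers output might be used: each element of D pairs
# the two crossing datasets in a tile, and X gives the crossover locations.
# The directory path is hypothetical.
def demo_read_xovers():
    D, X = read_xovers('/path/to/xover_tiles')
    for (D0, D1), Xi in zip(D, X):
        print('%d crossovers in tile' % Xi.x.size)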
def read_OIB_data(xy0, W, reread_file=None, hemisphere=-1,
                  blockmedian_scale=100, SRS_proj4=None):
    """
    Read indexed files for standard datasets
    """
    sensor_dict = {1: 'ICESat1', 2: 'ICESat2'}
    if reread_file is None:
        # read ICESat-2, then ICESat-1, through their geo index files
        GI = geo_index().from_file(GI_files(hemisphere)['ICESat2'],
                                   read_file=False)
        D = read_ICESat2(xy0, W, GI, SRS_proj4=SRS_proj4)
        GI = None
        GI = geo_index().from_file(GI_files(hemisphere)['ICESat1'],
                                   read_file=False)
        D += read_ICESat(xy0, W, GI)
        GI = None
        data = point_data(list_of_fields=[
            'x', 'y', 'z', 'time', 'sigma', 'sigma_corr', 'sensor',
            'cycle', 'rgt', 'BP', 'LR']).from_list(D)
        data.assign({'day': np.floor(data.time)})
        data.time = matlabToYear(data.time)
        for field in data.list_of_fields:
            setattr(data, field, getattr(data, field).astype(np.float64))
        # keep only points with finite heights and errors
        data.index(np.isfinite(data.z) & np.isfinite(data.sigma_corr) &
                   np.isfinite(data.sigma))
    else:
        # reread previously saved data
        data = {}
        with h5py.File(reread_file, 'r') as h5f:
            for key in h5f['data'].keys():
                data[key] = np.array(h5f['data'][key])
        data = point_data(list_of_fields=list(data.keys())).from_dict(data)
    return data, sensor_dict
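# A usage sketch for read_OIB_data; the box geometry is hypothetical, and the
# projection string is the standard Antarctic polar-stereographic (EPSG:3031)
# proj4 string, assumed here to match hemisphere=-1.
def demo_read_OIB_data():
    srs_proj4 = ('+proj=stere +lat_0=-90 +lat_ts=-71 +lon_0=0 +k=1 '
                 '+x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs')
    data, sensor_dict = read_OIB_data((-1600000., -440000.),
                                      {'x': 4.e4, 'y': 4.e4},
                                      hemisphere=-1, SRS_proj4=srs_proj4)
    # the numeric 'sensor' field maps back to mission names
    for num, name in sensor_dict.items():
        print(name, np.sum(data.sensor == num))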
def read_fit(filename, fields=['data', 'z0', 'dz']):
    S = {}
    if 'data' in fields:
        S['data'] = point_data().from_file(filename, group='/data')
    if 'z0' in fields:
        S['z0'] = mapData().from_h5(filename, group='/z0/',
                                    field_mapping={'z': 'z0'})
    if 'dz' in fields:
        S['dz'] = mapData().from_h5(filename, group='/dz/',
                                    field_mapping={'z': 'dz'})
        # keep only the first epoch of the height-change grid
        S['dz'].z = S['dz'].z[:, :, 0]
    if 'bias' in fields:
        S['bias'] = {}
        with h5py.File(filename, 'r') as h5f:
            for key in h5f['bias'].keys():
                S['bias'][key] = np.array(h5f['bias'][key])
    return S
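# A short sketch of reading a fit file's grids and displaying the reference
# surface; the file path is hypothetical, and mapData.show() is called the
# same way it is in the map-plotting code above.
def demo_read_fit():
    S = read_fit('/path/to/fit_output/E-1600_N-440.h5', fields=['z0', 'dz'])
    plt.figure()
    S['z0'].show(cmap='gray')
    plt.title('z0')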
def write_xovers(xover_list, xy0, out_dir):
    tile_name = 'E%d_N%d.h5' % (xy0[0] / 1.e3, xy0[1] / 1.e3)
    out_file = out_dir + '/' + tile_name
    if os.path.isfile(out_file):
        os.remove(out_file)
    with h5py.File(out_file, 'w') as h5f:
        for key_D in ['data_0', 'data_1']:
            group = '/' + key_D
            h5f.create_group(group)
            # L is the along-segment fraction of the crossover location;
            # the corresponding interpolation weights are [1-L, L]
            key_L = key_D.replace('data_', 'L')
            L = np.c_[[item[key_L] for item in xover_list]]
            Dtemp = [item[key_D] for item in xover_list]
            Dtemp = point_data(
                list_of_fields=Dtemp[0].list_of_fields).from_list(Dtemp)
            # reshape to one row per crossover, two endpoint values per row
            shape = [int(Dtemp.size / 2), 2]
            Dtemp.shape = shape
            for key in Dtemp.list_of_fields:
                temp = getattr(Dtemp, key)
                temp.shape = shape
                h5f.create_dataset(group + '/' + key, data=temp)
            h5f.create_dataset(group + '/W', data=np.c_[1 - L, L])
        xy = np.c_[[item['xyC'] for item in xover_list]]
        h5f.create_dataset('/x', data=xy[:, 0])
        h5f.create_dataset('/y', data=xy[:, 1])
        h5f.create_dataset('/slope_x', data=np.array(
            [item['slope_x'] for item in xover_list]))
        h5f.create_dataset('/slope_y', data=np.array(
            [item['slope_y'] for item in xover_list]))
        h5f.create_dataset('/grounded', data=np.array(
            [item['grounded'] for item in xover_list]))
    return
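# For reference, a sketch of reading a tile written by write_xovers back in:
# each data_* group stores the two endpoint rows bracketing a crossover, and
# W holds the weights [1-L, L], so a weighted sum across axis 1 interpolates
# any stored field to the crossover point. The path is hypothetical, and 'z'
# is assumed to be among the stored fields.
def demo_read_xover_tile():
    with h5py.File('/path/to/xovers/E-1600_N-440.h5', 'r') as h5f:
        w0 = np.array(h5f['data_0/W'])
        w1 = np.array(h5f['data_1/W'])
        z0 = np.sum(np.array(h5f['data_0/z']) * w0, axis=1)
        z1 = np.sum(np.array(h5f['data_1/z']) * w1, axis=1)
    # crossover height difference between the two tracks
    return z1 - z0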
def read_ICESat2(xy0, W, gI_file, sensor=2, SRS_proj4=None, tiled=True,
                 seg_diff_tol=2):
    field_dict = {
        None: ['delta_time', 'h_li', 'h_li_sigma', 'latitude', 'longitude',
               'atl06_quality_summary', 'segment_id', 'sigma_geo_h'],
        'fit_statistics': ['dh_fit_dx'],
        'ground_track': ['x_atc'],
        'geophysical': ['dac'],
        'orbit_info': ['rgt', 'cycle_number'],
        'derived': ['valid', 'matlab_time', 'BP', 'LR']}
    if tiled:
        fields = []
        for key in field_dict:
            fields += field_dict[key]
        fields += ['x', 'y']
    else:
        fields = field_dict
    # query the geo index on a 10-km grid covering the requested box
    px, py = np.meshgrid(
        np.arange(xy0[0] - W['x'] / 2, xy0[0] + W['x'] / 2 + 1.e4, 1.e4),
        np.arange(xy0[1] - W['y'] / 2, xy0[1] + W['y'] / 2 + 1.e4, 1.e4))
    D0 = geo_index().from_file(gI_file).query_xy((px.ravel(), py.ravel()),
                                                 fields=fields)
    if D0 is None or len(D0) == 0:
        return [None]
    if tiled:
        D0 = reconstruct_tracks(
            point_data(list_of_fields=fields).from_list(D0))
    # check the D6 filenames against the blacklist of bad files
    blacklist = None
    D1 = []
    for ind, D in enumerate(D0):
        if D.size < 2:
            continue
        delete_file, blacklist = check_rgt_cycle_blacklist(
            rgt_cycle=[D.rgt[0], D.cycle_number[0]], blacklist=blacklist)
        if delete_file == 0:
            D1.append(D)
    for ind, D in enumerate(D1):
        try:
            segDifferenceFilter(D, setValid=False, toNaN=True,
                                tol=seg_diff_tol)
        except TypeError as e:
            print("segDifferenceFilter failed with error:")
            print(e)
        D.h_li[D.atl06_quality_summary == 1] = np.nan
        # assign standard field names: 'z' is the DAC-corrected height,
        # and 'time' is the matlab time
        D.assign({'z': D.h_li + D.dac,
                  'time': D.matlab_time,
                  'sigma': D.h_li_sigma,
                  'sigma_corr': D.sigma_geo_h,
                  'cycle': D.cycle_number})
        D.assign({'year': D.delta_time / 24. / 3600. / 365.25 + 2018})
        # thin to 40 m by keeping every other 20-m segment
        D.index(np.mod(D.segment_id, 2) == 0)
        if 'x' not in D.list_of_fields:
            D.get_xy(SRS_proj4)
        D.assign({'sensor': np.zeros_like(D.x) + sensor})
        D1[ind] = D.subset(np.isfinite(D.h_li), datasets=[
            'x', 'y', 'z', 'time', 'delta_time', 'year', 'sigma',
            'sigma_corr', 'rgt', 'cycle', 'sensor', 'BP', 'LR'])
    return D1
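# A usage sketch for read_ICESat2: query a 40 km x 40 km box through a geo
# index file and combine the returned per-track data. The file name and box
# geometry are hypothetical.
def demo_read_ICESat2():
    W = {'x': 4.e4, 'y': 4.e4}
    D_list = read_ICESat2((-1600000., -440000.), W, '/path/to/GeoIndex.h5')
    D_list = [Di for Di in D_list if Di is not None]
    return point_data(
        list_of_fields=D_list[0].list_of_fields).from_list(D_list)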
def get_xovers(self):
    rgt = self.attrs['ReferenceGroundTrack']
    pair = self.attrs['pair_num']
    xo = {'ref': {}, 'crossing': {}, 'both': {}}
    for field in ['time', 'h', 'h_sigma', 'ref_pt', 'rgt', 'PT',
                  'atl06_quality_summary', 'latitude', 'longitude', 'cycle']:
        xo['ref'][field] = []
        xo['crossing'][field] = []
    xo['crossing']['RSSz'] = []
    # pair each crossing-track measurement with every valid cycle of the
    # reference track at the same reference point
    for i1, ref_pt in enumerate(self.crossing_track_data.ref_pt):
        i0 = np.where(self.corrected_h.ref_pt == ref_pt)[0][0]
        for ic in range(self.corrected_h.delta_time.shape[1]):
            if not np.isfinite(self.corrected_h.h_corr[i0, ic]):
                continue
            xo['ref']['latitude'] += [self.corrected_h.latitude[i0]]
            xo['ref']['longitude'] += [self.corrected_h.longitude[i0]]
            xo['ref']['time'] += [self.corrected_h.delta_time[i0, ic]]
            xo['ref']['h'] += [self.corrected_h.h_corr[i0, ic]]
            xo['ref']['h_sigma'] += [self.corrected_h.h_corr_sigma[i0, ic]]
            xo['ref']['ref_pt'] += [self.corrected_h.ref_pt[i0]]
            xo['ref']['rgt'] += [rgt]
            xo['ref']['PT'] += [pair]
            xo['ref']['atl06_quality_summary'] += [
                self.cycle_stats.ATL06_summary_zero_count[i0, ic] == 0]
            xo['ref']['cycle'] += [ic + self.cycles[0]]
            xo['crossing']['time'] += [
                self.crossing_track_data.delta_time[i1]]
            xo['crossing']['h'] += [self.crossing_track_data.h_corr[i1]]
            xo['crossing']['h_sigma'] += [
                self.crossing_track_data.h_corr_sigma[i1]]
            xo['crossing']['ref_pt'] += [
                self.crossing_track_data.ref_pt[i1]]
            xo['crossing']['rgt'] += [self.crossing_track_data.rgt[i1]]
            xo['crossing']['PT'] += [
                self.crossing_track_data.spot_crossing[i1]]
            xo['crossing']['atl06_quality_summary'] += [
                self.crossing_track_data.atl06_quality_summary[i1]]
            xo['crossing']['RSSz'] += [
                self.crossing_track_data.along_track_rss[i1]]
            xo['crossing']['cycle'] += [
                self.crossing_track_data.cycle_number[i1]]
    # the crossing points share the reference points' locations
    xo['crossing']['latitude'] = xo['ref']['latitude']
    xo['crossing']['longitude'] = xo['ref']['longitude']
    for field in xo['crossing']:
        xo['crossing'][field] = np.array(xo['crossing'][field])
    for field in xo['ref']:
        xo['ref'][field] = np.array(xo['ref'][field])
    ref = point_data().from_dict(xo['ref'])
    crossing = point_data().from_dict(xo['crossing'])
    # height and time differences, crossing minus reference
    delta = {}
    delta['h'] = crossing.h - ref.h
    delta['time'] = crossing.time - ref.time
    delta['sigma_h'] = np.sqrt(crossing.h_sigma**2 + ref.h_sigma**2)
    delta['latitude'] = ref.latitude.copy()
    delta['longitude'] = ref.longitude.copy()
    delta = point_data().from_dict(delta)
    return ref, crossing, delta
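# A sketch of using the three point_data objects returned by get_xovers to
# summarize crossover differences; D11 stands in for an ATL11 object that
# has this method, and the variable names are hypothetical.
def demo_get_xovers(D11):
    ref, crossing, delta = D11.get_xovers()
    good = np.isfinite(delta.h) & (delta.sigma_h < 1)
    print('median crossover difference: %3.2f m' %
          np.nanmedian(delta.h[good]))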
import matplotlib.pyplot as plt

# two synthetic paths that cross: one parabola opening upward, one opening
# downward
x0 = np.arange(0, 13, 2)
y0 = 0.1 * (x0 * 2)**2 - 2
x1 = np.arange(0.5, 5.2, 1.)
y1 = -(x1**2) + x1 + 5
plt.figure()
plt.plot(x0, y0)
plt.plot(x1, y1)
D = [point_data().from_dict({'x': x0, 'y': y0, 'time': np.arange(len(x0))}),
     point_data().from_dict({'x': x1, 'y': y1, 'time': np.arange(len(x1))})]
ii, f = cross_paths(D)
# interpolate each path to its crossing point with weights [1-f, f]:
# both differences should be close to zero
print(x0[ii[0]:ii[0] + 2].dot([1 - f[0], f[0]]) -
      x1[ii[1]:ii[1] + 2].dot([1 - f[1], f[1]]))
print(y0[ii[0]:ii[0] + 2].dot([1 - f[0], f[0]]) -
      y1[ii[1]:ii[1] + 2].dot([1 - f[1], f[1]]))