from icepyx import icesat2data as ipd


def test_OA_request():
    short_name = 'ATL06'
    date_range = ['2018-10-14', '2018-10-16']
    bbox = [-121, 48, -120, 49]
    region = ipd.Icesat2Data(short_name, bbox, date_range)
    region.visualize_spatial_extent(elevplot=True)
    assert not region.OA_data.empty
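# A minimal sketch (not part of the original) for exercising the test above
# outside of a test runner; with pytest installed, the equivalent is
# `pytest -k test_OA_request`. The test needs network access to query the data.
if __name__ == "__main__":
    test_OA_request()
    print("test_OA_request passed")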
from icepyx import icesat2data as ipd

short_name = 'ATL06'
date_range = ['2018-10-14', '2020-04-04']

# bounding box for DEM
bbox = [-75, -76.5, -74.5, -76]
# [-76.5, -79.5, -76, -79]
# [-122, 47, -120, 48]
# [-121.87893182746436, 48.70258466806347, -121.79542720391288, 48.7704252035505]
# bbox = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)]
# print(len(bbox))
# reg_a = ipd.Icesat2Data('ATL06', [-55, 68, -48, 71], ['2019-02-20', '2019-02-28'])
# print(reg_a.file_meta())

region = ipd.Icesat2Data(short_name, bbox, date_range)
print('product: ', region.dataset)
print('dates: ', region.dates)
print('start time: ', region.start_time)
print('end time: ', region.end_time)
print('version: ', region.dataset_version)
print('extent: ', region.spatial_extent)

# search for available granules and provide basic summary info about them
print('\nDATA:')
print('\n'.join([str(item) for item in region.avail_granules().items()]))

# visualize data extents
region.visualize_spatial_extent(elevplot=True)
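# A possible continuation of the script above (a sketch, not part of the
# original): once the search looks right, the same icepyx object can order and
# download the granules. The Earthdata username, email, and download directory
# below are placeholders.
region.earthdata_login('your_earthdata_username', 'you@example.com')
region.order_granules()
region.download_granules('./download')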
import h5py
import pandas as pd
from icepyx import icesat2data as ipd


def read_atl03(lat, lon, date_range, delta_m, path="new_ATL03", extent=None):
    """
    Read ATL03 files and retrieve individual photons in a window around a
    desired latitude and longitude for a given range of dates.
    """
    # Spatial extent
    if extent is None:
        window_lat = delta_lat(lat, lon, delta_m)
        window_lon = delta_lon(lat, lon, delta_m)
        spatial_extent = [
            lon - window_lon, lat - window_lat, lon + window_lon,
            lat + window_lat
        ]
    else:
        spatial_extent = extent
    # this cast can be removed after Issue 82 in icepyx is resolved
    spatial_extent = [float(x) for x in spatial_extent]

    # Retrieve the data
    region_a = ipd.Icesat2Data('ATL03', spatial_extent, date_range)
    region_a.avail_granules(ids=True)
    region_a.earthdata_login(user, earthdata_emails[user])
    region_a.order_vars.append(var_list=['lat_ph', "lon_ph", "h_ph"])
    region_a.subsetparams(Coverage=region_a.order_vars.wanted)
    region_a.order_granules()
    region_a.download_granules(path)

    flist = file_in_dir(path)
    assert len(flist) > 0, (
        "No granules are available for these parameters. "
        "Check that the h5 files were downloaded to path.")

    dataframes = pd.DataFrame(
        columns=["h_ph", "lon_ph", "lat_ph", "ground_track"])
    for file in flist:
        fname = path + "/" + file
        with h5py.File(fname, 'r') as fi:
            # loop over the ground-track groups (gt1l, gt1r, ..., gt3r)
            for my_gt in [k for k in fi.keys() if k.startswith("gt")]:
                lat_ph = fi[my_gt]['heights']["lat_ph"][:]
                lon_ph = fi[my_gt]['heights']["lon_ph"][:]
                h_ph = fi[my_gt]['heights']["h_ph"][:]
                df = pd.DataFrame.from_dict({
                    "h_ph": h_ph,
                    "lon_ph": lon_ph,
                    "lat_ph": lat_ph,
                    "ground_track": [my_gt] * len(h_ph)
                })
                if extent is None:
                    df = df[(df["lat_ph"] < lat + window_lat)
                            & (df["lon_ph"] < lon + window_lon)
                            & (df["lat_ph"] > lat - window_lat)
                            & (df["lon_ph"] > lon - window_lon)]
                else:
                    df = df[(df["lat_ph"] < extent[3])
                            & (df["lon_ph"] < extent[2])
                            & (df["lat_ph"] > extent[1])
                            & (df["lon_ph"] > extent[0])]
                dataframes = dataframes.append(df, ignore_index=True)
    return dataframes
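# A hedged usage sketch for read_atl03 (not from the original): the coordinates,
# date range, and 1000 m window below are arbitrary example values, and the call
# assumes the module-level helpers (delta_lat, delta_lon, file_in_dir) and the
# `user` / `earthdata_emails` credentials referenced inside the function exist.
if __name__ == "__main__":
    photons = read_atl03(lat=-76.0, lon=-74.75,
                         date_range=['2018-10-14', '2020-04-04'],
                         delta_m=1000, path="new_ATL03")
    print(photons.groupby("ground_track")["h_ph"].describe())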
import os
import sys
import getopt

import h5py
import numpy as np
from icepyx import icesat2data as ipd


def main():
    #-- Read the system arguments listed after the program
    long_options = [
        'HELP', 'DIR=', 'EXTENT=', 'DATE=', 'USER=', 'EMAIL=', 'noDownload'
    ]
    #-- EMAIL is a long option only, so it does not clash with -E (EXTENT)
    optlist, arglist = getopt.getopt(sys.argv[1:], 'HD:E:T:U:N', long_options)

    #-- Set default settings
    ddir = '/home/jovyan/data'
    short_name = 'ATL06'
    spatial_extent = [31.5, -70.56, 33.73, -69.29]
    date_range = ['2020-03-30', '2020-04-01']
    user = ''
    email = ''
    download = True

    #-- read commandline inputs
    for opt, arg in optlist:
        if opt in ("-H", "--HELP"):
            run_help()
            sys.exit('Done.')
        elif opt in ("-D", "--DIR"):
            ddir = os.path.expanduser(arg)
        elif opt in ("-E", "--EXTENT"):
            spatial_extent = [
                float(i)
                for i in arg.replace('[', '').replace(']', '').split(',')
            ]
        elif opt in ("-T", "--DATE"):
            date_range = arg.replace('[', '').replace(']', '').replace(
                "'", "").split(',')
        elif opt in ("-U", "--USER"):
            user = arg
        elif opt == "--EMAIL":
            email = arg
        elif opt in ("-N", "--noDownload"):
            download = False

    if download:
        #-- login to earth data and get data
        region_a = ipd.Icesat2Data(short_name, spatial_extent, date_range)
        region_a.earthdata_login(user, email)
        #-- put data order
        region_a.order_vars.append(var_list=['count'])
        #-- download data
        region_a.download_granules(ddir)

    #-- Get list of files
    file_list = os.listdir(ddir)
    files = [f for f in file_list if f.endswith('.h5')]

    #-- Loop through files, read each one, and save histograms as numpy arrays
    for f in files:
        print(f)
        #-- read specified file
        FILE_NAME = os.path.join(ddir, f)
        fid = h5py.File(FILE_NAME, mode='r')

        #-- determine which beam is the strong beam (left or right)
        if fid['gt1l'].attrs['atlas_beam_type'] == 'strong':
            strong_id = 'l'
        else:
            strong_id = 'r'

        #-- loop over all three beam pairs and save all three
        for i in range(1, 4):
            #-- read count
            count = fid['gt%i%s/residual_histogram/count' % (i, strong_id)][:]
            lat_mean = fid['gt%i%s/residual_histogram/lat_mean' %
                           (i, strong_id)][:]
            lon_mean = fid['gt%i%s/residual_histogram/lon_mean' %
                           (i, strong_id)][:]
            h_li = fid['gt%i%s/land_ice_segments/h_li' % (i, strong_id)][:]
            h_lat = fid['gt%i%s/land_ice_segments/latitude' %
                        (i, strong_id)][:]
            h_lon = fid['gt%i%s/land_ice_segments/longitude' %
                        (i, strong_id)][:]

            path_hist = os.path.join(ddir, 'hist')
            if not os.path.exists(path_hist):
                os.makedirs(path_hist)
            path_lon = os.path.join(ddir, 'lon')
            if not os.path.exists(path_lon):
                os.makedirs(path_lon)
            path_lat = os.path.join(ddir, 'lat')
            if not os.path.exists(path_lat):
                os.makedirs(path_lat)

            #-- save numpy arrays
            np.save(
                os.path.join(
                    path_hist,
                    f.replace('.h5', '_hist_gt%i%s.npy' % (i, strong_id))),
                count)
            np.save(
                os.path.join(
                    path_lat,
                    f.replace('.h5', '_lat_mean_gt%i%s.npy' % (i, strong_id))),
                lat_mean)
            np.save(
                os.path.join(
                    path_lon,
                    f.replace('.h5', '_lon_mean_gt%i%s.npy' % (i, strong_id))),
                lon_mean)
            np.save(
                os.path.join(
                    ddir,
                    f.replace('.h5', '_h_li_gt%i%s.npy' % (i, strong_id))),
                h_li)
            np.save(
                os.path.join(
                    ddir,
                    f.replace('.h5', '_h_lat_gt%i%s.npy' % (i, strong_id))),
                h_lat)
            np.save(
                os.path.join(
                    ddir,
                    f.replace('.h5', '_h_lon_gt%i%s.npy' % (i, strong_id))),
                h_lon)
        #-- close hdf5 file
        fid.close()
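# A minimal usage sketch (not part of the original script): run main() as a
# command-line entry point, then reload one of the arrays it saved. The granule
# file name below is a placeholder; the directory and suffix mirror the np.save
# calls in main().
if __name__ == '__main__':
    main()
    # example: count = np.load('/home/jovyan/data/hist/<granule>_hist_gt1l.npy')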