Example #1
0
def test_properties():
    """Check that Query constructor arguments round-trip through its properties."""
    reg_a = ipx.Query(
        "ATL06",
        [-64, 66, -55, 72],
        ["2019-02-22", "2019-02-28"],
        start_time="03:30:00",
        end_time="21:30:00",
        version="2",
    )
    # (observed property, expected value) pairs
    checks = [
        (reg_a.dataset, "ATL06"),
        (reg_a.dates, ["2019-02-22", "2019-02-28"]),
        (reg_a.start_time, "03:30:00"),
        (reg_a.end_time, "21:30:00"),
        (reg_a.dataset_version, "002"),  # version "2" is zero-padded to "002"
        (reg_a.spatial_extent, ["bounding box", [-64, 66, -55, 72]]),
    ]
    for observed, expected in checks:
        assert observed == expected
Example #2
0
def test_icepyx_boundingbox_query():
    """Verify Query properties for a bounding-box query (current product API)."""
    reg_a = ipx.Query(
        "ATL06",
        [-64, 66, -55, 72],
        ["2019-02-22", "2019-02-28"],
        start_time="03:30:00",
        end_time="21:30:00",
        version="5",
    )
    assert reg_a.product == "ATL06"
    assert reg_a.dates == ["2019-02-22", "2019-02-28"]
    assert reg_a.start_time == "03:30:00"
    assert reg_a.end_time == "21:30:00"
    # version "5" is zero-padded to "005"
    assert reg_a.product_version == "005"
    # extent coordinates are normalized to floats
    assert reg_a.spatial_extent == ("bounding box", [-64.0, 66.0, -55.0, 72.0])
Example #3
0
def test_CMRparams():
    """Check the CMR parameter dict contains the required and spatial keys."""
    query = ipx.Query("ATL06", [-64, 66, -55, 72],
                      ["2019-02-22", "2019-02-28"])
    query.build_CMR_params()
    params = query.CMRparams

    # every mandatory key must be present
    for required in ["short_name", "version", "temporal"]:
        assert required in params
    # at least one spatial key must be present
    assert "bounding_box" in params or "polygon" in params
Example #4
0
 def query(self, parameters):
     """Build an icepyx Query from a parameter dict.

     ``parameters`` must provide 'dataset', 'start', 'end', and a
     comma-separated 'bbox' string; coordinates are rounded to 4 places.
     """
     coords = parameters['bbox'].split(',')
     bbox = [round(float(c), 4) for c in coords]
     result = ipx.Query(
         parameters['dataset'],
         bbox,
         [parameters['start'], parameters['end']],
     )
     # attach credentials and session from this wrapper object
     result._email = self.credentials['email']
     result._session = self._session.session
     return result
Example #5
0
def test_correct_granule_list_returned():
    """avail_granules should surface the four expected v002 granules."""
    query = ipx.Query("ATL06", [-55, 68, -48, 71],
                      ["2019-02-20", "2019-02-28"],
                      version="2")

    query.avail_granules()
    observed = {gran["producer_granule_id"] for gran in query.granules.avail}
    expected = {
        "ATL06_20190221121851_08410203_002_01.h5",
        "ATL06_20190222010344_08490205_002_01.h5",
        "ATL06_20190225121032_09020203_002_01.h5",
        "ATL06_20190226005526_09100205_002_01.h5",
    }

    assert observed == expected
Example #6
0
def test_correct_granule_list_returned():
    """avail_granules(ids=True) should list the four expected v003 granules."""
    query = ipx.Query("ATL06", [-55, 68, -48, 71],
                      ["2019-02-20", "2019-02-28"], version="3")

    # ids=True returns a one-element list wrapping the id list
    (granule_ids,) = query.avail_granules(ids=True)
    expected = {
        "ATL06_20190221121851_08410203_003_01.h5",
        "ATL06_20190222010344_08490205_003_01.h5",
        "ATL06_20190225121032_09020203_003_01.h5",
        "ATL06_20190226005526_09100205_003_01.h5",
    }
    assert set(granule_ids) == expected
Example #7
0
    def __init__(
        self,
        query_obj=None,
        product=None,
        spatial_extent=None,
        date_range=None,
        cycles=None,
        tracks=None,
    ):
        """Wrap an existing icepyx Query, or build one from the given pieces.

        When ``query_obj`` is falsy a new ``ipx.Query`` is constructed from
        the remaining arguments.
        """
        if not query_obj:
            query_obj = ipx.Query(
                dataset=product,
                spatial_extent=spatial_extent,
                date_range=date_range,
                cycles=cycles,
                tracks=tracks,
            )

        self.product = is2ref._validate_OA_product(query_obj.dataset)

        if query_obj.extent_type == "bounding_box":
            self.bbox = query_obj._spat_extent
        else:
            # non-box extents: derive a bounding box from the minimum
            # rotated rectangle around the query geometry
            rect = query_obj._spat_extent.minimum_rotated_rectangle
            xs, ys = rect.exterior.coords.xy
            self.bbox = [min(xs), min(ys), max(xs), max(ys)]

        if hasattr(query_obj, "_start"):
            self.date_range = [
                query_obj._start.strftime("%Y-%m-%d"),
                query_obj._end.strftime("%Y-%m-%d"),
            ]
        else:
            self.date_range = None
        self.cycles = getattr(query_obj, "_cycles", None)
        self.tracks = getattr(query_obj, "_tracks", None)
def download(bbox, timeframe, beam, earthdata_uid, email):
    """Search, download, and read ATL03 granules for a region/time window.

    Parameters
    ----------
    bbox : list
        Spatial extent passed to ``ipx.Query`` (and used to trim each frame).
    timeframe : list
        Date range, e.g. ``["2019-02-20", "2019-02-28"]``.
    beam : str
        Beam/ground-track label forwarded to the ATL03 reader.
    earthdata_uid, email : str
        NASA Earthdata credentials.

    Returns
    -------
    dict or None
        Mapping of granule file name -> processed DataFrame, or ``None``
        when the user declines the download.
    """
    import os
    import h5py  # used by the read loop below (was an undeclared name)
    from astropy.time import Time  # GPS->UTC conversion (was an undeclared name)
    import icepyx as ipx
    import readers as rd

    short_name = "ATL03"  # BUG FIX: was `ATL03 '''...'''` (syntax error)

    region = ipx.Query(short_name, bbox, timeframe)
    region.avail_granules()

    print(region.avail_granules())
    # BUG FIX: `('y' or 'yes' == input(...))` was always truthy, so the
    # prompt could never decline. Compare the actual answer instead.
    answer = input("Here's what we've got! Continue? (y/n)").strip().lower()
    if answer not in ("y", "yes"):
        # was exit(); return instead so callers can keep running
        print("Nothing was downloaded")
        return None

    region.earthdata_login(earthdata_uid, email)

    path = os.path.join('./download', short_name)
    region.download_granules(path)
    # avail_granules(ids=True) returns a one-element list wrapping the ids
    files = region.avail_granules(ids=True)[0]

    print("Downloaded! Have a great day!")

    results = {}
    for file in files:
        # BUG FIX: was `path+file` (no separator) and the file was never closed
        with h5py.File(os.path.join(path, file), 'r') as f:
            df = rd.getATL03(f, beam)

            # trim to bounding box
            df_cut = df[bbox]

            # convert time to UTC
            epoch = f['/ancillary_data/atlas_sdp_gps_epoch'][0]
            df_cut['time'] = Time(epoch + df_cut['dt'], format='gps').utc.datetime

            # calculate along-track distance
            df_cut['AT_dist'] = df_cut.x - df_cut.x.values[0]

            # BUG FIX: `self{file} = df_cut` was invalid syntax (and no
            # `self` exists here); collect results keyed by granule name
            results[file] = df_cut

    return results
        
        
Example #9
0
    def query_icesat2_filelist(self) -> tuple:
        """
        Query list of ICESat-2 files for each bounding box

        Returns
        -------
        filelist_tuple : tuple
            A tuple of (bounding box, ICESat-2 file list) pairs, one per
            grid cell whose search returned at least one granule
        """
        # a list of 5*5 bounding boxes
        bbox_list = self.grid_bbox()

        is2_bbox_list = []
        is2_file_list = []

        for bbox_i in bbox_list:

            try:
                region = ipx.Query(
                    self.product,
                    bbox_i,
                    self.date_range,
                    cycles=self.cycles,
                    tracks=self.tracks,
                )
                icesat2_files = region.avail_granules(ids=True)[0]
            except (AttributeError, AssertionError):
                # icepyx raises AssertionError when a search has no results;
                # skip this cell and keep scanning the grid
                continue

            if not icesat2_files:
                continue
            # keep only files from the most recent cycle(s) for this cell
            all_cycles = list(set(region.avail_granules(cycles=True)[0]))
            icesat2_files_latest_cycle = files_in_latest_n_cycles(
                icesat2_files, [int(c) for c in all_cycles]
            )
            is2_bbox_list.append(bbox_i)
            is2_file_list.append(icesat2_files_latest_cycle)

        # BUG FIX: the annotation and docstring promise a tuple, but a bare
        # zip object is a lazy, single-use iterator; materialize it so the
        # result can be iterated more than once.
        return tuple(zip(is2_bbox_list, is2_file_list))
Example #10
0
def test_reqconfig_params():
    """Check request-configuration params for search and download modes."""
    query = ipx.Query("ATL06", [-64, 66, -55, 72],
                      ["2019-02-22", "2019-02-28"])

    # search mode: paging keys must be present
    query.build_reqconfig_params("search")
    for key in ["page_size", "page_num"]:
        assert key in query.reqparams

    # download mode: rebuild from scratch, then add auth placeholders
    query.reqparams = None
    query.build_reqconfig_params("download")
    query.reqparams.update({"token": "", "email": ""})
    expected = [
        "page_size",
        "page_num",
        "request_mode",
        "token",
        "email",
        "include_meta",
    ]
    assert all(key in query.reqparams for key in expected)
Example #11
0
def test_no_granules_in_search_results():
    """An empty search should raise AssertionError with a helpful message."""
    ermsg = "Your search returned no results; try different search parameters"
    with pytest.raises(AssertionError, match=ermsg):
        query = ipx.Query(
            "ATL06",
            [-55, 68, -48, 71],
            ["2019-02-20", "2019-02-20"],
            version="2",
        )
        query.avail_granules()
Example #12
0
def read_atl06(spatial_extent,
               date_range,
               time_start="00:00:00",
               time_end="23:59:59",
               path="./data/new_ATL06",
               user='******',
               email='*****@*****.**'):
    """Download (if needed) and read ATL06 granules into a DataFrame.

    Parameters
    ----------
    spatial_extent : list
        Bounding box or polygon accepted by ``ipx.Query``.
    date_range : list
        ``["YYYY-MM-DD", "YYYY-MM-DD"]`` start/end dates.
    time_start, time_end : str
        Daily time window for the query.
    path : str
        Directory where ``.h5`` granule files are stored/downloaded.
    user, email : str
        NASA Earthdata credentials.

    Returns
    -------
    pandas.DataFrame or None
        Concatenated ATL06 data with a fresh index, or ``None`` when no
        granules match the query.
    """
    region_a = ipx.Query("ATL06",
                         spatial_extent,
                         date_range,
                         start_time=time_start,
                         end_time=time_end)

    # avail_granules(ids=True) returns a one-element list wrapping the
    # list of granule ids, hence the trailing [0].
    avail_granules = region_a.avail_granules(ids=True)[0]
    print("Available Granules:", avail_granules)

    # sanity check: the reported count should match the id list length
    assert region_a.avail_granules()['Number of available granules'] == len(
        avail_granules), "The number of avail granules does not match"

    if not avail_granules:
        print("No granules for this specification")
        return None

    # skip the download when every granule is already on disk
    # NOTE(review): assumes `path` already exists — confirm upstream
    if all(is_file_in_directory(g, path) for g in avail_granules):
        print("You have already downloaded all the requested files")
    else:
        region_a.earthdata_login(user, email)
        # variable subsetting is disabled pending
        # https://github.com/icesat2py/icepyx/issues/145
        region_a.order_granules()
        region_a.download_granules(path)

    # match each requested granule name to an .h5 file under `path`
    requested_files = []
    for fname_granules in avail_granules:
        for fname_folder in Path(path).glob('*.h5'):
            if fname_granules in str(fname_folder):
                requested_files.append(fname_folder)
                break
        else:
            print(fname_granules, "not found")

    if len(avail_granules) != len(requested_files):
        print("You are missing some files. There are a total of",
              len(avail_granules), "available granules but you are accessing",
              len(requested_files), "h5 files")

    dataf = atl06_2_df(requested_files)

    # drop the old index instead of keeping it as a column
    # (was: reset_index() followed by `del dataf['index']`)
    return dataf.reset_index(drop=True)
def reg(scope="module"):
    """Return an ATL06 v004 Query over the standard test region.

    NOTE(review): the ``scope`` parameter looks like a stripped
    ``@pytest.fixture(scope="module")`` decorator argument — confirm upstream.
    """
    query = ipx.Query(
        "ATL06",
        [-55, 68, -48, 71],
        ["2019-02-22", "2019-02-28"],
        version="004",
    )
    return query
        "len": 40.0,
        "res": 20.0,
        "maxi": 1
    }

    # get command line parameters
    parse_command_line(sys.argv, icfg)
    parse_command_line(sys.argv, scfg)
    parse_command_line(sys.argv, parms)

    # bypass service discovery if url is localhost
    if scfg["url"] == '127.0.0.1':
        scfg["url"] = ['127.0.0.1']

    # create icepx region
    iregion = icepyx.Query(icfg["short_name"], icfg["spatial_extent"], icfg["date_range"], cycles=icfg["cycles"], tracks=icfg["tracks"])

    # visualize icepx region
    # iregion.visualize_spatial_extent()

    # display summary information
    iregion.product_summary_info()
    # print("Available Granules:", iregion.avail_granules())
    # print("Available Granule IDs:", iregion.avail_granules(ids=True))

    # initialize sliderule api
    icesat2.init(scfg["url"], verbose=True)

    # generate sliderule atl06 elevations
    # parms["poly"] = icesat2.toregion(icfg["spatial_extent"])[0]
    atl06_sr = ipxapi.atl06p(iregion, parms, scfg["asset"])
Example #15
0
def atl03(area, date_range, path, user, email):
    """
    Retrieve atl_03 granules corresponding to selected spatio-temporal area

    Parameters
    ----------
    area : list
        Spatial extent (bounding box or polygon) for ``ipx.Query``.
    date_range : list
        ``["YYYY-MM-DD", "YYYY-MM-DD"]`` start/end dates.
    path : str
        Directory for downloaded ``.h5`` granules.
    user, email : str
        NASA Earthdata credentials.

    Returns
    -------
    pandas.DataFrame
        Photon heights/positions with a 'ground_track' label per row.
    """
    region_a = ipx.Query('ATL03', spatial_extent=area, date_range=date_range)
    avail_granules = region_a.avail_granules(ids=True)[0]
    print("Available Granules:", avail_granules)

    # skip the download when every granule is already on disk
    if all(is_file_in_directory(g, path) for g in avail_granules):
        print("You have already downloaded all the requested files")
    else:
        region_a.earthdata_login(user, email)
        region_a.order_granules()
        region_a.download_granules(path)

    # match each requested granule name to an .h5 file under `path`
    requested_files = []
    for fname_granules in avail_granules:
        for fname_folder in Path(path).glob('*.h5'):
            if fname_granules in str(fname_folder):
                requested_files.append(fname_folder)
                break
        else:
            print(fname_granules, "not found")

    if len(avail_granules) != len(requested_files):
        print("You are missing some files. There are a total of",
              len(avail_granules), "available granules but you are accessing",
              len(requested_files), "h5 files")

    frames = []
    for file in requested_files:

        with h5py.File(file, 'r') as fi:

            # BUG FIX: `filter(fi.keys(), ["gt"])` passed filter()'s
            # arguments in the wrong order (and a KeysView is not callable),
            # raising TypeError; select the ground-track groups explicitly.
            for my_gt in (k for k in fi.keys() if k.startswith("gt")):

                lat_ph = fi[my_gt]['heights']["lat_ph"][:]
                lon_ph = fi[my_gt]['heights']["lon_ph"][:]
                h_ph = fi[my_gt]['heights']["h_ph"][:]

                frames.append(pd.DataFrame.from_dict({
                    "h_ph": h_ph,
                    "lon_ph": lon_ph,
                    "lat_ph": lat_ph,
                    "ground_track": [my_gt] * len(h_ph)
                }))

    # BUG FIX: DataFrame.append was removed in pandas 2.0; concatenate once
    if frames:
        return pd.concat(frames, ignore_index=True)
    return pd.DataFrame(columns=["h_ph", "lon_ph", "lat_ph", "ground_track"])