"""Tests for the ERDDAP search, info, and download URL builders."""

import httpx
import pytest

# NOTE: the import paths below are assumed from how these names are used in
# this module; adjust them to match the actual package layout.
from erddapy.core.url import check_url_response, parse_dates
from erddapy.servers.servers import servers


def test_search_url_bad_request(e):
    """Test if a bad request returns HTTPError."""
    kw = {
        "min_time": "1700-01-01T12:00:00Z",
        "max_time": "1750-01-01T12:00:00Z",
    }
    with pytest.raises(httpx.HTTPError):
        check_url_response(e.get_search_url(**kw))

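
# The ``e`` fixture and the ``_url_to_dict`` helper are used throughout this
# module but are not defined in this section. The sketches below are
# reconstructed from usage: the server, protocol, and response values in the
# fixture are assumptions, and the real definitions likely live in
# ``conftest.py`` or earlier in the test module.
@pytest.fixture
def e():
    """Yield an ERDDAP instance pointed at a known-good IOOS server."""
    from erddapy import ERDDAP

    yield ERDDAP(
        server="https://standards.sensors.ioos.us/erddap",  # assumed
        protocol="tabledap",  # assumed
        response="htmlTable",  # assumed
    )


def _url_to_dict(url):
    """Split a URL's query string into an {option: value} dict."""
    return {v.split("=")[0]: v.split("=")[1] for v in url.split("?")[1].split("&")}
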
def test_info_url(e):
    """Check info URL results."""
    dataset_id = "org_cormp_cap2"

    url = e.get_info_url(dataset_id=dataset_id)
    assert url == check_url_response(url)
    assert url == f"{e.server}/info/{dataset_id}/index.{e.response}"

    url = e.get_info_url(dataset_id=dataset_id, response="csv")
    assert url == check_url_response(url)
    assert url == f"{e.server}/info/{dataset_id}/index.csv"

def test_servers():
    """
    Test that the listed servers are responding.

    We are OK with redirects here b/c we only want to update a URL if it is
    broken; most of the time a redirect is only adding '/index.html'.
    """
    for server in servers.values():
        # Should raise HTTPError if broken, otherwise returns the URL.
        assert check_url_response(server.url, follow_redirects=True) == server.url

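
# For reference, ``check_url_response`` is assumed to behave roughly like this
# sketch (inferred from its usage in these tests, not copied from the
# library): issue a GET, raise an ``httpx.HTTPError`` on a bad status, and
# return the URL unchanged on success.
def _check_url_response_sketch(url, follow_redirects=False):
    r = httpx.get(url, follow_redirects=follow_redirects)
    r.raise_for_status()  # HTTPStatusError is a subclass of httpx.HTTPError
    return url
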
def test_check_url_response():
    """Test if a bad request returns HTTPError."""
    bad_request = (
        "https://standards.sensors.ioos.us/erddap/tabledap/"
        "org_cormp_cap2.htmlTable?"
        "time,"
        "&time>=2017-08-29T00:00:00Z"
        "&time<=2015-09-05T19:00:00Z"
    )
    with pytest.raises(httpx.HTTPError):
        check_url_response(bad_request)

def test_download_url_unconstrained(e):
    """Check download URL results."""
    dataset_id = "org_cormp_cap2"
    variables = ["station", "z"]
    url = e.get_download_url(dataset_id=dataset_id, variables=variables)
    assert url == check_url_response(url, follow_redirects=True)
    assert url.startswith(f"{e.server}/{e.protocol}/{dataset_id}.{e.response}?")
    assert sorted(url.split("?")[1].split(",")) == sorted(variables)

def test_download_url_distinct(e):
    """Check download URL results with and without the distinct option."""
    dataset_id = "org_cormp_cap2"
    variables = ["station", "z"]
    no_distinct_url = e.get_download_url(dataset_id=dataset_id, variables=variables)
    with_distinct_url = e.get_download_url(
        dataset_id=dataset_id,
        variables=variables,
        distinct=True,
    )
    assert not no_distinct_url.endswith("&distinct()")
    assert with_distinct_url.endswith("&distinct()")
    assert no_distinct_url == check_url_response(no_distinct_url, follow_redirects=True)
    assert with_distinct_url == check_url_response(
        with_distinct_url,
        follow_redirects=True,
    )

def test_search_url_change_protocol(e):
    """Test that changing the protocol shows up in the URL."""
    kw = {"search_for": "salinity"}

    tabledap_url = e.get_search_url(protocol="tabledap", **kw)
    assert tabledap_url == check_url_response(tabledap_url)
    options = _url_to_dict(tabledap_url)
    assert options.pop("protocol") == "tabledap"

    griddap_url = e.get_search_url(protocol="griddap", **kw)
    # Turned off while no griddap datasets are available:
    # assert griddap_url == check_url_response(griddap_url)
    assert griddap_url == tabledap_url.replace("tabledap", "griddap")
    options = _url_to_dict(griddap_url)
    assert options.pop("protocol") == "griddap"

    e.protocol = None
    url = e.get_search_url(**kw)
    assert url == check_url_response(url)
    options = _url_to_dict(url)
    assert options.pop("protocol") == "(ANY)"

def test_search_url_valid_request_with_relative_time_constraints(e):
    """Test that relative time constraints are passed through unparsed."""
    min_time = "now-25years"
    max_time = "now-20years"
    kw = {"min_time": min_time, "max_time": max_time}
    url = e.get_search_url(dataset_id="org_cormp_cap2", **kw)
    assert url == check_url_response(url)
    assert url.startswith(f"{e.server}/search/advanced.{e.response}?")
    options = _url_to_dict(url)
    assert options.pop("minTime") == min_time
    assert options.pop("maxTime") == max_time
    assert options.pop("itemsPerPage") == str(1000)
    for k, v in options.items():
        if k == "protocol":
            assert v == e.protocol
        else:
            assert v == "(ANY)"

def test_search_url_valid_request(e):
    """Test that a valid request builds the expected search URL."""
    min_time = "2000-03-23T00:00:00Z"
    max_time = "2000-03-30T14:08:00Z"
    kw = {"min_time": min_time, "max_time": max_time}
    url = e.get_search_url(**kw)
    assert url == check_url_response(url)
    assert url.startswith(f"{e.server}/search/advanced.{e.response}?")
    options = _url_to_dict(url)
    assert options.pop("minTime") == str(parse_dates(min_time))
    assert options.pop("maxTime") == str(parse_dates(max_time))
    assert options.pop("itemsPerPage") == str(1000)
    for k, v in options.items():
        if k == "protocol":
            assert v == e.protocol
        else:
            assert v == "(ANY)"

def test_download_url_constrained(e):
    """Test a constrained download URL."""
    dataset_id = "org_cormp_cap2"
    variables = ["station", "z"]

    min_time = "2000-03-23T00:00:00Z"
    max_time = "2000-03-30T14:08:00Z"
    min_lat = 32.8032
    max_lat = 32.8032
    min_lon = -79.6204
    max_lon = -79.6204

    constraints = {
        "time>=": min_time,
        "time<=": max_time,
        "latitude>=": min_lat,
        "latitude<=": max_lat,
        "longitude>=": min_lon,
        "longitude<=": max_lon,
    }

    url = e.get_download_url(
        dataset_id=dataset_id,
        variables=variables,
        response="csv",
        constraints=constraints,
    )
    assert url == check_url_response(url, follow_redirects=True)
    assert url.startswith(f"{e.server}/{e.protocol}/{dataset_id}.csv?")
    options = _url_to_dict(url)
    # Splitting the query on "=" turns a "time>=..." constraint into the
    # key "time>", hence the truncated keys below.
    assert options["time>"] == str(parse_dates(min_time))
    assert options["time<"] == str(parse_dates(max_time))
    assert options["latitude>"] == str(min_lat)
    assert options["latitude<"] == str(max_lat)
    assert options["longitude>"] == str(min_lon)
    assert options["longitude<"] == str(max_lon)
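
# Usage note (a sketch, not part of the test suite): a constrained CSV
# download URL like the one built above can be handed straight to pandas,
# e.g.
#
#     import pandas as pd
#     df = pd.read_csv(url)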