def test_download_url_constrained_gtopp(e):
    """Test a constrained download URL for the gtoppAT dataset.

    Renamed from ``test_download_url_constrained``: another test in this
    module used the same name, so this definition was shadowed and pytest
    silently never ran it.
    """
    dataset_id = "gtoppAT"
    variables = ["commonName", "yearDeployed", "serialNumber"]
    min_time = "2002-06-30T13:53:16Z"
    max_time = "2018-10-27T04:54:00Z"
    min_lat = -42
    max_lat = 42
    min_lon = 0
    max_lon = 360
    constraints = {
        "time>=": min_time,
        "time<=": max_time,
        "latitude>=": min_lat,
        "latitude<=": max_lat,
        "longitude>=": min_lon,
        "longitude<=": max_lon,
    }
    url = e.get_download_url(
        dataset_id=dataset_id,
        variables=variables,
        response="csv",
        constraints=constraints,
    )
    assert url.startswith(f"{e.server}/{e.protocol}/{dataset_id}.csv?")
    options = _url_to_dict(url)
    # NOTE: the generated URL carries the constraint keys without the "="
    # (e.g. "time>" for the "time>=" constraint) and times as parsed numbers.
    assert options["time>"] == str(parse_dates(min_time))
    assert options["time<"] == str(parse_dates(max_time))
    assert options["latitude>"] == str(min_lat)
    assert options["latitude<"] == str(max_lat)
    assert options["longitude>"] == str(min_lon)
    assert options["longitude<"] == str(max_lon)
def test_search_url_valid_request(e):
    """Test that a valid request builds the expected advanced-search URL.

    (The previous docstring claimed this tested an HTTPError on a bad
    request, which did not match the code.)
    """
    min_time = "1800-01-01T12:00:00Z"
    max_time = "1950-01-01T12:00:00Z"
    kw = {"min_time": min_time, "max_time": max_time}
    url = e.get_search_url(**kw)
    assert url.startswith(f"{e.server}/search/advanced.{e.response}?")
    options = _url_to_dict(url)
    assert options.pop("minTime") == str(parse_dates(min_time))
    assert options.pop("maxTime") == str(parse_dates(max_time))
    assert options.pop("itemsPerPage") == str(1000)
    # Every remaining search parameter should be the wildcard "(ANY)",
    # except "protocol", which echoes the server object's protocol.
    for k, v in options.items():
        if k == "protocol":
            assert v == e.protocol
        else:
            assert v == "(ANY)"
def test_parse_dates_from_string():
    """Test if parse_dates can take string input."""
    # Several spellings of the Unix epoch; all must parse to 0 seconds.
    epoch_strings = (
        "1970-01-01T00:00:00",
        "1970-01-01T00:00:00Z",
        "1970-01-01",
        "1970/01/01",
        "1970-1-1",
        "1970/1/1",
    )
    for date_string in epoch_strings:
        assert parse_dates(date_string) == 0
def test_download_url_constrained(e):
    """Test a constraint download URL."""
    dataset_id = "org_cormp_cap2"
    variables = ["station", "z"]
    min_time = "2000-03-23T00:00:00Z"
    max_time = "2000-03-30T14:08:00Z"
    # A point time-series: min and max coordinates coincide.
    min_lat = max_lat = 32.8032
    min_lon = max_lon = -79.6204
    constraints = {
        "time>=": min_time,
        "time<=": max_time,
        "latitude>=": min_lat,
        "latitude<=": max_lat,
        "longitude>=": min_lon,
        "longitude<=": max_lon,
    }
    url = e.get_download_url(
        dataset_id=dataset_id,
        variables=variables,
        response="csv",
        constraints=constraints,
    )
    # The URL must be reachable and must point at the dataset's csv endpoint.
    assert url == check_url_response(url, follow_redirects=True)
    assert url.startswith(f"{e.server}/{e.protocol}/{dataset_id}.csv?")
    options = _url_to_dict(url)
    expected = {
        "time>": str(parse_dates(min_time)),
        "time<": str(parse_dates(max_time)),
        "latitude>": str(min_lat),
        "latitude<": str(max_lat),
        "longitude>": str(min_lon),
        "longitude<": str(max_lon),
    }
    for key, value in expected.items():
        assert options[key] == value
def test_parse_dates_nonutc_pendulum():
    """Non-UTC timestamp at 1970-1-1 must have the zone offset."""
    stamp = pendulum.datetime(1970, 1, 1, 0, 0, 0, tz="America/Vancouver")
    # Midnight local time west of Greenwich is |utcoffset| seconds after epoch.
    expected_seconds = abs(stamp.utcoffset().total_seconds())
    assert parse_dates(stamp) == expected_seconds
def test_parse_dates_nonutc_datetime():
    """Non-UTC timestamp at 1970-1-1 must have the zone offset.

    Uses ``localize`` to attach the zone: passing a pytz zone as ``tzinfo``
    to the ``datetime`` constructor is a documented pytz pitfall — it picks
    the zone's historical LMT offset (-4:56 for US/Eastern) instead of the
    standard -5:00 offset.
    """
    d = pytz.timezone("US/Eastern").localize(datetime(1970, 1, 1))
    # Midnight EST is |utcoffset| seconds after the UTC epoch.
    assert parse_dates(d) == abs(d.utcoffset().total_seconds())
def test_parse_dates_utc_pendulum():
    """UTC timestamp at 1970-1-1 must be 0."""
    epoch = pendulum.datetime(1970, 1, 1, 0, 0, 0, tz="UTC")
    assert parse_dates(epoch) == 0
def test_parse_dates_utc_datetime():
    """UTC timestamp at 1970-1-1 must be 0."""
    utc_epoch = datetime(1970, 1, 1, 0, 0, tzinfo=pytz.utc)
    assert parse_dates(utc_epoch) == 0
def test_parse_dates_naive_datetime():
    """Naive timestamp at 1970-1-1 must be 0."""
    naive_epoch = datetime(1970, 1, 1, 0, 0)
    assert parse_dates(naive_epoch) == 0