def test_gzip_error(null_logger, tmpdir, monkeypatch, uuid4):
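    # gzip.open is patched to raise, so extracting the downloaded .gz asset
    # should fail and surface as a GZExtractionError.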
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    monkeypatch.setattr(gzip, "open", raise_test_exception)
    url = "file://%s" % os.path.abspath("./tests/assets/4.json.gz")

    with pytest.raises(GZExtractionError):
        udc = USTDownloadCache(null_logger, tmpdir)

        mr = MockResponse("", 200, url=url)
        monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)
        udc.get_data_from_url(url)


def test_download_error(null_logger, tmpdir, monkeypatch, uuid4):
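    # requests.get is patched to raise, so the failed download should be
    # reported as a DownloadError.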
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    monkeypatch.setattr(requests, "get", raise_test_exception)
    url = "file://%s" % os.path.abspath("./tests/assets/1.json")

    with pytest.raises(DownloadError):
        udc = USTDownloadCache(null_logger, tmpdir)
        udc.get_data_from_url(url)


def test_download_missing_metadata(null_logger, tmpdir, monkeypatch, uuid4):
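    # Asset 3.json presumably lacks the cache metadata block, so parsing
    # should fail and no file should be written to the cache directory.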
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    url = "file://%s" % os.path.abspath("./tests/assets/3.json")

    with pytest.raises(Exception):
        udc = USTDownloadCache(null_logger, tmpdir)

        mr = MockResponse("", 200, url=url)
        monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)
        udc.get_data_from_url(url)

    assert not os.path.exists(tmpdir.join("99"))


def test_download_404(null_logger, tmpdir, monkeypatch, uuid4):
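    # A 404 response should raise a DownloadError whose message includes the
    # HTTP status code.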
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)

    mr = MockResponse("", 404)
    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)

    url = "file://%s" % os.path.abspath("./tests/assets/1.json")

    with pytest.raises(DownloadError) as de:
        udc = USTDownloadCache(null_logger, tmpdir)
        udc.get_data_from_url(url)

    assert "404" in str(de.value)


def test_download_get_data(null_logger, tmpdir, monkeypatch, uuid4):
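    # Happy path: download 1.json and verify the parsed payload is returned.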
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    url = "file://%s" % os.path.abspath("./tests/assets/1.json")

    mr = MockResponse("", 200, url=url)
    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)

    udc = USTDownloadCache(null_logger, tmpdir)
    data = udc.get_data_from_url(url)
    assert data["a"] == 1
    assert data["b"] == 2
    assert data["c"] == 3


def test_download_gzip(null_logger, tmpdir, monkeypatch, uuid4):
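    # The gzipped asset should be decompressed transparently before the JSON
    # payload is parsed. Patching uuid.uuid4 mirrors the other tests so the
    # cached file gets a predictable name (assumed intent, since the uuid4
    # fixture is otherwise unused in this test).
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)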
    url = "file://%s" % os.path.abspath("./tests/assets/4.json.gz")

    udc = USTDownloadCache(null_logger, tmpdir)

    mr = MockResponse("", 200, url=url)
    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)
    data = udc.get_data_from_url(url)

    assert data["a"] == "I"
    assert data["b"] == "II"
    assert data["c"] == "III"


def test_download_creates_file_cache_contents(null_logger, tmpdir, monkeypatch,
                                              uuid4):
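    # After a successful download, file_cache.json should record the URL, the
    # cached file path, the timestamp, and the TTL for the entry.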
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    url = "file://%s" % os.path.abspath("./tests/assets/1.json")

    mr = MockResponse("", 200, url=url)
    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)

    udc = USTDownloadCache(null_logger, tmpdir)
    udc.get_data_from_url(url)
    with open(tmpdir.join("file_cache.json")) as f:
        cache_contents = json.load(f)

    expected_cache_contents = {
        url: {
            "url": url,
            "path": str(tmpdir.join("99")),
            "timestamp": 1591401600,
            "ttl": 60,
        }
    }
    assert cache_contents == expected_cache_contents


def test_download_cache_expired(null_logger, tmpdir, monkeypatch, uuid4):
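    # With CachedFile.is_expired forced to True, the second request should
    # re-download the file, so the cache entry ends up pointing at the file
    # named with the next mock uuid (100).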
    monkeypatch.setattr(uuid, "uuid4", uuid4.get)
    monkeypatch.setattr(CachedFile, "is_expired", True)
    url = "file://%s" % os.path.abspath("./tests/assets/1.json")

    udc = USTDownloadCache(null_logger, tmpdir)

    mr = MockResponse("", 200, url=url)
    monkeypatch.setattr(requests, "get", lambda *args, **kwargs: mr)
    udc.get_data_from_url(url)
    # Downloading a second time will cause the mock uuid to increment
    udc.get_data_from_url(url)
    with open(tmpdir.join("file_cache.json")) as f:
        cache_contents = json.load(f)

    expected_cache_contents = {
        url: {
            "url": url,
            "path": str(tmpdir.join("100")),
            "timestamp": 1591401600,
            "ttl": 60,
        }
    }
    assert cache_contents == expected_cache_contents