Example 1
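# Reads a download counter back through Dao.get_package_version_metrics for the
# interval under test, then repeats the query with start/end bounds and
# fill_zeros=True. Relies on pytest fixtures (dao, channel, db, package_version,
# interval) and module-level imports (datetime, round_timestamp) from the quetz
# test suite.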
def test_get_package_version_metrics_intervals(dao: Dao, channel, db,
                                               package_version, interval):

    now = datetime.datetime(2020, 10, 1, 10, 1, 10)
    dao.incr_download_count(channel.name,
                            package_version.filename,
                            package_version.platform,
                            timestamp=now)

    metrics = dao.get_package_version_metrics(package_version.id, interval,
                                              "download")
    timestamp_interval = round_timestamp(now, interval)
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [(timestamp_interval, 1)]

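    # with fill_zeros=True the series is padded with zero-count entries up to `end`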
    end = timestamp_interval.replace(year=2021)
    metrics = dao.get_package_version_metrics(
        package_version.id,
        interval,
        "download",
        start=timestamp_interval,
        end=end,
        fill_zeros=True,
    )

    metrics_dict = [(m.timestamp, m.count) for m in metrics]

    assert metrics_dict[0] == (timestamp_interval, 1)
    assert metrics_dict[-1] == (end, 0)
Example 2
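# Records one download per package version and checks the per-package daily
# ("D") series returned by the /metrics/channels/<name> endpoint. Relies on
# pytest fixtures (auth_client, public_channel, package_version_factory, db,
# dao) and module-level imports (datetime, unittest.mock.ANY).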
def test_get_channel_download_count(
    auth_client, public_channel, package_version_factory, db, dao: Dao
):

    versions = [package_version_factory(str(i)) for i in range(3)]

    now = datetime.utcnow()

    for v in versions:
        dao.incr_download_count(
            public_channel.name,
            v.filename,
            v.platform,
            timestamp=now,
        )

    endpoint_url = f"/metrics/channels/{public_channel.name}"

    response = auth_client.get(endpoint_url)
    assert response.status_code == 200

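    # each download is bucketed at midnight of `now` in the daily series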
    expected = {
        "server_timestamp": ANY,
        "metric_name": "download",
        "period": "D",
        "packages": {
            f"{v.platform}/{v.filename}": {
                "series": [
                    {
                        "timestamp": now.replace(
                            minute=0, second=0, microsecond=0, hour=0
                        ).isoformat(),
                        "count": 1,
                    }
                ]
            }
            for v in versions
        },
    }

    assert response.json() == expected
Example 3
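# Checks that Dao.incr_download_count bumps one PackageVersionMetric row per
# interval granularity and keeps PackageVersion.download_count in sync. Relies
# on pytest fixtures (dao, channel, db, package_version, session_maker) and
# module-level imports (datetime, PackageVersionMetric, IntervalType).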
def test_increment_download_count(dao: Dao, channel, db, package_version,
                                  session_maker):

    assert package_version.download_count == 0
    now = datetime.datetime(2020, 10, 1, 10, 1, 10)
    dao.incr_download_count(channel.name,
                            package_version.filename,
                            package_version.platform,
                            timestamp=now)

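    # one counter row exists for every IntervalType granularity of this timestamp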
    download_counts = db.query(PackageVersionMetric).all()
    for m in download_counts:
        assert m.count == 1

    assert len(download_counts) == len(IntervalType)

    db.refresh(package_version)
    assert package_version.download_count == 1

    dao.incr_download_count(channel.name,
                            package_version.filename,
                            package_version.platform,
                            timestamp=now)
    download_counts = db.query(PackageVersionMetric).all()
    for m in download_counts:
        assert m.count == 2

    assert len(download_counts) == len(IntervalType)

    db.refresh(package_version)
    assert package_version.download_count == 2

    dao.incr_download_count(
        channel.name,
        package_version.filename,
        package_version.platform,
        timestamp=now + datetime.timedelta(days=1),
    )

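    # the next-day timestamp only adds new rows for the finer granularities
    # (hour and day); the coarser buckets are reused, hence the +2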
    download_counts = db.query(PackageVersionMetric).all()
    assert len(download_counts) == len(IntervalType) + 2

    db.refresh(package_version)
    assert package_version.download_count == 3
Example 4
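# Exercises the per-package-version metrics endpoint with the default daily
# period and with explicit start, end, period and fill_zeros query parameters.
# Relies on pytest fixtures (auth_client, public_channel, package_version, db,
# dao) and module-level imports (datetime, unittest.mock.ANY).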
def test_get_download_count(auth_client, public_channel, package_version, db,
                            dao: Dao):

    timestamps = [
        "2020-01-05T21:01",
        "2020-01-06T22:10",
        "2020-02-18T10:10",
    ]

    month_day = []
    for t in timestamps:
        dt = datetime.fromisoformat(t)
        dao.incr_download_count(
            public_channel.name,
            package_version.filename,
            package_version.platform,
            timestamp=dt,
        )
        month_day.append((dt.month, dt.day))

    endpoint_url = (
        f"/metrics/channels/{public_channel.name}/"
        f"packages/{package_version.package_name}/"
        f"versions/{package_version.platform}/{package_version.filename}")

    response = auth_client.get(endpoint_url)
    assert response.status_code == 200

    assert response.json() == {
        "server_timestamp": ANY,
        "period": "D",
        "metric_name": "download",
        "total": 3,
        "series": [
            {"timestamp": f"2020-{m:02}-{d:02}T00:00:00", "count": 1}
            for m, d in month_day
        ],
    }

    response = auth_client.get(endpoint_url + "?start=2020-01-05T10:00")
    assert response.status_code == 200

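    # the daily bucket for 2020-01-05 is timestamped 00:00, which precedes the
    # requested start of 10:00, so only the last two downloads are returned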
    assert response.json() == {
        "server_timestamp": ANY,
        "period": "D",
        "metric_name": "download",
        "total": 2,
        "series": [
            {"timestamp": f"2020-{m:02}-{d:02}T00:00:00", "count": 1}
            for m, d in month_day[1:]
        ],
    }

    # fill zeros
    response = auth_client.get(endpoint_url + "?start=2020-01-05T20:00"
                               "&end=2020-01-05T22:00"
                               "&period=H"
                               "&fill_zeros=true")
    assert response.status_code == 200

    assert response.json() == {
        "server_timestamp": ANY,
        "period": "H",
        "metric_name": "download",
        "total": 1,
        "series": [
            {"timestamp": "2020-01-05T20:00:00", "count": 0},
            {"timestamp": "2020-01-05T21:00:00", "count": 1},
            {"timestamp": "2020-01-05T22:00:00", "count": 0},
        ],
    }

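    # monthly aggregation: two downloads fall in January and one in February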
    response = auth_client.get(endpoint_url + "?period=M")
    assert response.status_code == 200

    assert response.json() == {
        "server_timestamp": ANY,
        "period": "M",
        "metric_name": "download",
        "total": 3,
        "series": [
            {"timestamp": f"2020-{m:02}-01T00:00:00", "count": c}
            for m, c in [(1, 2), (2, 1)]
        ],
    }
Example 5
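# Covers Dao.get_package_version_metrics for hourly and daily intervals, with
# and without start/end bounds and with fill_zeros padding. Relies on pytest
# fixtures (dao, channel, db, package_version) and module-level imports
# (datetime, IntervalType).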
def test_get_package_version_metrics(dao: Dao, channel, db, package_version):

    now = datetime.datetime(2020, 10, 1, 10, 1, 10)
    dao.incr_download_count(channel.name,
                            package_version.filename,
                            package_version.platform,
                            timestamp=now)

    metrics = dao.get_package_version_metrics(package_version.id,
                                              IntervalType.hour, "download")

    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    timestamp = now.replace(minute=0, second=0)

    assert metrics_dict == [(timestamp, 1)]

    hour = datetime.timedelta(hours=1)
    day = datetime.timedelta(days=1)

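    # a bounded query without fill_zeros still returns only the non-empty buckets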
    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        start=now - hour,
        end=now + hour,
    )

    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [(timestamp, 1)]

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        start=now - hour,
        end=now + hour,
        fill_zeros=True,
    )

    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp - hour, 0),
        (timestamp, 1),
        (timestamp + hour, 0),
    ]

    # open-ended ranges: omit end, then start, then both

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        start=now - hour,
        fill_zeros=True,
    )
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp - hour, 0),
        (timestamp, 1),
    ]

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        end=now + hour,
        fill_zeros=True,
    )
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp, 1),
        (timestamp + hour, 0),
    ]

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        fill_zeros=True,
    )
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp, 1),
    ]

    # day interval
    timestamp_day = timestamp.replace(hour=0)
    metrics = dao.get_package_version_metrics(package_version.id,
                                              IntervalType.day, "download")
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [(timestamp_day, 1)]

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.day,
        "download",
        start=now - day,
        end=now + day,
        fill_zeros=True,
    )

    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp_day - day, 0),
        (timestamp_day, 1),
        (timestamp_day + day, 0),
    ]

    # a second download two hours later adds a second non-zero bucket
    dao.incr_download_count(
        channel.name,
        package_version.filename,
        package_version.platform,
        timestamp=now + datetime.timedelta(hours=2),
    )

    metrics = dao.get_package_version_metrics(
        package_version.id,
        IntervalType.hour,
        "download",
        fill_zeros=True,
    )
    metrics_dict = [(m.timestamp, m.count) for m in metrics]
    assert metrics_dict == [
        (timestamp, 1),
        (timestamp + hour, 0),
        (timestamp + 2 * hour, 1),
    ]
Example 6
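# Pulls hourly download metrics from each configured mirror's metrics endpoint
# and replays them into the local database via Dao.incr_download_count (quetz's
# metrics synchronisation task; the quetz import path below is assumed).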
import logging
from datetime import datetime
from typing import Optional

import requests

from quetz.dao import Dao  # assumed import path within quetz


def synchronize_metrics_from_mirrors(
    channel_name: str,
    dao: Dao,
    session: requests.Session,
    now: Optional[datetime] = None,
):
    logger = logging.getLogger("quetz")
    # resolve the default reference time at call time rather than at import time
    if now is None:
        now = datetime.utcnow()
    channel = dao.get_channel(channel_name)
    if not channel:
        return
    for m in channel.mirrors:
        if not m.metrics_endpoint:
            logger.warning(
                f"metrics endpoint not configured for mirror {m.url}."
                "Skipping metrics synchronisation"
            )
            continue
        query_str = ["period=H"]
        start_time: Optional[datetime]
        if m.last_synchronised:
            start_time = m.last_synchronised.replace(minute=0, second=0, microsecond=0)
            query_str.append(f"start={start_time.isoformat()}")
        else:
            start_time = None

        # exclude incomplete intervals (the current hour)
        end_time = now.replace(minute=0, second=0, microsecond=0)

        if start_time == end_time:
            logger.debug(f"metrics data for mirror {m.url} are up-to-date")
            continue

        query_str.append(f"end={end_time.isoformat()}")

        metrics_url = m.metrics_endpoint + "?" + "&".join(query_str)
        response = session.get(metrics_url)

        if response.status_code != 200:
            logger.error(
                f"mirror server {metrics_url} returned bad response with code "
                f"{response.status_code} and message {response.text}"
            )
            continue

        response_data = response.json()
        try:
            packages = response_data["packages"]
        except KeyError:
            logger.error(
                f"malfromated respose received from {metrics_url}: "
                "missing 'packages' key"
            )
            continue

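        # keys are "<platform>/<filename>"; each series entry is an hourly
        # bucket timestamp with its download count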
        for platform_filename, data in packages.items():
            platform, filename = platform_filename.split('/')
            for s in data["series"]:
                timestamp = datetime.fromisoformat(s["timestamp"])
                count = s["count"]
                dao.incr_download_count(
                    channel_name, filename, platform, timestamp, count
                )
        logger.debug(f"synchronized metrics from {metrics_url}")
        m.last_synchronised = end_time
        dao.db.commit()