Example #1
# Scheduled scraper job for virus-korona.sk; the project's db, cache, and
# logger objects (plus imports: typing, date/datetime, json, requests,
# schedule) are assumed to be in scope.
def get_corona_counts(last_date: typing.Optional[date] = None):
    try:
        result = requests.get("https://virus-korona.sk/api.php")
        if result.status_code != 200:
            raise ConnectionError(
                "Unable to fetch data from https://virus-korona.sk/api.php")
        data = json.loads(result.text)
        # pop() takes the last element of each series, i.e. the newest entry.
        infected_data = data["tiles"]["k5"]["data"]["d"].pop()
        tested_data = data["tiles"]["k23"]["data"]["d"].pop()
        cured_data = data["tiles"]["k7"]["data"]["d"].pop()
        deaths_data = data["tiles"]["k8"]["data"]["d"].pop()
        updated_at = datetime.strptime(infected_data["d"], "%y%m%d").date()
        if last_date is None or updated_at > last_date:
            infected = int(infected_data["v"])
            tested = int(tested_data["v"])
            cured = int(cured_data["v"])
            deaths = int(deaths_data["v"])
            db.add_corona_log(
                infected=infected,
                cured=cured,
                tests=tested,
                deaths=deaths,
                log_date=updated_at,
            )
            cache.clear()
            if last_date is not None and updated_at > last_date:
                logger.info(
                    f"Scraped {infected}, {tested}; cancelling job for today")
                return schedule.CancelJob
        else:
            logger.info(f"Stats not updated")
    except Exception:
        logger.exception("Error while scraping data")
        return schedule.CancelJob
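
How a job like this gets wired up is not shown; below is a minimal sketch
using the schedule library's documented pattern, where returning
schedule.CancelJob from the job function unschedules it. The ten-minute
interval and the db.get_last_log_date() call are assumptions, not part of
the original code.

import time

import schedule

# Hypothetical wiring: poll every 10 minutes; the job cancels itself via
# schedule.CancelJob once data newer than last_date has been scraped.
schedule.every(10).minutes.do(get_corona_counts,
                              last_date=db.get_last_log_date())

while True:
    schedule.run_pending()  # a schedule.CancelJob return value drops the job
    time.sleep(60)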
Example #2
# Manually patch a single day's log; fields left as None are not changed.
def edit_log(date=None,
             infected=None,
             tests=None,
             cured=None,
             deaths=None,
             median=None):
    log_date = (datetime.datetime.strptime(date, "%Y-%m-%d").date()
                if date else db.get_last_log_date())
    log, _ = db.get_log_by_date(log_date)
    # Explicit None checks so a legitimate value of 0 can still be written.
    if infected is not None:
        log.infected = infected
    if tests is not None:
        log.tests = tests
    if cured is not None:
        log.cured = cured
    if deaths is not None:
        log.deaths = deaths
    if median is not None:
        log.median = median
    log.save()
    cache.clear()
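
edit_log is presumably invoked by hand for corrections; one illustrative
way to expose it is a small argparse wrapper (the flag names and the
script itself are hypothetical, not part of the project):

import argparse

# Hypothetical CLI: python fix_log.py --date 2020-04-01 --deaths 2
parser = argparse.ArgumentParser(description="Patch one day's corona log")
parser.add_argument("--date", help="log date as YYYY-MM-DD; default: latest")
parser.add_argument("--infected", type=int)
parser.add_argument("--tests", type=int)
parser.add_argument("--cured", type=int)
parser.add_argument("--deaths", type=int)
parser.add_argument("--median", type=int)
edit_log(**vars(parser.parse_args()))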
Example #3
# Refresh per-district data from mapa.covid.chat; daily logs are written
# only when some district changed (or when always_update is set).
def get_location_data(always_update: bool = False):
    try:
        result = requests.get("https://mapa.covid.chat/map_data")
        if result.status_code != 200:
            raise ConnectionError(
                "Unable to fetch data from https://mapa.covid.chat/map_data")
        map_data = json.loads(result.text)["districts"]
        is_updated = False
        # Preload known locations keyed by name to avoid per-record queries.
        location_map = {
            location.location: location
            for location in db.CoronaLocation.select()
        }
        for record in map_data:
            title = _normalize_location_title(record["title"])
            location, created = _get_or_create_location(title, location_map)
            last_updated = datetime.fromtimestamp(
                int(record["last_occurrence_timestamp"])).date()
            if created or last_updated > location.last_updated:
                location_map[title] = location
                location.last_updated = last_updated
                location.save()
                is_updated = True
        if always_update or is_updated:
            with coronastats.db_wrapper.database.atomic():
                for record in map_data:
                    title = _normalize_location_title(record["title"])
                    location = location_map[title]

                    # One log row per location per day.
                    location_log, _ = db.CoronaLocationLog.get_or_create(
                        location=location, date=datetime.today())
                    location_log.infected = record["amount"]["infected"]
                    location_log.cured = record["amount"]["recovered"]
                    location_log.deaths = record["amount"]["deaths"]
                    location_log.save()
            logger.info("Updated location data")
        cache.clear()
    except Exception:
        logger.exception("Error while scraping data")
        return schedule.CancelJob
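
The snippet calls two helpers that are not shown. A plausible sketch of
both follows; the names match the calls above, but the bodies are guesses
about behaviour, not the project's actual implementation:

from datetime import date

def _normalize_location_title(title: str) -> str:
    # Hypothetical: drop the "Okres " (district) prefix and surrounding
    # whitespace so API titles line up with the names stored in the DB.
    return title.replace("Okres ", "").strip()

def _get_or_create_location(title, location_map):
    # Hypothetical: reuse the preloaded cache; fall back to inserting a
    # new row with a sentinel last_updated date. Returns (location, created)
    # like peewee's get_or_create.
    if title in location_map:
        return location_map[title], False
    location = db.CoronaLocation.create(location=title,
                                        last_updated=date.min)
    return location, True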
Example #4
# Scraper for the official korona.gov.sk statistics page. Each figure is
# passed through _normalize_number together with the previous log's value,
# presumably as a fallback for unparsable text.
def get_korona_gov_data(
    last_date: typing.Optional[date] = None,
    overwrite_updated_at: typing.Optional[date] = None,
):
    try:
        result = requests.get(
            "https://korona.gov.sk/koronavirus-na-slovensku-v-cislach/")
        if result.status_code != 200:
            raise ConnectionError(
                "Unable to fetch data from " +
                "https://korona.gov.sk/koronavirus-na-slovensku-v-cislach/")
        wrapper = BeautifulSoup(result.text, "html.parser")
        c = (wrapper.find("main").findAll(
            "div", {"class": "govuk-width-container"})[1])
        date_text = get_element_with_comment(
            c, "REPLACE:koronastats-last-update").text
        # "Aktualizované" is Slovak for "Updated"; the one-day shift suggests
        # the published figures refer to the previous day.
        updated_at = overwrite_updated_at or (datetime.strptime(
            date_text, "Aktualizované %d. %m. %Y").date() - timedelta(days=1))

        last_log = db.get_last_log()

        if last_date is None or updated_at > last_date:
            infected = _normalize_number(
                get_element_with_comment(c,
                                         "REPLACE:koronastats-positives").text,
                last_log.infected)
            tested = _normalize_number(
                get_element_with_comment(c,
                                         "REPLACE:koronastats-lab-tests").text,
                last_log.tests)
            cured = _normalize_number(
                get_element_with_comment(c, "REPLACE:koronastats-cured").text,
                last_log.cured)
            deaths = _normalize_number(
                get_element_with_comment(c,
                                         "REPLACE:koronastats-deceased").text,
                last_log.deaths)
            median = _normalize_number(
                get_element_with_comment(c, "REPLACE:koronastats-median").text,
                last_log.median)
            hospitalized = _normalize_number(
                get_element_with_comment(
                    c, "REPLACE:koronastats-hospitalized").text,
                last_log.hospitalized)
            confirmed_hospitalized = _normalize_number(
                get_element_with_comment(
                    c, "REPLACE:koronastats-hospitalized-covid19").text,
                last_log.confirmed_hospitalized)
            confirmed_hospitalized_text = get_element_with_comment(
                c, "REPLACE:koronastats-hospitalized-covid19-intensive").text
            # The run-together words ("hospitalizovanýchs", "covid19z") match
            # the page's element text, which concatenates without whitespace.
            confirmed_hospitalized_text_match = re.match(
                (r"Počet hospitalizovanýchs potvrdeným covid19z toho na "
                 r"JIS: (.+) a na pľúcnej ventilácii: (.+)"),
                confirmed_hospitalized_text,
            )
            confirmed_hospitalized_icu = _normalize_number(
                confirmed_hospitalized_text_match.group(1),
                last_log.confirmed_hospitalized_icu)
            confirmed_hospitalized_ventilation = _normalize_number(
                confirmed_hospitalized_text_match.group(2),
                last_log.confirmed_hospitalized_ventilation)
            db.add_corona_log(
                infected=infected,
                cured=cured,
                tests=tested,
                deaths=deaths,
                log_date=updated_at,
                median=median,
                hospitalized=hospitalized,
                confirmed_hospitalized=confirmed_hospitalized,
                confirmed_hospitalized_icu=confirmed_hospitalized_icu,
                confirmed_hospitalized_ventilation=(
                    confirmed_hospitalized_ventilation),
            )
            cache.clear()
            if last_date is not None and updated_at > last_date:
                logger.info(
                    f"Scraped {infected}, {tested}; cancelling job for today")
                return schedule.CancelJob

    except Exception:
        logger.exception("Error while scraping data")
        return schedule.CancelJob
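
_normalize_number and get_element_with_comment are likewise project
helpers that are not shown. The sketch below infers their signatures from
the call sites; the implementations are guesses:

import re

from bs4 import Comment

def _normalize_number(text, fallback):
    # Hypothetical: strip everything but digits (the page uses spaces as
    # thousands separators, e.g. "1 234"); keep the previous log's value
    # when nothing parsable is left.
    digits = re.sub(r"\D", "", text or "")
    return int(digits) if digits else fallback

def get_element_with_comment(container, comment):
    # Hypothetical: locate the "REPLACE:..." HTML comment marker and return
    # the element that follows it.
    marker = container.find(
        string=lambda s: isinstance(s, Comment) and comment in s)
    return marker.find_next() if marker else None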