Beispiel #1
0
def update_cache(logger=None) -> None:
    """Refresh the bikes cache file.

    Fetches live station data for the active network, swaps in the
    locally configured display names, and writes the result (plus a
    timestamp) to the tmp cache file.

    Args:
        logger: optional preconfigured logger; a "bikes" logger is
            created when omitted.

    Raises:
        Re-raises any exception from ``fetch_json`` after logging it.
    """
    logger = logger or script_logger.create_logger("bikes")
    logger.info("(updating cache) -> Fetching data from http://api.citybik.es/v2/networks/")

    stations = BikeStation.objects.filter(is_active=True)
    first_station = stations.first()
    if first_station is None:
        # Empty queryset: .first() returns None, so the original code
        # would crash on .network. Nothing to fetch — skip gracefully.
        logger.info("no active bike stations configured, skipping update")
        return
    target_network = first_station.network
    target_stations = set(stations.values_list("station_id", flat=True))

    try:
        stations_data = list(fetch_json(target_network, target_stations))
    except Exception as e:
        logger.error(f"could not fetch data {e}")
        raise

    logger.info(f"data fetched for {len(stations_data)} stations")

    # Replace API-provided names with the locally configured ones.
    station_name_map = dict(stations.values_list('station_id', 'name'))
    for station in stations_data:
        station['name'] = station_name_map[station['id']]
        logger.debug(f"found data for station {station['name']}")

    data_to_cache = {
        'data': stations_data,
        'updated_at': timezone.now().isoformat(),
    }

    file_name = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    logger.info(f"updating tmp file {file_name}")
    with open(file_name, "w") as f:
        json.dump(data_to_cache, f)
    def handle(self, *args, **kwargs):
        """Refresh the weather cache, but only while the hub is online."""
        log = script_logger.create_logger("weather")
        state = hubstate.HubState()
        if state.getkey(state.STATE_KEY_IS_ONLINE):
            openweather.update_cache(logger=log)
        else:
            log.debug("skipping weather update: hubstate offline")
Beispiel #3
0
    def handle(self, *args, **kwargs):
        """Refresh the bikes cache, but only while the hub is online."""
        log = script_logger.create_logger("bikes")
        state = hubstate.HubState()
        if state.getkey(state.STATE_KEY_IS_ONLINE):
            bikes.update_cache(logger=log)
        else:
            log.debug("skipping bikes update, hubstate is offline")
Beispiel #4
0
def update_cache(logger=None) -> None:
    """Fetch word-of-the-day data and write it to the tmp cache file.

    Re-raises any fetch failure after logging it.
    """
    log = logger if logger is not None else script_logger.create_logger("wotd")
    log.info(
        f"(updating cache) -> Fetching data from {WORD_OF_THE_DAY_API_URL}")
    try:
        payload = fetch_json()
    except Exception as exc:
        log.error(f"Failed to fetch data: {exc}")
        raise

    log.info(f'downloaded data {payload}')

    cache_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    with open(cache_path, "w") as fh:
        json.dump(payload, fh)
Beispiel #5
0
def update_cache(logger=None) -> None:
    """Fetch weather data, tag it with a content hash, and cache it.

    Re-raises any fetch failure after logging it.
    """
    log = logger if logger is not None else script_logger.create_logger("weather")
    log.info(f"(updating cache) -> Fetching data from {API_BASE_URL}")
    try:
        payload = fetch_json()
    except Exception as exc:
        log.error(f"Failed to fetch data: {exc}")
        raise

    # The hash lets consumers detect whether the data actually changed.
    digest = hash_weather_dict(payload)
    payload['hash'] = digest
    log.info(f"data fetched, hash:{digest}")

    cache_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    with open(cache_path, "w") as fh:
        json.dump(payload, fh)
Beispiel #6
0
def refresh_db_data(logger=None):
    """Sync the Vulnerability table against the latest CVE feed.

    New, non-rejected CVEs are bulk-created; rows already shown once
    that have dropped out of the feed are deleted.
    Re-raises any feed-fetch failure after logging it.
    """
    log = logger or script_logger.create_logger("vulnerabilities")
    log.info(
        f"(updating vulnerability data) -> Fetching data from {API_URL}")

    known_ids = set(
        Vulnerability.objects.values_list("cve_identifier", flat=True))

    try:
        feed = get_feed_data()
    except Exception as exc:
        log.error(f"Could not fetch data {exc}")
        raise

    to_create = []
    still_in_feed = set()
    for entry in feed['CVE_Items']:
        cve_id = entry['cve']['CVE_data_meta']['ID']

        # Already stored: just note that the feed still carries it.
        if cve_id in known_ids:
            still_in_feed.add(cve_id)
            continue

        description = " ".join(
            dd['value']
            for dd in entry['cve']['description']['description_data'])
        if vulnerability_is_rejected(description):
            continue

        to_create.append(
            Vulnerability(displayed_at=None,
                          cve_identifier=cve_id,
                          description=description))

    log.info(f"adding {len(to_create)} vulnerability items")
    Vulnerability.objects.bulk_create(to_create)

    # Prune rows that were displayed already and no longer appear
    # in the current feed.
    stale = Vulnerability.objects.filter(
        Q(displayed_once=True)
        & ~Q(cve_identifier__in=still_in_feed))

    if stale.exists():
        log.info(f"deleting {stale.count()} vulnerability items")
        stale.delete()
Beispiel #7
0
def update_cache(year=None, logger=None) -> None:
    """Fetch holidays for *year* (default: current year), sort them by
    date, and write them to the tmp cache file.

    Re-raises any fetch failure after logging it.
    """
    log = logger if logger is not None else script_logger.create_logger("holidays")
    if year is None:
        year = dt.datetime.now().year
    log.info(f"(updating cache) -> Fetching holiday data for year {year}")

    try:
        holidays = fetch_json(year)['response']['holidays']
    except Exception as exc:
        log.error(f"could not fetch data {exc}")
        raise

    # Order chronologically; the ISO field may carry a time part, so
    # only the date portion before 'T' is parsed.
    def holiday_date(holiday):
        iso_day = holiday['date']['iso'].split('T')[0]
        return dt.datetime.strptime(iso_day, '%Y-%m-%d').date()

    holidays = sorted(holidays, key=holiday_date)

    log.info(f"(updating cache) -> Fetched {len(holidays)} holidays")

    cache_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    with open(cache_path, "w") as fh:
        json.dump(holidays, fh)
 def handle(self, *args, **kwargs):
     """Refresh the vulnerability DB data unconditionally."""
     log = script_logger.create_logger("vulnerabilities")
     vulnerability.refresh_db_data(logger=log)
Beispiel #9
0
 def handle(self, *args, **kwargs):
     """Refresh the holidays cache unconditionally."""
     log = script_logger.create_logger("holidays")
     calendarific.update_cache(logger=log)
Beispiel #10
0
 def handle(self, *args, **kwargs):
     """Refresh the word-of-the-day cache unconditionally."""
     log = script_logger.create_logger("wotd")
     wordnik.update_cache(logger=log)