def update_cache(logger=None) -> None: logger = logger or script_logger.create_logger("bikes") logger.info(f"(updating cache) -> Fetching data from http://api.citybik.es/v2/networks/") stations = BikeStation.objects.filter(is_active=True) target_network = stations.first().network target_stations = set(stations.values_list("station_id", flat=True)) try: stations_data = list(fetch_json(target_network, target_stations)) except Exception as e: logger.error(f"could not fetch data {e}") raise logger.info(f"data fetched for {len(stations_data)} stations") station_name_map = dict(stations.values_list('station_id', 'name')) for ix, station in enumerate(stations_data): stations_data[ix]['name'] = station_name_map[station['id']] logger.debug(f"found data for station {station_name_map[station['id']]}") data_to_cache = { 'data':stations_data, 'updated_at':timezone.now().isoformat() } file_name = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME) logger.info(f"updating tmp file {file_name}") with open(file_name, "w") as f: json.dump(data_to_cache, f)
def __init__(self):
    self.logger = script_logger.get_hub_logger()
    self._filepath = tmp_lib.generate_named_tmp_file("hub-state.json")
    if not os.path.exists(self._filepath):
        self.logger.warning(
            "hubstate not found, creating hubstate file with default values"
        )
        data = {self.STATE_KEY_IS_ONLINE: True}
        with open(self._filepath, "w") as f:
            json.dump(data, f)
def update_cache(logger=None) -> None: logger = logger or script_logger.create_logger("wotd") logger.info( f"(updating cache) -> Fetching data from {WORD_OF_THE_DAY_API_URL}") try: data = fetch_json() except Exception as e: logger.error(f"Failed to fetch data: {e}") raise logger.info(f'downloaded data {data}') file_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME) with open(file_path, "w") as f: json.dump(data, f)
def update_cache(logger=None) -> None: logger = logger or script_logger.create_logger("weather") logger.info(f"(updating cache) -> Fetching data from {API_BASE_URL}") try: data = fetch_json() except Exception as e: logger.error(f"Failed to fetch data: {e}") raise data_hash = hash_weather_dict(data) data['hash'] = data_hash logger.info(f"data fetched, hash:{data_hash}") file_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME) with open(file_path, "w") as f: json.dump(data, f)
def update_cache(year=None, logger=None) -> None:
    logger = logger or script_logger.create_logger("holidays")
    year = year or dt.datetime.now().year
    logger.info(f"(updating cache) -> Fetching holiday data for year {year}")
    try:
        data = fetch_json(year)['response']['holidays']
    except Exception as e:
        logger.error(f"could not fetch data {e}")
        raise
    # Sort holidays chronologically by their ISO date.
    data.sort(
        key=lambda h: dt.datetime.strptime(
            h['date']['iso'].split('T')[0], '%Y-%m-%d').date())
    logger.info(f"(updating cache) -> Fetched {len(data)} holidays")
    file_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    with open(file_path, "w") as f:
        json.dump(data, f)
def get_updated_ipv4() -> str:
    """
    Check if IPv4 has been updated.

    Returns None if no change.
    """
    tmp_file = generate_named_tmp_file(IP_ADDRESS_FILE_NAME)
    current_ipv4 = get_local_ipv4()
    if not os.path.exists(tmp_file):
        # No previous IP recorded.
        with open(tmp_file, "w") as f:
            f.write(current_ipv4)
        return current_ipv4
    else:
        # A previous IP was recorded.
        with open(tmp_file, "r") as f:
            previous_ipv4 = f.read()
        if previous_ipv4 == current_ipv4:
            return None
        else:
            with open(tmp_file, "w") as f:
                f.write(current_ipv4)
            return current_ipv4
def read_cache() -> Dict:
    file_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    if not os.path.exists(file_path):
        raise HolidayCacheIsEmptyError()
    with open(file_path, "r") as f:
        return json.load(f)
def delete_cache() -> None:
    file_path = tmp_lib.generate_named_tmp_file(CACHE_FILE_NAME)
    if os.path.exists(file_path):
        os.remove(file_path)
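# All of the cache and state helpers above resolve their file paths through
# generate_named_tmp_file (used directly or via tmp_lib). That helper is not
# shown in this section; judging only by the call sites, which treat the return
# value as a path that may or may not exist yet, it most likely maps a file
# name to a stable path inside the system temp directory. A minimal sketch
# under that assumption (not the project's actual implementation):
import os
import tempfile


def generate_named_tmp_file(file_name: str) -> str:
    # Assumed behaviour: return a deterministic path in the system temp
    # directory for the given name, without creating the file itself
    # (callers check os.path.exists before reading).
    return os.path.join(tempfile.gettempdir(), file_name)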