def test_basic():
    news = client.get_news_from_sources()
    print(news)
    assert len(news) > 0

    news = client.get_news_from_sources(sources=['ign'], sortBy=['latest'])
    print(news)
    assert len(news) > 0

    print('test_basic passed!')
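# A standard '__main__' guard (an assumption; the original module may wire this up
# differently) lets the smoke test above be run directly from the command line.
if __name__ == '__main__':
    test_basic()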
def run():
    while True:
        news_list = news_api_client.get_news_from_sources(NEWS_SOURCES)

        num_of_new_news = 0

        # Remove duplicates: hash each title and skip news already recorded in Redis.
        for news in news_list:
            news_digest = hashlib.md5(news['title'].encode('utf-8')).hexdigest()

            if redis_client.get(news_digest) is None:
                num_of_new_news += 1
                news['digest'] = news_digest

                # Backfill 'publishedAt' with the current UTC time if the source omitted it.
                if news['publishedAt'] is None:
                    news['publishedAt'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')

                # Record this news in Redis, keyed by its digest, with a three-day expiry.
                redis_client.set(news_digest, 'True')
                redis_client.expire(news_digest, NEWS_TIME_OUT_IN_SECONDS)

                cloudAMQP_client.sendMessage(news)

        logger.info("Fetched %d news.", num_of_new_news)

        # Use the CloudAMQP client's sleep rather than Python's time.sleep so the
        # connection heartbeat is maintained while waiting.
        cloudAMQP_client.sleep(SLEEP_TIME_IN_SECONDS)
def run():
    while True:
        news_list = news_api_client.get_news_from_sources(NEWS_SOURCES)

        num_of_new_news = 0

        for news in news_list:
            news_digest = hashlib.md5(news['title'].encode('utf-8')).hexdigest()

            if redis_client.get(news_digest) is None:
                num_of_new_news += 1
                news['digest'] = news_digest

                if news['publishedAt'] is None:
                    news['publishedAt'] = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")

                redis_client.set(news_digest, 'True')
                redis_client.expire(news_digest, NEWS_TIMEOUT_IN_SECONDS)

                cloudAMQP_client.sendMessage(news)

        LOGGER.info("Fetched %d news.", num_of_new_news)

        cloudAMQP_client.sleep(SLEEP_TIME_IN_SECONDS)
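# Minimal, self-contained sketch of the Redis de-duplication used in run() above.
# Assumptions (not from the original module): redis-py is installed, a Redis server
# is reachable on localhost:6379, and the helper name is_new_news is hypothetical.
import hashlib

import redis

NEWS_TIMEOUT_IN_SECONDS = 3600 * 24 * 3  # three days, matching the expiry above

redis_client = redis.StrictRedis(host='localhost', port=6379)


def is_new_news(news):
    """Return True the first time a title is seen within the expiry window."""
    digest = hashlib.md5(news['title'].encode('utf-8')).hexdigest()
    if redis_client.get(digest) is not None:
        return False
    # set(..., ex=...) writes the key and its TTL in one call, equivalent to the
    # separate set() + expire() pair in run() but without leaving a window where
    # the key exists with no expiry.
    redis_client.set(digest, 'True', ex=NEWS_TIMEOUT_IN_SECONDS)
    return True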