def test_basic():
    """Round-trip one message through the test queue and verify it is unchanged."""
    client = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    sentMsg = {"test": "test"}
    client.sendMessage(sentMsg)
    receivedMsg = client.getMessage()
    assert sentMsg == receivedMsg
    # Fixed typo in the success message ("passedd") and switched to the
    # print() form, which behaves identically on Python 2 and 3.
    print("test passed")
def test_basic():
    """Send a message through the test queue and verify the round trip.

    The original fetched the message and printed one field without any
    assertion, so the test could never fail; assert the payload instead.
    """
    client = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    sentMSS = {"test": "DEMA"}
    client.sendMessage(sentMSS)
    client.sleep(10)
    receivedMsg = client.getMessage()
    assert receivedMsg == sentMSS
    print(receivedMsg['test'])
def test_basic():
    """Round-trip a demo message through the test queue and check it survives."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    payload = {"test": "demo"}
    mq.sendMessage(payload)
    mq.sleep(10)
    echoed = mq.getMessage()
    assert echoed == payload
    print('CloudAMQP test passed!')
def test_basic():
    """Verify a message sent to the test queue comes back unchanged."""
    mq_client = CloudAMQPClient(TEST_CLOUDAMQP_URL, TEST_QUEUE_NAME)
    outgoing = {'test': 'test'}
    mq_client.sendMessage(outgoing)
    incoming = mq_client.getMessage()
    assert outgoing == incoming
    print('test_basic passed.')
def test_basic():
    """Send a demo message, wait, and assert the queue echoes it back."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    payload = {'test': 'demo'}
    mq.sendMessage(payload)
    mq.sleep(10)
    echoed = mq.getMessage()
    assert payload == echoed
    print('test_basic passed')
def test_basic():
    """Round-trip a message through the news-fetch task queue."""
    queue_client = CloudAMQPClient(CLOUDAMQP_URL, NEWS_FETCH_TASK_QUEUE_NAME)
    message_out = {'test': 'test'}
    queue_client.sendMessage(message_out)
    message_in = queue_client.getMessage()
    assert message_out == message_in
    print('test_basic passed.')
def test_cloudamqp_client():
    """Check the CloudAMQP client can send and receive one message intact."""
    client = CloudAMQPClient(TEST_CLOUDAMQP_URL, TEST_QUEUE_NAME)
    outgoing = {'test': '123test'}
    client.sendMessage(outgoing)
    incoming = client.getMessage()
    assert outgoing == incoming
    print("AMQP client test passed")
def logNewsclickForuser(userId, newsId):
    """Persist a news-click event to MongoDB, then publish it to the queue.

    The Mongo record keeps a native datetime; the queued copy stringifies
    the timestamp so the payload is serializable.
    """
    message = {'userId': userId, 'newsId': newsId, 'timestamp': datetime.utcnow()}
    db = mongodb_client.get_db()
    #save original record
    db[CLICKS_DB_COLLECTION].insert(message)
    message = {'userId': userId, 'newsId': newsId, 'timestamp': str(datetime.utcnow())}
    # NOTE(review): sendMessage is invoked on the CloudAMQPClient *class*,
    # not on an instance — unless sendMessage is a classmethod this raises
    # TypeError at runtime. An instance (URL + queue name) is likely needed
    # here; the correct constants are not visible in this chunk, so confirm
    # and fix at the call site.
    CloudAMQPClient.sendMessage(message)
def test_basic():
    """Publish a key/value message, wait briefly, and assert the echo."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, QUEUE_NAME)
    payload = {'test_key': 'test_value'}
    mq.sendMessage(payload)
    mq.sleep(5)
    echoed = mq.getMessage()
    assert payload == echoed
    print('test_basic passed.')
def test_basic():
    """Send a test message and verify the queue returns the same payload."""
    queue_client = CloudAMQPClient(TEST_AMPQ_URL, TEST_QUEUE_NAME)
    outgoing = {'test': 'test'}
    queue_client.sendMessage(outgoing)
    queue_client.sleep(5)
    incoming = queue_client.getMessage()
    assert outgoing == incoming
    print('test passed')
def test_basic():
    """Round-trip a demo message and assert it is received unmodified."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    payload = {'test': 'demo'}
    mq.sendMessage(payload)
    mq.sleep(10)
    echoed = mq.getMessage()
    assert payload == echoed
    print('test_basic passed!')
def test_basic():
    """Smoke-test the message queue with a single send/receive cycle."""
    mq_client = CloudAMQPClient(CLOUDAMQP_URL, QUEUE_NAME)
    message_out = {'test': 'test'}
    mq_client.sendMessage(message_out)
    message_in = mq_client.getMessage()
    assert message_out == message_in
    print('MQ basically works well')
def test_basic():
    """Send a demo message, wait for the queue, and assert the round trip."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    # Build the message to send.
    payload = {"test": "demo"}
    mq.sendMessage(payload)
    mq.sleep(10)
    echoed = mq.getMessage()
    assert payload == echoed
    print('test_basic passed!')
def test_basic():
    """Exercise the cloudAMQP_client basic utility: one send, one receive."""
    mq = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    outgoing = {"test": "test"}
    mq.sendMessage(outgoing)
    incoming = mq.getMessage()
    assert outgoing == incoming
    print("test_basic passed")
def test_basic():
    """Send one message and confirm the queue echoes it back unchanged."""
    client = CloudAMQPClient(CloudAMQP_URL, TEST_QUEUE_NAME)
    outgoing = {"test": "test"}
    client.sendMessage(outgoing)
    client.sleep(10)
    incoming = client.getMessage()
    assert outgoing == incoming
    print("test_basic passed!")
def test_basic():
    """Publish a message, pause, and assert the received copy matches."""
    queue_client = CloudAMQPClient(CLOUDAMQP_URL, QUEUE_NAME)
    message_out = {'test': 'test'}
    queue_client.sendMessage(message_out)
    queue_client.sleep(5)
    message_in = queue_client.getMessage()
    assert message_out == message_in
    print("Good")
def logNewsClickForUser(user_id, news_id):
    """Record a user's news click in MongoDB and queue it for the ML service."""
    LOG_CLICKS_TASK_QUEUE_URL = "amqp://*****:*****@emu.rmq.cloudamqp.com/evvloemh"
    LOG_CLICKS_TASK_QUEUE_NAME = "LOG_CLICKS_TASK_QUEUE"
    cloudAMQP_client = CloudAMQPClient(LOG_CLICKS_TASK_QUEUE_URL,
                                       LOG_CLICKS_TASK_QUEUE_NAME)
    # The Mongo record keeps a native datetime timestamp.
    click_record = {'userId': user_id,
                    'newsId': news_id,
                    'timestamp': datetime.utcnow()}
    db = mongodb_client.get_db()
    db[CLICK_LOGS_TABLE_NAME].insert(click_record)
    # Send log task to machine learning service for prediction; the
    # timestamp is stringified so the payload serializes cleanly.
    task = {'userId': user_id,
            'newsId': news_id,
            'timestamp': str(datetime.utcnow())}
    cloudAMQP_client.sendMessage(task)
def handle_message(msg):
    """Scrape the article text for a queued news task and forward it on.

    Expects ``msg`` to be a dict containing at least a 'url' key. The
    scraped body is attached under 'text' and the enriched task is
    published to the dedupe queue. Malformed messages are logged and
    dropped.
    """
    if msg is None or not isinstance(msg, dict):
        print('message is broken')
        return
    task = msg
    # Download and parse the article body (removed the unused `text = None`
    # local the original declared and never read).
    article = Article(task['url'])
    article.download()
    article.parse()
    task['text'] = article.text
    dedupe_news_queue_client = CloudAMQPClient(DEDUPE_NEWS_TASK_QUEUE_URL,
                                               DEDUPE_NEWS_TASK_QUEUE_NAME)
    dedupe_news_queue_client.sendMessage(task)
class NewsMonitor:
    """Polls the news API, dedupes articles via Redis, and queues new ones."""

    def __init__(self):
        # Load queue, Redis, and news-source settings from the config file.
        with open(CONFIG_FILE, 'r') as f:
            data = json.load(f)
        self.scrape_news_task_queue_url = data['queue'][
            'scrapeNewsTaskQueueUrl']
        self.scrape_news_task_queue_name = data['queue'][
            'scrapeNewsTaskQueueName']
        self.redis_server_host = data['redis']['redisServerHost']
        self.redis_server_port = int(data['redis']['redisServerPort'])
        self.news_timeout_redis_in_seconds = int(
            data['redis']['newsMonitorExpireInSeconds'])
        self.news_sources = list(data['newsApi']['source'])

    def __call__(self):
        """Fetch the latest news and enqueue every article not seen recently."""
        self.redis_client = redis.StrictRedis(self.redis_server_host,
                                              self.redis_server_port)
        self.cloudAMQP_client = CloudAMQPClient(
            self.scrape_news_task_queue_url, self.scrape_news_task_queue_name)
        news_list = news_api_client.getNewsFromSource(self.news_sources)
        print("call news monitor")
        num_of_new_news = 0
        num_of_old_news = 0
        for news in news_list:
            # base64 of the title's md5 digest is the dedup key (Python 2
            # str.encode codec — this code is Python-2-only).
            news_digest = hashlib.md5(
                news['title'].encode('utf-8')).digest().encode('base-64')
            if self.redis_client.get(news_digest) is None:
                num_of_new_news = num_of_new_news + 1
                news['digest'] = news_digest
                if news['publishedAt'] is None:
                    # BUG FIX: the format string ended in '%SZZ', producing a
                    # malformed timestamp with a doubled literal "ZZ"; the
                    # ISO-8601 UTC suffix is a single 'Z', as used by the
                    # other monitor scripts in this file.
                    news['publishedAt'] = datetime.datetime.utcnow().strftime(
                        '%Y-%m-%dT%H:%M:%SZ')
                self.redis_client.set(news_digest, news)
                self.redis_client.expire(news_digest,
                                         self.news_timeout_redis_in_seconds)
                self.cloudAMQP_client.sendMessage(news)
            else:
                num_of_old_news = num_of_old_news + 1
        print("Fetched %d new news. %d old news in redis" %
              (num_of_new_news, num_of_old_news))
        self.cloudAMQP_client.close()
def logCoursesClick(userId, courseId):
    """Log a course click to MongoDB and publish it to the click task queue.

    The Mongo record keeps a native datetime; the queued copy stringifies
    the timestamp before sending.
    """
    print "click received"
    db = mongodb_client.get_db()
    message = {
        'userId': userId,
        'courseId': courseId,
        'timestamp': datetime.utcnow()
    }
    db['clicklog'].insert(message)
    cloudAMQP_client = CloudAMQPClient(CLICK_TASK_QUEUE_URL,
                                       CLICK_TASK_QUEUE_NAME)
    # NOTE(review): the queued payload stores the course id under the key
    # 'newsId', unlike the Mongo record above which uses 'courseId'. This
    # looks like a copy-paste slip from a news-click logger — confirm what
    # key the queue consumer expects before renaming it.
    message = {
        'userId': userId,
        'newsId': courseId,
        'timestamp': str(datetime.utcnow())
    }
    cloudAMQP_client.sendMessage(message)
SCRAPE_NEWS_TASK_QUEUE_URL = 'amqp://*****:*****@otter.rmq.cloudamqp.com/nfdiqtrj'
SCRAPE_NEWS_TASK_QUEUE_NAME = 'tap-news-scrape-news-task-queue'

# Shared clients: Redis for dedup bookkeeping, CloudAMQP for the task queue.
redis_client = redis.StrictRedis(REDIS_HOST, REDIS_PORT)
cloudAMQP_client = CloudAMQPClient(SCRAPE_NEWS_TASK_QUEUE_URL,
                                   SCRAPE_NEWS_TASK_QUEUE_NAME)

# Poll the news API forever; enqueue only articles not seen recently.
while True:
    news_list = news_api_client.getNewsFromSource(NEWS_SOURCES)
    num_of_news = 0
    for article in news_list:
        # Hex digest of the title is the dedup key in Redis.
        digest = hashlib.md5(article['title'].encode('utf-8')).hexdigest()
        if redis_client.get(digest) is not None:
            continue
        num_of_news += 1
        article['digest'] = digest
        # Fall back to the current UTC time when the API omits a timestamp.
        if article['publishedAt'] is None:
            article['publishedAt'] = datetime.datetime.utcnow().strftime(
                "%Y-%m-%dT%H:%M:%SZ")
        redis_client.set(digest, "True")
        redis_client.expire(digest, NEWS_TIME_OUT_IN_SECONDS)
        cloudAMQP_client.sendMessage(article)  # send to rabbitmq
    print("Fetched %d news." % num_of_news)
    cloudAMQP_client.sleep(SLEEP_TIME_IN_SECONDS)
    # NOTE(review): fragment — the opening `NEWS_SOURCES = [` of this list
    # sits outside the visible chunk; only the tail of the literal is here.
    'techcrunch',
    'the-new-york-times',
    'the-wall-street-journal',
    'the-washington-post'
]

# Redis tracks recently-seen articles; CloudAMQP carries scrape tasks.
redis_client = redis.StrictRedis(REDIS_HOST, REDIS_PORT)
cloudAMQP_client = CloudAMQPClient(SCRAPE_NEWS_TASK_QUEUE_URL, SCRAPE_NEWS_TASK_QUEUE_NAME)

# Poll forever; enqueue every article whose title digest Redis has not seen.
while True:
    news_list = news_api_client.getNewsFromSource(NEWS_SOURCES)
    num_of_new_news = 0
    for news in news_list:
        # base64 of the title's md5 digest (Python 2 str codec) is the dedup key.
        news_digest = hashlib.md5(news['title'].encode('utf-8')).digest().encode('base64')
        if redis_client.get(news_digest) is None:
            num_of_new_news = num_of_new_news + 1
            news['digest'] = news_digest
            if news['publishedAt'] is None:
                # format: YYYY-MM-DDTHH:MM:SS in UTC
                news['publishedAt'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
            redis_client.set(news_digest, news)
            redis_client.expire(news_digest, NEWS_TIME_OUT_IN_SECONDS)
            cloudAMQP_client.sendMessage(news)
    print "Fetched %d new news." % num_of_new_news
    # NOTE(review): SLEEP_TIME_IN_SECOUNDS is misspelled — verify the
    # constant is defined elsewhere under exactly this name before renaming.
    cloudAMQP_client.sleep(SLEEP_TIME_IN_SECOUNDS)
# Redis remembers recently-seen articles; CloudAMQP carries scrape tasks.
redis_client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT)
# Connect CloudAMQP
cloudAMQP_client = CloudAMQPClient(AMQP_URL, SCRAPE_NEWS_QUEUE_NAME)

while True:
    # Pull the latest articles from the news API.
    articles = news_api_client.getNewsFromSource(NEWS_SOURCE)
    # Count how many genuinely new articles get saved this cycle.
    num_of_new_news = 0
    for piece in articles:
        # base64 of the title's md5 digest (Python-2-only codec) keys the
        # article in Redis.
        key = hashlib.md5(piece['title'].encode('utf-8')).digest().encode('base64')
        # Skip anything Redis has already seen within the TTL window.
        if redis_client.get(key) is not None:
            continue
        num_of_new_news += 1
        piece['digest'] = key
        # Stamp a missing publish time with the current UTC time.
        if piece['publishedAt'] is None:
            piece['publishedAt'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        # Cache with an expiry, then hand the article to the message queue.
        redis_client.set(key, piece)
        redis_client.expire(key, NEWS_TIME_OUT_IN_SECONDS)
        cloudAMQP_client.sendMessage(piece)
    print("Fetched %d new news." % num_of_new_news)
    # Sleep a constant interval before the next news API call.
    cloudAMQP_client.sleep(SLEEP_TIME_OUT_IN_SECONDS)
# Interval between polls, read from the monitor section of the config.
SLEEP_TIME_IN_SECONDS = int(
    config['cloudAMQP']
    ['scrape_news_task_queue_sleep_time_in_seconds_at_monitor'])

# Poll forever: any article whose title digest Redis doesn't know is new.
while True:
    fetched = news_api_client.getNewsFromSource(NEWS_SOURCES)
    fresh_count = 0
    for item in fetched:
        # base64 of the title's md5 digest (Python-2-only codec) is the key.
        digest = hashlib.md5(
            item['title'].encode('utf-8')).digest().encode('base64')
        if redis_client.get(digest) is not None:
            continue
        fresh_count += 1
        item['digest'] = digest
        # If there's no published time, fall back to the current UTC time.
        if item['publishedAt'] is None:
            item['publishedAt'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%dT%H:%M:%SZ')
        # Remember the article with an expiry, then enqueue it.
        redis_client.set(digest, item)
        redis_client.expire(digest, NEWS_TIME_OUT_IN_SECONDS)
        scrape_news_queue_client.sendMessage(item)
    logger.info("News monitor : fetched %d news." % fresh_count)
    scrape_news_queue_client.sleep(SLEEP_TIME_IN_SECONDS)
def test_basic():
    """Round-trip a demo message through the test queue and verify it.

    The original received the message but never checked anything, so the
    test could not fail; assert the payload survives the round trip.
    """
    client = CloudAMQPClient(CLOUDAMQP_URL, TEST_QUEUE_NAME)
    sentMsg = {'test': 'demo'}
    client.sendMessage(sentMsg)
    client.sleep(1)
    receiveMsg = client.getMessage()
    assert receiveMsg == sentMsg
# NOTE: sys.path is extended *before* importing cloudAMQP_client below, so
# the statement order here is load-bearing.
import os
import sys
from dotenv import load_dotenv  # pylint: disable=E0401
from os.path import join, dirname

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from cloudAMQP_client import CloudAMQPClient  # pylint: disable=E0401

# The .env file lives three directory levels above this script.
dotenv_path = join(os.path.dirname(__file__), '..', '..', '..', '.env')
load_dotenv(dotenv_path)

# Queue connection settings come from the environment (None if unset).
MQ_REAL_ESTATE_FETCH_TASK_URI = os.environ.get("MQ_REAL_ESTATE_FETCH_TASK_URI")
MQ_REAL_ESTATE_FETCH_TASK_NAME = os.environ.get(
    "MQ_REAL_ESTATE_FETCH_TASK_NAME")

# Manually enqueue a single fetch task for one property id.
client = CloudAMQPClient(MQ_REAL_ESTATE_FETCH_TASK_URI,
                         MQ_REAL_ESTATE_FETCH_TASK_NAME)
client.sendMessage({'zpid': '30691509'})
# Redis tracks which articles were already enqueued; CloudAMQP is the sink.
redis_client = redis.StrictRedis(REDIS_HOST, REDIS_PORT)
cloudAMQP_client = CloudAMQPClient(SCRAPE_NEWS_TASK_QUEUE_URL,
                                   SCRAPE_NEWS_TASK_QUEUE_NAME)

# Monitor loop: publish every article not seen within the Redis TTL window.
while True:
    fetched = news_api_client.getNewsFromSource(NEWS_SOURCES)
    num_of_new_news = 0
    for item in fetched:
        # base64 of the title's md5 digest (Python-2-only codec) is the
        # dedup key in Redis.
        digest = hashlib.md5(
            item['title'].encode('utf-8')).digest().encode('base64')
        if redis_client.get(digest) is not None:
            continue
        num_of_new_news += 1
        item['digest'] = digest
        if item['publishedAt'] is None:
            # format: YYYY-MM-DDTHH:MM:SS in UTC
            item['publishedAt'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%dT%H:%M:%SZ')
        redis_client.set(digest, item)
        redis_client.expire(digest, NEWS_TIME_OUT_IN_SECONDS)
        cloudAMQP_client.sendMessage(item)
    print("Fetched %d new news." % num_of_new_news)
    cloudAMQP_client.sleep(SLEEP_TIME_IN_SECOUNDS)
# Queue client for dedupe tasks plus the NewsAPI SDK client.
dedupe_news_queue_client = CloudAMQPClient(DEDUPE_NEWS_TASK_QUEUE_URL,
                                           DEDUPE_NEWS_TASK_QUEUE_NAME)
newsApiClient = NewsApiClient(api_key='6e402bf74e5e4376b4d991ce169d1ed3')

# Poll the headlines endpoint forever, queuing anything Redis hasn't seen.
while True:
    headlines = newsApiClient.get_top_headlines(None, NEWS_SOURCES)
    num_of_new_news = 0
    for story in headlines:
        # Hex md5 of the title is the dedup key.
        key = hashlib.md5(story['title'].encode('utf-8')).hexdigest()
        if redis_client.get(key) is not None:
            continue
        num_of_new_news += 1
        story['digest'] = key
        # If 'publishedAt' is None, stamp the current UTC time instead
        # (format YYYY-MM-DDTHH:MM:SS).
        if story['publishedAt'] is None:
            story['publishedAt'] = datetime.datetime.utcnow().strftime(
                '%Y-%m-%dT%H:%M:%SZ')
        # Cache the serialized story with an expiry, then enqueue it.
        redis_client.set(key, json.dumps(story))
        redis_client.expire(key, NEWS_TIME_OUT_IN_SECONDS)
        print('send message')
        dedupe_news_queue_client.sendMessage(story)
    print("Fetched %d news." % num_of_new_news)
    dedupe_news_queue_client.sleep(SLEEP_TIME_IN_SECONDS)