class RedisWrapper():
    """Cache layer that mirrors MongoWrapper query results into RedisJSON.

    Cache keys are built as '<api_string><stock_name>' so API responses can
    be served from Redis without hitting Mongo.
    """

    def __init__(self, decrypt_key, host='35.236.16.13', port=6379, db=0):
        """Connect to Redis and to Mongo (via MongoWrapper).

        :param decrypt_key: key forwarded to MongoWrapper
        :param host: Redis host (default keeps the original hard-coded value,
            so existing callers are unaffected)
        :param port: Redis port
        :param db: Redis logical database index
        """
        self.redicclient = redis.StrictRedis(host=host, port=port, db=db,
                                             decode_responses=True)
        self.mng = MongoWrapper(decrypt_key)

    def get_logger(self, logger_name):
        """Delegate logger creation to the Mongo wrapper."""
        return self.mng.get_logger(logger_name)

    def redis_update_json(self, api_string, key):
        """Re-pull one endpoint's data from Mongo and overwrite the cache.

        Unknown ``api_string`` values are silently ignored (original behavior).

        :param api_string: endpoint prefix, e.g. 'get_tweets_with_lat_long/'
        :param key: stock/company name
        """
        if api_string == 'get_tweets_with_lat_long/':
            json_data = self.mng.get_tweets_with_lat_long(key)
            self.redicclient.execute_command('JSON.SET', api_string + key, '.', json_data)
        elif api_string == 'get_polarity_tweets_of_stock/':
            json_data = self.mng.get_polarity_tweets_of_stock(key)
            self.redicclient.execute_command('JSON.SET', api_string + key, '.', json_data)

    def redis_insert_tweet(self, key, tweet):
        """Append a single tweet to the cached JSON arrays for ``key``.

        Updates the lat/long cache (only when the tweet carries coordinates)
        and the polarity cache (always).

        :param key: stock/company name the tweet belongs to
        :param tweet: tweet document dict; 'Geo' may be absent or None
        :raises ValueError: if Sentiment_Polarity is not -1, 0 or 1
        """
        # FIX: the original used a bare `except:` which swallowed every
        # exception (including KeyboardInterrupt). Narrow it to the errors a
        # missing or malformed 'Geo' field can actually raise.
        try:
            lat_long_list = tweet['Geo']['coordinates']
            has_lat_long = True
        except (KeyError, TypeError, IndexError):
            has_lat_long = False
            lat_long_list = ['None', 'None']

        if has_lat_long:
            entry = {
                "Latitude": lat_long_list[0],
                "Longitude": lat_long_list[1],
                "Tweet_Text": tweet["Text"],
                "Sentiment_Value": float(tweet["Sentiment_Value"]),
            }
            self.redicclient.execute_command(
                'JSON.ARRAPPEND', 'get_tweets_with_lat_long/' + key, '.',
                json.dumps(entry))

        sentiment_polarity = int(tweet["Sentiment_Polarity"])
        entry = {
            "Latitude": lat_long_list[0],
            "Longitude": lat_long_list[1],
            "Tweet_Text": tweet["Text"],
            "Sentiment_Polarity": sentiment_polarity,
        }
        # FIX: the original if/elif chain left root_path unbound for any
        # polarity outside {-1, 0, 1}, producing a confusing NameError.
        # Fail loudly with a clear message instead.
        polarity_paths = {
            -1: '.Negative_Tweets',
            0: '.Neutral_Tweets',
            1: '.Positive_Tweets',
        }
        try:
            root_path = polarity_paths[sentiment_polarity]
        except KeyError:
            raise ValueError(
                'Unexpected Sentiment_Polarity: %r' % sentiment_polarity)
        self.redicclient.execute_command(
            'JSON.ARRAPPEND', 'get_polarity_tweets_of_stock/' + key,
            root_path, json.dumps(entry))

    def redis_get_json(self, api_string, key):
        """Return the raw RedisJSON document stored under api_string+key."""
        return self.redicclient.execute_command('JSON.GET', api_string + key)

    def redis_flush_all(self):
        """
        Danger. This flushes the whole DB. Careful
        :return:
        """
        self.redicclient.flushdb()
from WatchDogs_MongoWrapper import MongoWrapper

if __name__ == "__main__":
    # Report simple collection statistics through the Mongo-backed logger.
    mongo = MongoWrapper()
    stats_logger = mongo.get_logger('Tweets Stats')

    stock_count = mongo.print_statistics('Stocks')
    stats_logger.info('# of companies in Companies in the database: ' + str(stock_count))

    tweet_count = mongo.print_statistics('Tweets')
    stats_logger.info('# of companies in Tweets in the database: ' + str(tweet_count))
from kafka import KafkaConsumer
from json import loads
from WatchDogs_MongoWrapper import MongoWrapper
from WatchDogs_RedisWrapper import RedisWrapper
from constants import Constants
""" Mongo_Group: The consumer is Mongo Redis_Group: The consumer is Redis """
if __name__ == "__main__":
    # NOTE(review): this rebinds the name `Constants` from the imported class
    # to an instance, shadowing the class for the rest of the script. It works,
    # but a lowercase instance name would be clearer — confirm no later code
    # relies on the class itself before renaming.
    Constants = Constants()
    # Both sinks share the same decrypt key: Redis cache + Mongo store.
    r = RedisWrapper(Constants.decrypt_key)
    mng = MongoWrapper(Constants.decrypt_key)
    test_logger = mng.get_logger('Kafka DB Populator')
    # Consumer for the 'mongoredis' topic; messages are JSON-decoded bytes.
    # auto_offset_reset='earliest' replays unconsumed messages on restart.
    consumer = KafkaConsumer(
        'mongoredis',
        bootstrap_servers=['34.83.10.248:9092'],
        auto_offset_reset='earliest',
        enable_auto_commit=True,
        group_id='MongoRedis',
        value_deserializer=lambda x: loads(x.decode('utf-8')))
    # r.redis_flush_all()
    # test_logger.info('Redis Cache Flushed')
    #### Pull all Companies and update the cache first
    # Warm the Redis cache for every known stock before consuming new events.
    for each_company in mng.get_all_stocks():
        stock_name = each_company['Company']
        r.redis_update_json('get_tweets_with_lat_long/', stock_name)
        r.redis_update_json('get_polarity_tweets_of_stock/', stock_name)