@classmethod
def _get_from_cache(cls, subreddit_id):
    cache_key = Subreddit.make_cache_key(subreddit_id)
    logger.info(f"Getting subreddit listing from cache at key {cache_key}.")
    ids = list_cache.get(cache_key, start=0)
    ids_cache = Thread.make_cache_keys(map(str, ids))
    logger.info(f"Getting thread listing from cache with keys {ids_cache[:5]}...")
    items = cache.get_many(ids_cache)
    # Split the results: keep cache hits, collect the ids of cache misses.
    items_found = []
    items_not_found_ids = []
    for i in range(len(ids)):
        if items[i]:
            items_found.append(items[i])
        else:
            items_not_found_ids.append(ids[i])
    logger.info(f"Did not find {len(items_not_found_ids)} items in cache.")
    if items_not_found_ids:
        logger.info("Fetching missing items from database.")
        items_found_in_db = cls._get_from_db_w_ids(items_not_found_ids)
        logger.info(
            f"Found {len(items_found_in_db)}/{len(items_not_found_ids)} "
            f"missing cache items in database."
        )
        items_found.extend(items_found_in_db)
    return items_found
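# The fallback above calls _get_from_db_w_ids, which is not shown in this
# section. A minimal sketch, assuming SQLAlchemy 1.x Core with a `threads`
# table and that rows are re-serialized to match the shape of cached items;
# the table name and the serializer call are assumptions.
@classmethod
def _get_from_db_w_ids(cls, ids):
    query = select([threads]).where(threads.c.id.in_(ids))
    rows = db.engine.execute(query)
    return [cls._serializer.dump(dict(row)) for row in rows]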
def set(self, key, val):
    try:
        logger.info(f"Caching {val}")
        # hmset is deprecated in newer redis-py releases in favor of
        # hset(key, mapping=val), but is kept here to match the rest of
        # the codebase.
        return self.backend.hmset(key, val)
    except redis.exceptions.RedisError as ex:
        logger.error(f"Error while caching data: {ex}")
        raise
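# The matching read path (used by get_from_cache below) is not shown in this
# section. A minimal sketch, assuming the same Redis hash backend: HGETALL
# returns an empty dict for a missing key, which is normalized to None here.
def get(self, key):
    try:
        return self.backend.hgetall(key) or None
    except redis.exceptions.RedisError as ex:
        logger.error(f"Error while reading from cache: {ex}")
        raise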
@classmethod
def get(cls, subreddit_id):
    try:
        listing_objects = cls._get_from_cache(subreddit_id)
    except Exception as ex:
        logger.info(f"Encountered exception reading from cache: {ex}")
        logger.info("Fetching from database.")
        listing_objects = cls._get_from_db(subreddit_id)
    return cls(listing_objects)
@classmethod
def get(cls, user_id):
    try:
        items = cls._get_from_cache(user_id)
    except Exception as ex:
        logger.info(f"Encountered exception reading from cache: {ex}")
        logger.info("Fetching from database.")
        items = cls._get_from_db(user_id)
    return cls(items)
def init_app(self, app):
    logger.info(
        f"<EventPublisher> Connecting to RabbitMQ @ "
        f"host:{app.config['RABBITMQ_HOST']} and port:{app.config['RABBITMQ_PORT']}"
    )
    self.exchange = app.config['RABBITMQ_EXCHANGE']
    self.pool = RabbitMqConnectionPool(app)
    # NOTE: self.channel is assumed to be set up elsewhere (e.g. acquired
    # from the connection pool); it is not assigned in this method.
    self.channel.exchange_declare(exchange=self.exchange,
                                  exchange_type='topic')
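# A minimal usage sketch for the init_app pattern above, assuming a Flask app
# and an EventPublisher class with a no-argument constructor; the host, port,
# and exchange values are placeholders.
from flask import Flask

app = Flask(__name__)
app.config['RABBITMQ_HOST'] = 'localhost'
app.config['RABBITMQ_PORT'] = 5672
app.config['RABBITMQ_EXCHANGE'] = 'events'

event_publisher = EventPublisher()
event_publisher.init_app(app)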
def set_many(self, keys, vals):
    # Buffer all writes and execute them as a single MULTI/EXEC transaction.
    pipe = self.backend.pipeline()
    pipe.multi()
    try:
        for key, val in zip(keys, vals):
            pipe.hmset(key, val)
        pipe.execute()
    except redis.exceptions.RedisError as ex:
        logger.error(f"Error while caching data: {ex}")
        return False
    return True
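# The vote and comment handlers below call cache.incr_field, which is not
# shown in this section. A minimal sketch, assuming the same Redis hash
# backend: HINCRBY atomically adjusts a single integer field such as 'score'
# or 'num_comments'.
def incr_field(self, key, field, amount):
    try:
        return self.backend.hincrby(key, field, amount)
    except redis.exceptions.RedisError as ex:
        logger.error(f"Error while incrementing {field} on {key}: {ex}")
        raise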
@classmethod
def get_from_cache(cls, id):
    cache = cls.cache
    key = cls.make_cache_key(id)
    logger.info(f"Getting cached key {key}")
    cached = cache.get(key)
    logger.info(f"Read cached value {cached}")
    if cached:
        # INVALID_HASH marks ids known not to exist, so a hit on the
        # sentinel short-circuits straight to a 404.
        if cached == INVALID_HASH:
            raise InvalidUsage.resource_not_found()
        cached = cls._serializer.load(cached)
    return cached
def handle_update(self, ch, method, props, body):
    logger.info("Vote updated; atomically updating scores.")
    try:
        body = json.loads(body)['body']
        post_id = body['voted_thread_id']
        user_id = body['voter_id']
        direction = body['direction']
    except json.JSONDecodeError as ex:
        logger.warning(f"Error while decoding json: {ex}")
        return
    thread_key = Thread.make_cache_key(post_id)
    # Changing a vote moves the score by two points (e.g. removing a +1
    # and adding a -1).
    return cache.incr_field(thread_key, 'score', direction * -1 * 2)
def send_event(self, event):
    routing_key = event.routing_key
    event_body = event.dump()
    logger.info(f"Sending event {routing_key} to exchange {self.exchange}")
    try:
        self.channel.basic_publish(exchange=self.exchange,
                                   routing_key=routing_key,
                                   body=event_body)
    except pika.exceptions.AMQPConnectionError as ex:
        logger.warning(
            f"Could not connect to RabbitMQ when sending event {routing_key}: {ex}"
        )
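# send_event only assumes that an event exposes `routing_key` and `dump()`.
# A minimal sketch of such an event, assuming a JSON wire format with the
# payload nested under a 'body' key, matching what the consumers below
# decode; the class name and routing key are illustrative.
class ThreadCreatedEvent:
    routing_key = 'thread.create'

    def __init__(self, thread):
        self.thread = thread

    def dump(self):
        return json.dumps({'body': self.thread})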
def handle_delete(self, ch, method, properties, body):
    logger.info(f"Handling comment.delete. {body}")
    try:
        body = json.loads(body)['body']
        thread_id = body['thread_id']
        thread_cache_key = Thread.make_cache_key(thread_id)
        cache.incr_field(thread_cache_key, 'num_comments', -1)
        # Acknowledge only after the cache update has succeeded.
        ch.basic_ack(delivery_tag=method.delivery_tag)
        logger.info("Finished updating number of comments in cache.")
    except json.JSONDecodeError as ex:
        logger.warning(f"Error while decoding json: {ex}")
def handle_create(self, ch, method, props, body):
    logger.info(f"Handling create. {body}")
    try:
        body = json.loads(body)['body']
        thread_id = body['id']
        sr_id = body['subreddit_id']
        sr_list_key = Subreddit.make_cache_key(sr_id)
        # Newest threads go to the front of the subreddit's listing.
        list_cache.lpush(sr_list_key, thread_id)
        # Acknowledge only after the listing has been updated.
        ch.basic_ack(delivery_tag=method.delivery_tag)
        logger.info("Finished updating Post list cache.")
    except json.JSONDecodeError as ex:
        logger.warning(f"Error while decoding json: {ex}")
@classmethod
def _get_from_cache(cls, user_id):
    # TODO: retrieve backups from database
    query = select([subscriptions.c.subreddit_id])\
        .where(subscriptions.c.subscriber_id == user_id)
    sr_ids = db.engine.execute(query)
    sr_ids = Subreddit.make_cache_keys(map(lambda pair: pair[0], sr_ids))
    logger.info(f"Subreddits followed: {sr_ids}")
    sr_listing_ids = list_cache.get_many(sr_ids)
    # Flatten the per-subreddit listings into one stream of thread ids.
    sr_listing_ids = Thread.make_cache_keys(chain(*sr_listing_ids))
    logger.info(f"Fetched cached listing ids {sr_listing_ids[:5]}...")
    items = cache.get_many(sr_listing_ids)
    logger.info(f"Fetched {len(items)} cached items.")
    return items
def start(self):
    logger.info(f"<Queue:{self.queue_name}> Starting EventConsumer.")
    channel = self.channel
    exchange = self.app.config['RABBITMQ_EXCHANGE']
    queue_name = self.queue_name
    logger.info(f"<Queue:{self.queue_name}> Declaring queue.")
    channel.queue_declare(queue_name, durable=True)
    channel.queue_bind(exchange=exchange,
                       queue=queue_name,
                       routing_key=self.routing_key)
    channel.basic_consume(
        queue=queue_name,
        on_message_callback=self.handle_event,
    )
    logger.info(f"<Queue:{self.queue_name}> Consuming.")
    channel.start_consuming()
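# basic_consume above points at self.handle_event, which is not shown in this
# section. A minimal dispatch sketch, assuming the routing key encodes the
# action (e.g. 'thread.create') and that the handlers defined above live on
# this consumer; the real dispatch logic may differ.
def handle_event(self, ch, method, props, body):
    handlers = {
        'create': self.handle_create,
        'update': self.handle_update,
        'delete': self.handle_delete,
    }
    action = method.routing_key.split('.')[-1]
    handler = handlers.get(action)
    if handler is None:
        logger.warning(f"No handler for routing key {method.routing_key}.")
        return
    handler(ch, method, props, body)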
def write_to_cache(self):
    key = self.cache_key()
    logger.info(f"Writing to cache key {key}.")
    return self.cache.set(key, self.serialize())
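# Several functions above rely on make_cache_key / make_cache_keys. A minimal
# sketch, assuming keys are namespaced by model name; only the method names
# appear in this section, so the exact key format is an assumption.
@classmethod
def make_cache_key(cls, id):
    return f"{cls.__name__.lower()}:{id}"

@classmethod
def make_cache_keys(cls, ids):
    return [cls.make_cache_key(id) for id in ids]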
def decorated(*args, **kwargs):
    start = time.time()
    result = f(*args, **kwargs)
    logger.info(f'{f.__name__} took {time.time() - start} seconds.')
    return result
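# `decorated` above is the inner function of a timing decorator; the enclosing
# decorator is not shown in this section. A minimal sketch of the usual
# wrapping, using functools.wraps to preserve the wrapped function's metadata
# (the decorator name `timed` is illustrative).
import functools
import time

def timed(f):
    @functools.wraps(f)
    def decorated(*args, **kwargs):
        start = time.time()
        result = f(*args, **kwargs)
        logger.info(f'{f.__name__} took {time.time() - start} seconds.')
        return result
    return decorated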