    for header in crawled_raw["headers"]:
        crawled.headers.append(header["text"])
    crawled.title = crawled_raw["title"]
    crawled.checksum = crawled_raw["checksum"]
    crawled.html = crawled_raw["content"]

    print("Received {0}".format(str(crawled)))

    # Index the reconstructed document, then acknowledge the message
    # so RabbitMQ removes it from the queue.
    handle_crawled_data._indexer.index(crawled)
    chl.basic_ack(delivery_tag=method.delivery_tag)


def cleanup():
    print("closing resources")
    handle_crawled_data._indexer.close()


if __name__ == "__main__":
    # Open the search index and attach it to the callback function
    # so the handler can reuse a single Indexer instance.
    indexer = Indexer(index_dir="search_index")
    indexer.load()
    handle_crawled_data._indexer = indexer

    # Connect to RabbitMQ and consume messages from the durable crawledQueue.
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host="localhost"))
    channel = connection.channel()
    channel.queue_declare(queue="crawledQueue", durable=True)
    channel.basic_consume(handle_crawled_data, queue="crawledQueue")

    try:
        channel.start_consuming()
    except KeyboardInterrupt:
        cleanup()
        channel.close()
        connection.close()
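
To try the consumer without running the full crawler, a test message can be published to crawledQueue by hand. The sketch below is an assumption: it presumes the crawler serializes its results as JSON with the same fields the handler above reads (headers with a "text" key, title, checksum, content); the payload values are placeholders, not real crawl output.

import json
import pika

# Hypothetical payload mirroring the fields the consumer reads.
test_message = {
    "headers": [{"text": "Welcome"}, {"text": "About us"}],
    "title": "Example page",
    "checksum": "0" * 32,  # dummy value; the real crawler would compute one
    "content": "<html><body><h1>Welcome</h1></body></html>",
}

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
channel.queue_declare(queue="crawledQueue", durable=True)

# Publish the JSON-encoded message; delivery_mode=2 marks it persistent,
# matching the durable queue the consumer declares.
channel.basic_publish(
    exchange="",
    routing_key="crawledQueue",
    body=json.dumps(test_message),
    properties=pika.BasicProperties(delivery_mode=2))

connection.close()

With the consumer running, publishing this message should produce a "Received ..." line and add one document to the search_index directory.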