"type": "keyword" }, 'model_predict': { "type": "integer" } } } return structure if __name__ == "__main__": # Kafka config kfk_object = KFK(topic="parserOutput") consumer = kfk_object.init_Kafka_consumer() # Elasticsearch config es = elasticsearch.Elasticsearch([{'host': 'localhost', 'port': 9200}]) index = "tfmindex" doc_type = 'traffic_data' mapping = fMap(doc_type, index) if len(sys.argv) > 1: if sys.argv[1] == '-del': if es.indices.exists(index=index): print('Deleting existing index') es.indices.delete(index=index) print('Creating index') es.indices.create(index=index)
# Collapsed/extracted fragment of a Bokeh+Kafka dashboard script, restored to
# conventional formatting (the original was flattened onto a single line).

# Playback-speed presets (label -> delay factor) used by the dashboard.
velocity_options = {"Slow": 5.0, "Normal": 2.5, "Fast": 0.25}

try:
    # Kafka config -- best effort: if the broker is unreachable the script
    # continues without a live consumer.
    kafka_ip = 'localhost'
    kafka_port = 9092
    # kafkastockExchange = KFK(host=kafka_ip, port=kafka_port, topic='stockExchange')
    # message_stock = kafkastockExchange.init_Kafka_consumer()
    # kafka_unemployment = KFK(host=kafka_ip, port=kafka_port, topic='unemployment')
    # consumer_unem = kafka_unemployment.init_Kafka_consumer()
    # NOTE(review): `topic_name` is not defined anywhere in this fragment --
    # presumably set earlier in the file; confirm before relying on it.
    kafka_connection = KFK(host=kafka_ip, port=kafka_port, topic=topic_name)
    consumer = kafka_connection.init_Kafka_consumer()
except Exception:
    # Fix: was a bare `except: pass`, which also swallows SystemExit and
    # KeyboardInterrupt.  Narrowed to Exception while preserving the
    # original silent "run without Kafka" fallback.
    pass


def source_bokeh_kafka(column_names):
    """Build a ColumnDataSource with one empty list per requested column.

    :param column_names: iterable of column names for the data source.
    :return: bokeh ColumnDataSource whose columns are all empty lists.
    """
    data_dict = {name: [] for name in column_names}
    source = ColumnDataSource(data_dict)
    return source


def multi_plot(figure_info, source):
    # NOTE(review): this function appears truncated in the extracted source;
    # only the figure construction is visible -- the glyph wiring / return
    # statement lies outside this view.
    # `figure_info` is a dict carrying at least plot_width, plot_height and
    # title (established by the lookups below).
    fig = Figure(plot_width=figure_info["plot_width"],
                 plot_height=figure_info["plot_height"],
                 title=figure_info["title"],
                 x_axis_type="datetime")
# Collapsed/extracted fragment, restored to conventional formatting
# (the original was flattened onto a single line).

try:
    # client = KafkaClient(hosts="127.0.0.1:9092")
    # topic = client.topics[topic_name]
    # consumer = topic.get_simple_consumer(auto_offset_reset=OffsetType.LATEST,
    #                                      reset_offset_on_start=True)
    # consumer = topic.get_simple_consumer()
    # Kafka config -- best effort: consumers for the 'stockExchange' and
    # 'unemployment' topics; if the broker is down the script continues
    # without them.
    kafka_ip = 'localhost'
    kafka_port = 9092
    kafkastockExchange = KFK(host=kafka_ip, port=kafka_port, topic='stockExchange')
    consumer_stock = kafkastockExchange.init_Kafka_consumer()
    kafka_unemployment = KFK(host=kafka_ip, port=kafka_port, topic='unemployment')
    consumer_unem = kafka_unemployment.init_Kafka_consumer()
except Exception:
    # Fix: was a bare `except: pass`, which also swallows SystemExit and
    # KeyboardInterrupt.  Narrowed to Exception while preserving the
    # original silent "run without Kafka" fallback.
    pass


def source_bokeh_kafka(column_names):
    """Build a ColumnDataSource with one empty list per requested column.

    :param column_names: iterable of column names for the data source.
    :return: bokeh ColumnDataSource whose columns are all empty lists.
    """
    data_dict = {name: [] for name in column_names}
    source = ColumnDataSource(data_dict)
    return source
# NOTE(review): collapsed/extracted fragment of a simple_consumer.py script,
# left byte-identical because the trailing `for message in consumer:` loop is
# truncated mid-body (the csv file is opened but the writer loop's remainder
# lies outside this view), so a safe reformatting cannot be completed.
# What the visible code shows:
#   * appends the parent of this file's TrafficAnalyzer directory to
#     sys.path so the local KafkaConnection package can be imported as KFK;
#   * usage: python simple_consumer.py <topic_name> <csv (optional)>;
#   * builds a consumer for the given topic against localhost:9092;
#   * with the optional 'csv' argument, each message value has "u'" prefixes
#     stripped, is passed through json.loads and ast.literal_eval, and on the
#     first message the dict keys become the header for a '#'-delimited csv
#     written to ../Data/voIP.
# NOTE(review): `csv_file` is opened without a `with` block and no close() is
# visible in this fragment -- verify the out-of-view tail closes it.
import ast path_to_append = os.path.dirname(os.path.abspath(__file__)).replace( "/TrafficAnalyzer", "") sys.path.append(path_to_append) from KafkaConnection.kafka_connection import KafkaConnection as KFK IP_KAFKA = 'localhost' PORT_KAFKA = 9092 if len(sys.argv) < 2: print( 'Please use: python simple_consumer.py <topic_name> <csv (optional)>') else: topic = sys.argv[1] kafka = KFK(topic=topic, host=IP_KAFKA, port=PORT_KAFKA) consumer = kafka.init_Kafka_consumer() p = 0 if len(sys.argv) == 3: if sys.argv[2] == 'csv': csv_path = '../Data/' csv_name = 'voIP' for message in consumer: p += 1 transformed_message = ast.literal_eval( json.loads(message.value.replace("u'", "'"))) if p == 1: header = transformed_message.keys() csv_file = open(csv_path + csv_name, 'w') spamwriter = csv.writer(csv_file, delimiter='#')