Example #1
            'label_service': {
                "type": "keyword"
            },
            'model_predict': {
                "type": "integer"
            }
        }
    }

    return structure


if __name__ == "__main__":

    # Kafka config
    kfk_object = KFK(topic="parserOutput")
    consumer = kfk_object.init_Kafka_consumer()

    # Elasticsearch config
    es = elasticsearch.Elasticsearch([{'host': 'localhost', 'port': 9200}])
    index = "tfmindex"
    doc_type = 'traffic_data'
    mapping = fMap(doc_type, index)

    if len(sys.argv) > 1:
        if sys.argv[1] == '-del':
            if es.indices.exists(index=index):
                print('Deleting existing index')
                es.indices.delete(index=index)

    print('Creating index')
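The example is cut off right after the 'Creating index' message. A minimal sketch of how it could continue, assuming fMap() returns a body accepted by elasticsearch-py's indices.create (a pre-7.x client, since doc_type is still used) and that each Kafka message value is a JSON-encoded traffic record; the indexing loop below is illustrative, not the original code.

    # Hypothetical continuation (not part of the original example): create the
    # index with the mapping returned by fMap() and index every record read
    # from the 'parserOutput' topic. Assumes `import json` at the top of the file.
    es.indices.create(index=index, body=mapping)

    for message in consumer:
        doc = json.loads(message.value)      # each message value is assumed to be JSON
        es.index(index=index, doc_type=doc_type, body=doc)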
Example #2
# finally:
#     f.close()

try:
    # client = KafkaClient(hosts="127.0.0.1:9092")
    # topic = client.topics[topic_name]
    # consumer = topic.get_simple_consumer(auto_offset_reset=OffsetType.LATEST,
    #                                      reset_offset_on_start=True)
    # consumer = topic.get_simple_consumer()

    # Kafka config
    kafka_ip = 'localhost'
    kafka_port = 9092

    kafkastockExchange = KFK(host=kafka_ip,
                             port=kafka_port,
                             topic='stockExchange')
    consumer_stock = kafkastockExchange.init_Kafka_consumer()

    kafka_unemployment = KFK(host=kafka_ip,
                             port=kafka_port,
                             topic='unemployment')
    consumer_unem = kafka_unemployment.init_Kafka_consumer()

except Exception:
    # Ignore Kafka setup errors (e.g. the broker is not reachable yet)
    pass


def source_bokeh_kafka(column_names):
    # Build a ColumnDataSource with one empty column per requested name
    data_dict = {name: [] for name in column_names}
    source = ColumnDataSource(data_dict)
    return source
Example #3
velocity_options = {"Slow": 5.0, "Normal": 2.5, "Fast": 0.25}

try:

    # Kafka config
    kafka_ip = 'localhost'
    kafka_port = 9092

    # kafkastockExchange = KFK(host=kafka_ip, port=kafka_port, topic='stockExchange')
    # message_stock = kafkastockExchange.init_Kafka_consumer()

    # kafka_unemployment = KFK(host=kafka_ip, port=kafka_port, topic='unemployment')
    # consumer_unem = kafka_unemployment.init_Kafka_consumer()

    kafka_connection = KFK(host=kafka_ip, port=kafka_port, topic=topic_name)
    consumer = kafka_connection.init_Kafka_consumer()

except Exception:
    # Ignore Kafka setup errors (e.g. the broker is not reachable yet)
    pass


def source_bokeh_kafka(column_names):
    data_dict = {name: [] for name in column_names}
    source = ColumnDataSource(data_dict)
    return source

def multi_plot(figure_info, source):

    fig = Figure(plot_width=figure_info["plot_width"],
                 plot_height=figure_info["plot_height"],
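The snippet is truncated inside multi_plot. In a Bokeh app built this way, the Kafka consumer usually feeds the ColumnDataSource through a periodic callback. A minimal sketch of that glue code follows; the column names, the poll interval and the assumption that init_Kafka_consumer() returns a kafka-python consumer are all this sketch's choices, not the original code.

import json
from bokeh.plotting import curdoc

# Hypothetical glue code: poll Kafka and stream each new record into the
# ColumnDataSource so every figure built on it refreshes automatically.
source = source_bokeh_kafka(["timeStamp", "value"])      # illustrative column names

def update_source():
    polled = consumer.poll(timeout_ms=100)               # kafka-python style poll
    for records in polled.values():
        for record in records:
            row = json.loads(record.value)
            source.stream({name: [row[name]] for name in source.data}, rollover=500)

curdoc().add_periodic_callback(update_source, 1000)      # refresh once per second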
Example #4
            insert_data_intoMONGO(mongoOBJ, unemploymentData, collections['unemployment'], type, sources, coins)

        elif sys.argv[1] == 'remove':
            # Remove data, if needed
            remove_data(mongoOBJ, collections['stockExchange'])
            remove_data(mongoOBJ, collections['unemployment'])
            sys.exit()

    #  Send data


    try:

        # Kafka
        init_kafka_docker()
        kafkaObj_stockExchange = KFK(host=kafka_ip, port=kafka_port,
                                     topic='stockExchange')
        producer_stock = kafkaObj_stockExchange.init_Kafka_producer()

        kafkaObj_unemployment = KFK(host=kafka_ip, port=kafka_port,
                                    topic='unemployment')
        producer_unem = kafkaObj_unemployment.init_Kafka_producer()

        init_stock_index = '2000-01'
        init_unem_index = '2000-01'

        finish_stock_index = '2016-11'
        finish_unem_index = '2016-11'

        curr_stock_index = init_stock_index
        curr_unem_index = init_unem_index
        finish_queries = False
        response_stock = None
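The example stops right after the loop state is initialised. A plausible shape for the send loop that follows, assuming the two producers behave like kafka-python's KafkaProducer, that stockData and unemploymentData are dicts keyed by 'YYYY-MM', and that json is imported at the top of the file; next_month() is a hypothetical helper written for this sketch.

        # Hypothetical send loop (not in the original example): walk the month
        # index forward and publish one JSON message per series per month.
        def next_month(index):
            year, month = map(int, index.split('-'))
            year, month = (year + 1, 1) if month == 12 else (year, month + 1)
            return '{:04d}-{:02d}'.format(year, month)

        while not finish_queries:
            producer_stock.send('stockExchange', json.dumps(
                {'date': curr_stock_index, 'value': stockData[curr_stock_index]}).encode())
            producer_unem.send('unemployment', json.dumps(
                {'date': curr_unem_index, 'value': unemploymentData[curr_unem_index]}).encode())
            curr_stock_index = next_month(curr_stock_index)
            curr_unem_index = next_month(curr_unem_index)
            finish_queries = curr_stock_index > finish_stock_index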
Example #5
import os
import sys
import json
import ast
path_to_append = os.path.dirname(os.path.abspath(__file__)).replace(
    "/TrafficAnalyzer", "")
sys.path.append(path_to_append)
from KafkaConnection.kafka_connection import KafkaConnection as KFK

IP_KAFKA = 'localhost'
PORT_KAFKA = 9092

if len(sys.argv) < 2:
    print(
        'Please use: python simple_consumer.py <topic_name> <csv (optional)>')
else:
    topic = sys.argv[1]
    kafka = KFK(topic=topic, host=IP_KAFKA, port=PORT_KAFKA)
    consumer = kafka.init_Kafka_consumer()
    p = 0  # counts consumed messages

    if len(sys.argv) == 3:
        if sys.argv[2] == 'csv':
            csv_path = '../Data/'
            csv_name = 'voIP'

            for message in consumer:
                p += 1
                transformed_message = ast.literal_eval(
                    json.loads(message.value.replace("u'", "'")))
                if p == 1:
                    header = transformed_message.keys()
                    csv_file = open(csv_path + csv_name, 'w')
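The consumer loop is cut off just after the CSV file is opened. A compact sketch of how the rest of the export could look, assuming every message decodes to a flat dict with a stable set of keys; the use of csv.DictWriter and the with-block are this sketch's choices, not the original code.

# Hypothetical, compact version of the CSV export (not the original code):
import csv

with open(csv_path + csv_name, 'w', newline='') as csv_file:
    writer = None
    for message in consumer:
        row = ast.literal_eval(json.loads(message.value.replace("u'", "'")))
        if writer is None:
            writer = csv.DictWriter(csv_file, fieldnames=list(row.keys()))
            writer.writeheader()
        writer.writerow(row)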
Example #6
    # # Message structure
    # data_message = {'dpiPktNum': '', 'IP_UpLink': '','IP_DstIP': '',
    #                 'IP_FiveTuple': '', 'voIP': 0,'IP_Version': '4', 'is_youtube': 0,
    #                   'IP_TotLen': '', 'IP_SrcIP': '', "timeStamp":'', 'coord_1':0, 'coord_2':0,
    #                    'service_label': ''}

    # Received messages
    topic = "analyzed_traffic"
    brokers = "localhost:9092"

    # Sent messages
    output_topic = 'parserOutput'.encode()  # topic name as bytes
    broker_send = "localhost:9092"
    kafka_send = KFK(host=broker_send.split(":")[0],
                     port=int(broker_send.split(":")[1]),
                     topic=output_topic)
    producer_kafka = kafka_send.init_Kafka_producer()

    # Spark
    spark = (SparkSession.builder.master("local[*]").config(
        "spark.driver.cores", 1).appName("LogAnalyzer").getOrCreate())
    sc = spark.sparkContext
    reading_time_window = 1
    ssc = StreamingContext(sc, reading_time_window)

    # Load model
    # RF_streaming_path = '../Model/RandomForest_Streaming100'
    RF_streaming_path = '../Model/RandomForest_Streaming10'
    model = RandomForestModel.load(sc, RF_streaming_path)
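The example ends once the model is loaded. A sketch of how the stream could be wired end to end, assuming a Spark 2.x setup where the pyspark.streaming.kafka package is available and a kafka-python-style producer; the two feature fields and the driver-side scoring inside foreachRDD are illustrative choices, not the original pipeline.

    # Hypothetical wiring of the pipeline (not in the original example): read
    # the analyzed_traffic topic, score each micro-batch with the random
    # forest on the driver, and forward the enriched records to parserOutput.
    import json
    from pyspark.streaming.kafka import KafkaUtils

    stream = KafkaUtils.createDirectStream(ssc, [topic],
                                           {"metadata.broker.list": brokers})
    records = stream.map(lambda kv: json.loads(kv[1]))

    def score_and_forward(rdd):
        if rdd.isEmpty():
            return
        rows = rdd.collect()
        # Feature choice is illustrative; the real model expects whatever
        # features it was trained on.
        features = sc.parallelize([[float(r['IP_TotLen']), float(r['dpiPktNum'])]
                                   for r in rows])
        predictions = model.predict(features).collect()
        for row, pred in zip(rows, predictions):
            row['model_predict'] = int(pred)
            producer_kafka.send(output_topic.decode(), json.dumps(row).encode())

    records.foreachRDD(score_and_forward)

    ssc.start()
    ssc.awaitTermination()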