        elif sys.argv[1] == 'remove':
            # Remove data, if needed
            remove_data(mongoOBJ, collections['stockExchange'])
            remove_data(mongoOBJ, collections['unemployment'])
            sys.exit()

    # Send data

    try:
        # Kafka: start the broker container and create one producer per topic
        init_kafka_docker()
        kafkaObj_stockExchange = KFK(host=kafka_ip, port=kafka_port, topic='stockExchange')
        producer_stock = kafkaObj_stockExchange.init_Kafka_producer()

        kafkaObj_unemployment = KFK(host=kafka_ip, port=kafka_port, topic='unemployment')
        producer_unem = kafkaObj_unemployment.init_Kafka_producer()
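
        # A minimal usage sketch (not in the original source): assuming
        # init_Kafka_producer() returns a kafka-python KafkaProducer, one
        # monthly record could be published like this:
        #
        #   import json
        #   payload = {'index': '2000-01', 'value': 1234.5}  # hypothetical fields
        #   producer_stock.send('stockExchange', json.dumps(payload).encode('utf-8'))
        #   producer_stock.flush()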

        # Monthly query window: walk the 'YYYY-MM' indices from 2000-01 to 2016-11
        init_stock_index = '2000-01'
        init_unem_index = '2000-01'

        finish_stock_index = '2016-11'
        finish_unem_index = '2016-11'

        curr_stock_index = init_stock_index
        curr_unem_index = init_unem_index
        finish_queries = False
        response_stock = None
        response_unem = None
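
        # Sketch of an assumed helper (not shown in this fragment): the query
        # loop presumably advances the 'YYYY-MM' indices one month at a time,
        # which a small function like this can do.
        def next_month_index(index):
            """Return the 'YYYY-MM' string one month after `index`."""
            year, month = map(int, index.split('-'))
            year, month = (year + 1, 1) if month == 12 else (year, month + 1)
            return '{:04d}-{:02d}'.format(year, month)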

# Example 2
# Imports assumed for this fragment (the snippet begins mid-script; KFK is the
# same Kafka wrapper class used in Example 1):
# from pyspark.sql import SparkSession
# from pyspark.streaming import StreamingContext
# from pyspark.streaming.kafka import KafkaUtils
# from pyspark.mllib.tree import RandomForestModel

    # data_message = {'dpiPktNum': '', 'IP_UpLink': '', 'IP_DstIP': '',
    #                 'IP_FiveTuple': '', 'voIP': 0, 'IP_Version': '4', 'is_youtube': 0,
    #                 'IP_TotLen': '', 'IP_SrcIP': '', 'timeStamp': '', 'coord_1': 0, 'coord_2': 0,
    #                 'service_label': ''}

    # Input: topic and broker for the messages consumed from Kafka
    topic = "analyzed_traffic"
    brokers = "localhost:9092"

    # Output: topic and broker for the messages produced back to Kafka
    output_topic = str.encode('parserOutput')  # topic name as bytes
    broker_send = "localhost:9092"
    kafka_send = KFK(host=broker_send.split(":")[0],
                     port=int(broker_send.split(":")[1]),
                     topic=output_topic)
    producer_kafka = kafka_send.init_Kafka_producer()

    # Spark: local session, then a streaming context with a 1-second batch interval
    spark = (SparkSession.builder.master("local[*]")
             .config("spark.driver.cores", 1)
             .appName("LogAnalyzer")
             .getOrCreate())
    sc = spark.sparkContext
    reading_time_window = 1  # micro-batch interval, in seconds
    ssc = StreamingContext(sc, reading_time_window)

    # Load the pre-trained random forest model (an alternative saved model path is kept for reference)
    # RF_streaming_path = '../Model/RandomForest_Streaming100'
    RF_streaming_path = '../Model/RandomForest_Streaming10'
    model = RandomForestModel.load(sc, RF_streaming_path)
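
    # Hedged usage note (an assumption about the pyspark.mllib API): the loaded
    # model can score a single feature vector or a whole RDD of vectors, e.g.
    #
    #   from pyspark.mllib.linalg import Vectors
    #   label = model.predict(Vectors.dense([0.0, 1.0, 42.0]))  # hypothetical features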

    # Spark Streaming: receiver-less direct stream over the input Kafka topic
    kvs = KafkaUtils.createDirectStream(ssc, [topic],
                                        {"metadata.broker.list": brokers})
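
    # Sketch of the remaining wiring (assumed, not part of this fragment): each
    # record arrives as a (key, value) pair, so a typical next step is to parse
    # the value, score it with the model, forward the result, and start the
    # streaming context, e.g.
    #
    #   import json
    #   def score_and_forward(rdd):
    #       for record in rdd.collect():   # driver-side loop; fine for small demo batches
    #           features = ...             # build the feature vector from `record`
    #           label = model.predict(features)
    #           producer_kafka.send(output_topic, json.dumps({'label': label}).encode())
    #
    #   kvs.map(lambda kv: json.loads(kv[1])).foreachRDD(score_and_forward)
    #   ssc.start()
    #   ssc.awaitTermination()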