def getEmotionEmpployee(data):
    """Return a JSON list of {date, emotion} records for one employee.

    Reads ``alias``, ``initial_time`` and ``final_time`` from the Flask
    request query string (each value may arrive as a comma-separated list;
    only the first element is used; the times are Unix epoch seconds),
    queries Cassandra for the emotions recorded in that window, and returns
    them serialized with ``jsonify``.

    NOTE(review): ``request`` / ``jsonify`` (Flask), ``datetime`` and the
    globals ``ip_cassandra`` / ``keyspace`` are not imported or defined in
    this file — confirm this handler belongs in the Flask app module.

    :param data: unused — presumably required by the route registration;
        verify against the caller.
    """
    employee_alias = request.args.get('alias', '').split(",")[0]
    initial_time = datetime.datetime.fromtimestamp(
        float(request.args.get('initial_time', '').split(",")[0]))
    final_time = datetime.datetime.fromtimestamp(
        float(request.args.get('final_time', '').split(",")[0]))
    # Fresh DAO/session per request. Bug fix: the DAO module is imported as
    # ``dao`` in this file, not ``employeeStatusDao``.
    daoEmployeeStatus = dao.EmployeeDAOImpl()
    daoEmployeeStatus.createsession(ip_cassandra)
    daoEmployeeStatus.setlogger()
    daoEmployeeStatus.loadkeyspace(keyspace)
    # Last argument -1 presumably means "no row limit" — confirm against
    # select_some_emotion_inRange's implementation.
    emotion_list = daoEmployeeStatus.select_some_emotion_inRange(
        employee_alias, initial_time, final_time, -1)
    # Each row is (date, emotion); serialize as a list of dicts.
    return jsonify([{"date": row[0], "emotion": row[1]} for row in emotion_list])
import datetime
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import NewsCore.dao.EmployeeDAOImpl as dao
import KAFKA.Consumer as consumer
import settings as settings

if __name__ == "__main__":
    # Configuration: Cassandra endpoint and keyspace come from settings;
    # the Kafka topic and the message field to persist are fixed here.
    ip_DCOS_cassandra = settings.ip_DCOS_cassandra
    keyspace = settings.keyspace_cassandra
    arousal_topic = 'cyberops_arousal'
    extracted_field = "arousal"

    # Open the Cassandra session and make sure the target table exists
    # (create_table is skipped internally if the table was made on a
    # previous run).
    employee_dao = dao.EmployeeDAOImpl()
    employee_dao.createsession(ip_DCOS_cassandra)
    employee_dao.setlogger()
    employee_dao.loadkeyspace(keyspace)
    employee_dao.create_table()

    # Consume arousal messages from Kafka and persist them via the DAO.
    arousal_consumer = consumer.Consumer(topic=arousal_topic,
                                         field2Extract=extracted_field,
                                         DAO=employee_dao,
                                         ip_kafka_DCOS=settings.ip_kafka_DCOS)
    arousal_consumer.run()