示例#1
0
def edit_user():
    """
    Update information about a given user.

    :return: Users as the table and operation result alert message
    """

    # Assume success until a validation step fails
    alert_type = "success"
    alert_message = ""
    error = False

    # All of these POST fields are required
    required = (request.post_vars.username, request.post_vars.name,
                request.post_vars.organization, request.post_vars.email,
                request.post_vars.role)
    if not all(required):
        alert_type = "danger"
        alert_message = "Some mandatory input is missing!"
        error = True

    # Sanitize the inputs (left empty when validation already failed)
    if error:
        username = name = organization = email = role = ""
    else:
        username = escape(request.post_vars.username)
        name = escape(request.post_vars.name)
        organization = escape(request.post_vars.organization)
        email = escape(request.post_vars.email)
        role = escape(request.post_vars.role)

    # The user to update must already exist
    if not error and not check_username(db, username):
        alert_type = "danger"
        alert_message = "Given username \"" + username + "\" not exists in the system!"
        error = True

    # Ordinary users may not change anybody's role
    if not error and session.role == "user" and role != "user":
        alert_type = "danger"
        alert_message = "You do not have permission to update role of the user \"" + username + "\"!"
        error = True

    if not error:
        # Persist the new values into the users table
        db(db.users.username == username).update(name=name,
                                                 organization=organization,
                                                 email=email,
                                                 role=role)
        alert_message = "User \"" + username + "\" successfully updated."

    # Reload the users table joined with the last login datetime
    users = db(db.users.id == db.users_logins.user_id).select()
    # Render via the standard users management view
    response.view = request.controller + '/users_management.html'
    return dict(alert_type=alert_type,
                alert_message=alert_message,
                users=users)
示例#2
0
def change_password():
    """
    Set a new password for a given user.

    :return: Users as the table and operation result alert message
    """

    # Default alert
    alert_type = "success"
    alert_message = ""
    error = False

    # Check mandatory inputs
    if not (request.post_vars.username and request.post_vars.password_new
            and request.post_vars.password_confirm):
        alert_type = "danger"
        alert_message = "Some mandatory input is missing!"
        error = True

    # Parse inputs
    username = escape(request.post_vars.username) if not error else ""
    password_new = escape(request.post_vars.password_new) if not error else ""
    password_confirm = escape(
        request.post_vars.password_confirm) if not error else ""

    # Check if username exists (consistent with edit_user/delete_user;
    # without this, the select below raises IndexError for unknown users)
    if not error and not check_username(db, username):
        alert_type = "danger"
        alert_message = "Given username \"" + username + "\" not exists in the system!"
        error = True

    # Compare passwords
    if not error and (password_new != password_confirm):
        alert_type = "danger"
        alert_message = "Given passwords are different!"
        error = True

    # Set new password
    if not error:
        # Get user id
        user_id = db(db.users.username == username).select(db.users.id)[0].id
        # Get salt and generate a new hash (renamed so the builtin "hash"
        # is not shadowed)
        salt = db(db.users_auth.user_id == user_id).select(
            db.users_auth.salt)[0].salt
        password_hash = hashlib.sha256(salt + password_new).hexdigest()
        # Update password
        db(db.users_auth.user_id == user_id).update(password=password_hash)
        # Set success message
        alert_message = "Password for the user \"" + username + "\" successfully changed."

    # Get all users join with last login datetime
    users = db(db.users.id == db.users_logins.user_id).select()
    # Use standard view
    response.view = request.controller + '/users_management.html'
    return dict(alert_type=alert_type,
                alert_message=alert_message,
                users=users)
示例#3
0
def delete_user():
    """
    Delete a given user from the system (from tables users, users_auth, users_logins).

    :return: Users as the table and operation result alert message
    """

    # Assume success until a check fails
    alert_type = "success"
    alert_message = ""
    error = False

    # The username field is mandatory
    if not request.post_vars.username:
        alert_type = "danger"
        alert_message = "Username not given!"
        error = True

    # Sanitize the input (empty when validation already failed)
    username = "" if error else escape(request.post_vars.username)

    # The user must exist before it can be removed
    if not error and not check_username(db, username):
        alert_type = "danger"
        alert_message = "Given username \"" + username + "\" not exists in the system!"
        error = True

    if not error:
        # Resolve the user id once, then purge every related table
        user_id = db(db.users.username == username).select(db.users.id)[0].id
        for table_query in (db.users.id == user_id,
                            db.users_auth.user_id == user_id,
                            db.users_logins.user_id == user_id):
            db(table_query).delete()
        alert_message = "User \"" + username + "\" successfully deleted from the system."

    # Reload the users table joined with the last login datetime
    users = db(db.users.id == db.users_logins.user_id).select()
    # Render via the standard users management view
    response.view = request.controller + '/users_management.html'
    return dict(alert_type=alert_type,
                alert_message=alert_message,
                users=users)
示例#4
0
def get_host_tcp_flags():
    """
    Get TCP flags statistics time series for a given host.

    Mandatory GET arguments: beginning, end, aggregation, host_ip.

    Returns: JSON with status "ok" or "error" and requested data as
    semicolon-terminated CSV rows (timestamp plus the sum of each flag).
    """
    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.aggregation and request.get_vars.host_ip):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    aggregation = escape(request.get_vars.aggregation)
    host_ip = escape(request.get_vars.host_ip)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host': myconf.get('consumer.hostname'),
            'port': myconf.get('consumer.port')
        }])
        # Restrict to the requested time range and source host
        elastic_bool = [
            {'range': {'@timestamp': {'gte': beginning, 'lte': end}}},
            {'term': {'src_ip': host_ip}},
        ]

        qx = Q({'bool': {'must': elastic_bool}})
        s = Search(using=client, index='_all').query(qx)
        # Sum each TCP flag per aggregation interval
        s.aggs.bucket('by_time', 'date_histogram', field='@timestamp', interval=aggregation) \
              .metric('sum_of_syn', 'sum', field='stats.tcp_flags.SYN') \
              .metric('sum_of_ack', 'sum', field='stats.tcp_flags.ACK') \
              .metric('sum_of_fin', 'sum', field='stats.tcp_flags.FIN') \
              .metric('sum_of_psh', 'sum', field='stats.tcp_flags.PSH') \
              .metric('sum_of_rst', 'sum', field='stats.tcp_flags.RST') \
              .metric('sum_of_ece', 'sum', field='stats.tcp_flags.ECE') \
              .metric('sum_of_urg', 'sum', field='stats.tcp_flags.URG')

        result = s.execute()

        # Build CSV rows (removed the unused "data_raw" local that the
        # original declared but never used)
        data = "Timestamp,Sum of SYN, Sum of ACK, Sum of FIN, Sum of PSH, Sum of RST, Sum of ECE, Sum of URG;"
        flag_metrics = ('sum_of_syn', 'sum_of_ack', 'sum_of_fin', 'sum_of_psh',
                        'sum_of_rst', 'sum_of_ece', 'sum_of_urg')
        for record in result.aggregations.by_time.buckets:
            row = [str(record.key)]
            row += [str(int(getattr(record, metric).value)) for metric in flag_metrics]
            data += ",".join(row) + ";"

        json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#5
0
def get_host_flows():
    """
    Gets flows, packet and bytes time series for a given host

    Returns: JSON with status "ok" or "error" and requested data.

    """
    # Reject anonymous sessions
    if not session.logged:
        return '{"status": "Error", "data": "You must be logged!"}'

    # All four GET arguments are mandatory
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.aggregation and request.get_vars.host_ip):
        return '{"status": "Error", "data": "Some mandatory argument is missing!"}'

    # Sanitize the inputs
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    aggregation = escape(request.get_vars.aggregation)
    host_ip = escape(request.get_vars.host_ip)

    try:
        # Connect to Elasticsearch using the configured consumer endpoint
        client = elasticsearch.Elasticsearch([{
            'host': myconf.get('consumer.hostname'),
            'port': myconf.get('consumer.port')
        }])
        # Restrict to the requested time range and source host
        must_clauses = [
            {'range': {'@timestamp': {'gte': beginning, 'lte': end}}},
            {'term': {'src_ip': host_ip}},
        ]

        query = Q({'bool': {'must': must_clauses}})
        search = Search(using=client, index='_all').query(query)
        # Sum flows/packets/bytes per aggregation interval
        search.aggs.bucket('by_time', 'date_histogram', field='@timestamp', interval=aggregation) \
              .metric('sum_of_flows', 'sum', field='stats.total.flow') \
              .metric('sum_of_packets', 'sum', field='stats.total.packets') \
              .metric('sum_of_bytes', 'sum', field='stats.total.bytes')

        result = search.execute()

        # CSV rows: timestamp, flows, packets, bytes (semicolon-terminated)
        data = "Timestamp,Number of flows,Number of packets,Number of bytes;"
        for bucket in result.aggregations.by_time.buckets:
            row = [str(bucket.key),
                   str(int(bucket.sum_of_flows.value)),
                   str(int(bucket.sum_of_packets.value)),
                   str(int(bucket.sum_of_bytes.value))]
            data += ",".join(row) + ";"

        return '{"status": "Ok", "data": "' + data + '"}'

    except Exception as e:
        return '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
示例#6
0
def get_top_n_statistics():
    """
    Obtains TOP N statistics about TCP ports scans.

    Mandatory GET arguments: beginning, end, type, number, filter.
    Handled type values (see branches below): "horizontal-sources",
    "horizontal-victims", "vertical-sources"; any other value is treated
    as vertical victims.

    :return: JSON with status "ok" or "error" and requested data.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end and request.get_vars.type and request.get_vars.number and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    # NOTE: "type" and "filter" shadow Python builtins within this function.
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    type = escape(request.get_vars.type)
    number = int(escape(request.get_vars.number))
    filter = escape(request.get_vars.filter)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{'host': myconf.get('consumer.hostname'), 'port': myconf.get('consumer.port')}])
        elastic_bool = []
        # Restrict to the requested time window
        elastic_bool.append({'range': {'@timestamp': {'gte': beginning, 'lte': end}}})
        # Horizontal scans aggregate over destination ports,
        # vertical scans over destination IPs
        if type == "horizontal-sources" or type == "horizontal-victims":
            elastic_bool.append({'term': {'@type': 'portscan_horizontal'}})
            dst_field = 'dst_port.raw'
        else:
            elastic_bool.append({'term': {'@type': 'portscan_vertical'}})
            dst_field = 'dst_ip'
        # Set filter ('none' disables it); match the IP on either side
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'src_ip': filter}})
            elastic_should.append({'term': {'dst_ip': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})
        # Prepare query
        qx = Q({'bool': {'must': elastic_bool}})

        # Elastic can sometimes return not all records that match the search
        # (hence the maximum bucket size 2^31 - 1 and the newest-hit sub-bucket)
        search_ip = Search(using=client, index='_all').query(qx)
        search_ip.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647) \
                      .bucket('by_dst', 'terms', field=dst_field, size=2147483647) \
                      .bucket('by_targets', 'top_hits', size=1, sort=[{'@timestamp': {'order': 'desc'}}])

        results_ip = search_ip.execute()

        # Prepare ordered collection
        counter = collections.Counter()
        if type == "horizontal-sources" or type == "vertical-sources":
            for src_buckets in results_ip.aggregations.by_src.buckets:
                for result in src_buckets.by_dst.buckets:
                    hit = result.by_targets.hits.hits[0]["_source"]
                    # For each source IP add number of targets to the counter
                    counter[hit["src_ip"]] += hit["targets_total"]
        else:  # victims
            for src_buckets in results_ip.aggregations.by_src.buckets:
                for result in src_buckets.by_dst.buckets:
                    hit = result.by_targets.hits.hits[0]["_source"]
                    # Horizontal victims are ports, vertical victims are IPs
                    if type == "horizontal-victims":
                        counter[hit["dst_port"]] += hit["targets_total"]
                    else:
                        counter[hit["dst_ip"]] += hit["targets_total"]

        # Select first N (number) values as "value,count" CSV pairs
        data = ""
        for value, count in counter.most_common(number):
            data += value + "," + str(count) + ","
        # Drop the trailing comma (empty results stay "")
        data = data[:-1]

        # Return info message if no data is present
        if data == "":
            json_response = '{"status": "Empty", "data": "No data found"}'
        # Return data as JSON
        else:
            json_response = '{"status": "Ok", "data": "' + data + '"}'

        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
        return json_response
示例#7
0
def get_histogram_statistics():
    """
    Obtains statistics about TCP ports scans for histogram chart.

    :return: JSON with status "ok" or "error" and requested data.
    """

    # Reject anonymous sessions
    if not session.logged:
        return '{"status": "Error", "data": "You must be logged!"}'

    # All GET arguments are mandatory
    if not (request.get_vars.beginning and request.get_vars.end and request.get_vars.aggregation and request.get_vars.filter):
        return '{"status": "Error", "data": "Some mandatory argument is missing!"}'

    # Sanitize the inputs
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    aggregation = escape(request.get_vars.aggregation)
    ip_filter = escape(request.get_vars.filter)

    try:
        # Connect to the configured Elasticsearch consumer endpoint
        client = elasticsearch.Elasticsearch(
            [{'host': myconf.get('consumer.hostname'), 'port': myconf.get('consumer.port')}])
        # Both scan types within the requested time window
        must_clauses = [
            {'range': {'@timestamp': {'gte': beginning, 'lte': end}}},
            {'terms': {'@type': ['portscan_vertical', 'portscan_horizontal']}},
        ]
        # Optional filter: match the given IP as either source or destination
        if ip_filter != 'none':
            should_clauses = [{'term': {'src_ip': ip_filter}},
                              {'term': {'dst_ip': ip_filter}}]
            must_clauses.append({'bool': {'should': should_clauses}})
        qx = Q({'bool': {'must': must_clauses}})

        # Histogram: per interval, per source IP, sum of flow increments
        search_histogram = Search(using=client, index='_all').query(qx)
        search_histogram.aggs.bucket('by_time', 'date_histogram', field='@timestamp', interval=aggregation) \
            .bucket('by_src', 'terms', field='src_ip', size=2147483647) \
            .bucket('sum_of_flows', 'sum', field='flows_increment')
        histogram = search_histogram.execute()

        # Map each source IP to its list of [timestamp, flows] pairs
        detections = {}
        for interval in histogram.aggregations.by_time.buckets:
            for source in interval.by_src.buckets:
                detections.setdefault(source.key, []).append(
                    [interval.key, source.sum_of_flows.value])

        # Return info message if no data is present
        if not detections:
            return '{"status": "Empty", "data": "No data found"}'

        # Return data as JSON
        return json.dumps({"status": "Ok", "data": detections})

    except Exception as e:
        return '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
示例#8
0
def get_scans_list():
    """
    Obtains list of all ports scans in given time range.

    Mandatory GET arguments: beginning, end, filter ('none' disables the
    optional IP filter).

    :return: JSON with status "ok" or "error" and requested data as CSV
        rows in format: type, timestamp, source ip, destination ip/port,
        targets count, flows, duration (h:mm:ss).
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    # NOTE: "filter" shadows the Python builtin within this function.
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    filter = escape(request.get_vars.filter)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{'host': myconf.get('consumer.hostname'), 'port': myconf.get('consumer.port')}])
        elastic_bool = []
        elastic_bool.append({'range': {'@timestamp': {'gte': beginning, 'lte': end}}})
        elastic_bool.append({'term': {'@type': 'portscan_vertical'}})

        # Set filter (match the given IP as either source or destination)
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'src_ip': filter}})
            elastic_should.append({'term': {'dst_ip': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})

        # Get data for vertical scans: newest hit per (src_ip, dst_ip) pair
        qx = Q({'bool': {'must': elastic_bool}})
        s = Search(using=client, index='_all').query(qx)
        s.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647) \
            .bucket('by_dst_ip', 'terms', field='dst_ip', size=2147483647) \
            .bucket('top_src_dst', 'top_hits', size=1, sort=[{'@timestamp': {'order': 'desc'}}])
        vertical = s.execute()

        # Rebuild the bool query for horizontal scans over the same window
        elastic_bool = []
        elastic_bool.append({'range': {'@timestamp': {'gte': beginning, 'lte': end}}})
        elastic_bool.append({'term': {'@type': 'portscan_horizontal'}})

        # Append filter (elastic_should was built above under the same condition)
        if filter != 'none':
            elastic_bool.append({'bool': {'should': elastic_should}})

        # Get data for horizontal scans: newest hit per (src_ip, dst_port) pair
        rx = Q({'bool': {'must': elastic_bool}})
        r = Search(using=client, index='_all').query(rx)
        r.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647) \
            .bucket('by_dst_port', 'terms', field='dst_port.raw', size=2147483647) \
            .bucket('top_src_dst', 'top_hits', size=1, sort=[{'@timestamp': {'order': 'desc'}}])
        horizontal = r.execute()

        # Result Parsing into CSV in format: type, timestamp, source_ip, destination_ip/port, targets count, duration
        data = ""
        for src_aggregations in vertical.aggregations.by_src.buckets:
            for result in src_aggregations.by_dst_ip.buckets:
                record = result.top_src_dst.hits.hits[0]["_source"]
                # Convert the duration in milliseconds to h:mm:ss
                # (this rebinds "s"/"m"; the Search object is no longer needed)
                m, s = divmod(record["duration_in_milliseconds"] / 1000, 60)
                h, m = divmod(m, 60)
                duration = "%d:%02d:%02d" % (h, m, s)
                data += "Vertical," + record["@timestamp"].replace("T", " ").replace("Z", "") + "," + record["src_ip"] \
                        + "," + record["dst_ip"] + "," + str(record["targets_total"]) + "," + str(record["flows"]) + "," + str(duration) + ","

        for src_aggregations in horizontal.aggregations.by_src.buckets:
            for result in src_aggregations.by_dst_port.buckets:
                record = result.top_src_dst.hits.hits[0]["_source"]
                # Convert the duration in milliseconds to h:mm:ss
                m, s = divmod(record["duration_in_milliseconds"] / 1000, 60)
                h, m = divmod(m, 60)
                duration = "%d:%02d:%02d" % (h, m, s)
                data += "Horizontal," + record["@timestamp"].replace("T", " ").replace("Z", "") + "," + record["src_ip"] \
                        + "," + record["dst_port"] + "," + str(record["targets_total"]) + "," + str(record["flows"]) + "," + str(duration) + ","
        # Drop the trailing comma (empty results stay "")
        data = data[:-1]

        # Return info message if no data is present
        if data == "":
            json_response = '{"status": "Empty", "data": "No data found"}'
        # Return data as JSON
        else:
            json_response = '{"status": "Ok", "data": "' + data + '"}'

        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
        return json_response
示例#9
0
def get_summary_statistics():
    """
    Obtains statistics about current sum of flows, packets, bytes.

    :return: JSON with status "ok" or "error" and requested data.

    NOTE(review): unlike the other endpoints in this file, this one
    performs no session.logged check — confirm it is intentionally public.
    """

    try:
        # Connect to the configured Elasticsearch consumer endpoint
        client = elasticsearch.Elasticsearch(
            [{'host': myconf.get('consumer.hostname'),
              'port': myconf.get('consumer.port')}])
        # Protocol statistics records from the last five minutes
        must_clauses = [
            {'range': {'@timestamp': {'gte': "now-5m", 'lte': "now"}}},
            {'term': {'@type': 'protocols_statistics'}},
        ]

        qx = Q({'bool': {'must': must_clauses}})
        s = Search(using=client, index='_all').query(qx)
        # Overall sums across the whole window
        for agg_name, agg_field in (('sum_of_flows', 'flows'),
                                    ('sum_of_packets', 'packets'),
                                    ('sum_of_bytes', 'bytes')):
            s.aggs.bucket(agg_name, 'sum', field=agg_field)
        s.sort('@timestamp')
        result = s.execute()

        # Single CSV summary row after the header
        data = "Timestamp, Flows, Packets, Bytes;"
        timestamp = "Last 5 Minutes"
        sums = [str(int(result.aggregations[name]['value']))
                for name in ('sum_of_flows', 'sum_of_packets', 'sum_of_bytes')]
        data += timestamp + ', ' + ', '.join(sums)

        return '{"status": "Ok", "data": "' + data + '"}'

    except Exception as e:
        return '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(str(e)) + '"}'
def get_records_list():
    """
    Obtains list of all records for given type given time range.

    Mandatory GET arguments: beginning, end, filter ('none' disables the
    optional IP filter). Queries 'external_dns_resolver' records.

    :return: JSON with status "ok" or "error" and requested data as CSV
        rows in format: timestamp, source ip, resolver ip, flows.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    # NOTE: "filter" shadows the Python builtin within this function.
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    filter = escape(request.get_vars.filter)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'@type': 'external_dns_resolver'}})

        # Set filter (match the given IP as source or resolver)
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'src_ip': filter}})
            elastic_should.append({'term': {'resolver_ip.raw': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})
        qx = Q({'bool': {'must': elastic_bool}})

        # Search with maximum size aggregations (2^31 - 1); keep only the
        # newest hit per (src_ip, resolver_ip) pair.
        # NOTE(review): the sort key is 'timestamp' while the range filter
        # uses '@timestamp' — confirm the intended field name.
        search = Search(using=client, index='_all').query(qx)
        search.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647)\
              .bucket('by_dst', 'terms', field='resolver_ip.raw', size=2147483647)\
              .bucket('top_src_dst', 'top_hits', size=1, sort=[{'timestamp': {'order': 'desc'}}])
        results = search.execute()

        # Result Parsing into CSV in format: timestamp, source_ip, resolver_ip, flows
        data = ""
        for src_aggregations in results.aggregations.by_src.buckets:
            for result in src_aggregations.by_dst.buckets:
                record = result.top_src_dst.hits.hits[0]["_source"]
                data += record["timestamp"].replace("T", " ").replace("Z", "") + "," + record["src_ip"] + "," \
                    + record["resolver_ip"] + "," + str(record["flows"]) + ","
        # Drop the trailing comma (empty results stay "")
        data = data[:-1]

        json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#11
0
def get_records_list():
    """
    Obtains list of all records for given type given time range.

    Mandatory GET arguments: beginning, end, filter ('none' disables the
    optional IP filter). Queries 'open_dns_resolver' records.

    :return: JSON with status "ok" or "error" and requested data as CSV
        rows in format: resolver ip, top resolved query, top resolved
        data, top query flow count, first timestamp, total flows.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    # NOTE: "filter" shadows the Python builtin within this function.
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    filter = escape(request.get_vars.filter)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch(
            [{'host': myconf.get('consumer.hostname'), 'port': myconf.get('consumer.port')}])
        elastic_bool = []
        # NOTE(review): the range field here is 'timestamp', not '@timestamp'
        # as in most other queries in this file — confirm the intended field.
        elastic_bool.append({'range': {'timestamp': {'gte': beginning, 'lte': end}}})
        elastic_bool.append({'term': {'@type': 'open_dns_resolver'}})

        # Set filter (match the given IP as resolver or resolved data)
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'resolver_ip.raw': filter}})
            elastic_should.append({'term': {'resolved_data.raw': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})
        qx = Q({'bool': {'must': elastic_bool}})

        # Search with maximum size aggregations (2^31 - 1):
        # per resolver IP and resolved data, sum the flows; additionally keep
        # the highest-flows hit per pair and the earliest record per resolver
        search = Search(using=client, index='_all').query(qx)
        search.aggs.bucket('by_src', 'terms', field='resolver_ip.raw', size=2147483647) \
            .bucket('by_data', 'terms', field='resolved_data.raw', size=2147483647) \
            .bucket('sum_by_ip', 'sum', field='flows')
        search.aggs['by_src']['by_data'].bucket('by_query', 'top_hits', size=1, sort=[{'flows': {'order': 'desc'}}])
        search.aggs['by_src'].bucket('by_start_time', 'top_hits', size=1, sort=[{'timestamp': {'order': 'asc'}}])
        results = search.execute()

        # Prepare ordered collection
        counter = collections.Counter()
        for resolver_buckets in results.aggregations.by_src.buckets:
            domain_counter = collections.Counter()

            # Calculate sums for each resolved query and resolver ip
            for dst_buckets in resolver_buckets.by_data.buckets:
                query = dst_buckets["by_query"]["hits"].hits[0]["_source"].resolved_query
                domain_counter[(resolver_buckets.key, dst_buckets.key, query)] += int(dst_buckets.sum_by_ip.value)

            # Pick the most frequent resolved data/query for this resolver
            top_resolved_data_for_ip = domain_counter.most_common(1)[0][0][1]
            top_resolved_query_for_ip = domain_counter.most_common(1)[0][0][2]
            top_resolved_query_flows_count = domain_counter.most_common(1)[0][1]
            first_timestamp_for_ip = resolver_buckets.by_start_time[0].timestamp.replace("T", " ").replace("Z", "")

            # The key carries the summary columns; the value accumulates
            # the total flow count for this resolver IP
            counter[
                (resolver_buckets.key, top_resolved_query_for_ip, top_resolved_data_for_ip,
                 top_resolved_query_flows_count, first_timestamp_for_ip)] \
                += sum(domain_counter.values())

        # Result Parsing into CSV in format: timestamp, resolver_ip, resolved_data, flows
        data = ""
        for record in counter.most_common():
            data += str(record[0][0]) + "," + str(record[0][1]) + "," + str(record[0][2]) + "," \
                    + str(record[0][3]) + "," + str(record[0][4]) + "," + str(record[1]) + ","
        # Drop the trailing comma (empty results stay "")
        data = data[:-1]

        json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Exception: ' + escape(str(e)) + '"}'
        return json_response
示例#12
0
def get_top_n_statistics():
    """
    Obtains TOP N DNS statistics.

    GET arguments (all mandatory):
        beginning, end -- time range for the Elasticsearch query
        type           -- statistic type stored in the "@stat_type" field
        number         -- how many top records to return

    :return: JSON with status "Ok", "Empty", or "Error" and requested data.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.type and request.get_vars.number):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    stat_type = escape(request.get_vars.type)  # local rename: avoid shadowing builtin "type"
    # Validate "number" here so a malformed value produces a JSON error
    # instead of an unhandled ValueError (the int() call was unguarded).
    try:
        number = int(escape(request.get_vars.number))
    except (TypeError, ValueError):
        json_response = '{"status": "Error", "data": "Argument number must be an integer!"}'
        return json_response

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'@stat_type': stat_type}})

        # Prepare query
        qx = Q({'bool': {'must': elastic_bool}})

        # Set query according to the statistic type
        if stat_type == "queried_by_ip":
            # Per key: the single top IP (ordered by summed value) plus the key total.
            search_ip = Search(using=client, index='_all').query(qx)
            search_ip.aggs.bucket('all_nested', 'nested', path='data_array') \
                .bucket('by_key', 'terms', field='data_array.key.raw', size=2147483647)\
                .bucket('by_ip', 'terms', field='data_array.ip', size=1, order={'sum_by_ip': 'desc'}) \
                .bucket('sum_by_ip', 'sum', field='data_array.value')
            search_ip.aggs['all_nested']['by_key'].bucket(
                'sum_total', 'sum', field='data_array.value')
            results = search_ip.execute()
        else:
            # Per key: plain sum of values.
            search_ip = Search(using=client, index='_all').query(qx)
            search_ip.aggs.bucket('all_nested', 'nested', path='data_array') \
                .bucket('by_key', 'terms', field='data_array.key.raw', size=2147483647) \
                .bucket('stats_sum', 'sum', field='data_array.value')
            results = search_ip.execute()

        # Prepare data variable
        data = ""
        # Prepare ordered collection
        counter = collections.Counter()

        if stat_type == "queried_by_ip":
            for record in results.aggregations.all_nested.by_key.buckets:
                top_ip = record.by_ip.buckets[0]
                counter[(record.key, top_ip.key, int(
                    top_ip.sum_by_ip.value))] = int(record.sum_total.value)

            # Select top N (number) values; CSV cells: key, ip, ip sum, key total
            for value, count in counter.most_common(number):
                data += value[0] + "," + value[1] + "," + str(
                    value[2]) + "," + str(count) + ","
        else:
            for all_buckets in results.aggregations.all_nested.by_key:
                counter[all_buckets.key] += int(all_buckets.stats_sum.value)

            # Select top N (number) values; CSV cells: key, sum
            for value, count in counter.most_common(number):
                data += value + "," + str(count) + ","

        # Remove trailing comma
        data = data[:-1]

        if data == "":
            json_response = '{"status": "Empty", "data": "No data found"}'
        else:
            json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#13
0
def get_records_list():
    """
    Obtains list of all records for given type given time range.

    GET arguments (all mandatory): beginning, end, type.

    :return: JSON with status "ok" or "error" and requested data.
    """

    # Only logged-in users may query records.
    if not session.logged:
        return '{"status": "Error", "data": "You must be logged!"}'

    # All three arguments are required.
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.type):
        return '{"status": "Error", "data": "Some mandatory argument is missing!"}'

    # Sanitize the inputs before using them in the query.
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    stat_type = escape(request.get_vars.type)

    try:
        # Connect to Elasticsearch on the configured consumer endpoint.
        client = elasticsearch.Elasticsearch(
            [{'host': myconf.get('consumer.hostname'),
              'port': myconf.get('consumer.port')}])

        # Restrict results to the requested time range and statistic type.
        must_clauses = [
            {'range': {'@timestamp': {'gte': beginning, 'lte': end}}},
            {'term': {'@stat_type': stat_type}},
        ]
        qx = Q({'bool': {'must': must_clauses}})

        # Aggregate the nested data array: one bucket per key with summed values.
        search_ip = Search(using=client, index='_all').query(qx)
        search_ip.aggs.bucket('all_nested', 'nested', path='data_array')\
            .bucket('by_key', 'terms', field='data_array.key.raw', size=2147483647)\
            .bucket('stats_sum', 'sum', field='data_array.value')
        results = search_ip.execute()

        # Serialize each bucket as "key,sum" joined by commas
        # (equivalent to appending with a trailing comma and stripping it).
        rows = []
        for bucket in results.aggregations.all_nested.by_key:
            rows.append(bucket.key + "," + str(int(bucket.stats_sum.value)))
        data = ",".join(rows)

        return '{"status": "Ok", "data": "' + data + '"}'

    except Exception as e:
        return '{"status": "Error", "data": "Exception: ' + escape(str(e)) + '"}'
示例#14
0
def login():
    """
    Verify given username and corresponding password, and log the user in if
    the credentials are correct. On success the session variables are filled
    from the users table, the last-login timestamp is updated, and the request
    is redirected to the caller's page (or the index).

    :return: dict with alert_type/alert_message rendered via error.html when
        login fails; on success redirect() takes over and nothing is returned.
    """

    # Default alert
    alert_type = ""
    alert_message = ""
    error = False

    # Check mandatory inputs
    if not (request.post_vars.username and request.post_vars.password
            and request.post_vars.current_page):
        alert_type = "danger"
        alert_message = "No credentials given!"
        error = True

    # Parse inputs (sanitized; left empty when the mandatory check failed)
    username = escape(request.post_vars.username) if not error else ""
    password = escape(request.post_vars.password) if not error else ""
    current_page = escape(request.post_vars.current_page) if not error else ""

    # Check if username exists and corresponds to the password
    if not error and not check_password(db, username, password):
        alert_type = "danger"
        alert_message = "Username or password is incorrect!"
        error = True

    # Set session variables
    if not error:
        # Get user info
        user_info = db(db.users.username == username).select(db.users.ALL)[0]
        # Set session
        session.logged = True
        session.user_id = user_info.id
        session.username = user_info.username
        session.name = user_info.name
        session.organization = user_info.organization
        session.role = user_info.role
        session.email = user_info.email

    # Set login time.
    if not error:
        db(db.users_logins.user_id == session.user_id).update(
            last_login=datetime.now())

    # Redirect to the index if everything was ok.
    # NOTE(review): current_page is client-supplied, so redirecting to it is an
    # open-redirect risk — confirm it is constrained upstream. Presumably
    # redirect() raises an HTTP exception (web2py convention), so execution
    # does not continue past this block on success — verify.
    if not error:
        if ("login" in current_page) or ("logout" in current_page):
            redirect("/index")
        else:
            redirect(current_page)

    # Use standard error view (reached only when login failed)
    response.view = 'error.html'
    return dict(
        alert_type=alert_type,
        alert_message=alert_message,
    )
示例#15
0
def get_host_distinct_peers():
    """
    Gets the distinct-peer-count time series (average, minimum, maximum) for a
    given host.

    GET arguments (all mandatory): beginning, end, aggregation (histogram
    interval), host_ip.

    Returns: JSON with status "ok" or "error" and requested data.
    """
    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.aggregation and request.get_vars.host_ip):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    aggregation = escape(request.get_vars.aggregation)
    host_ip = escape(request.get_vars.host_ip)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'src_ip': host_ip}})

        qx = Q({'bool': {'must': elastic_bool}})
        s = Search(using=client, index='_all').query(qx)
        # Fix: the original had the aggregation functions swapped —
        # 'peer_max' computed a 'min' and 'peer_min' computed a 'max'.
        s.aggs.bucket('by_time', 'date_histogram', field='@timestamp', interval=aggregation) \
              .metric('peer_avg', 'avg', field='stats.peer_number') \
              .metric('peer_max', 'max', field='stats.peer_number') \
              .metric('peer_min', 'min', field='stats.peer_number')

        result = s.execute()

        # Build the chart series: averages, [min, max] bands, and raw extremes.
        data_avg = []
        data_min_max = []
        data_max = []
        data_min = []
        for record in result.aggregations.by_time.buckets:
            timestamp = record.key
            # Missing metric values map to None so the chart can show gaps.
            maximum = round(record.peer_max.value,
                            2) if record.peer_max.value else None
            minimum = round(record.peer_min.value,
                            2) if record.peer_min.value else None
            data_avg.append([
                timestamp,
                round(record.peer_avg.value, 2)
                if record.peer_avg.value else None
            ])
            data_min_max.append([timestamp, [minimum, maximum]])
            data_max.append(maximum)
            data_min.append(minimum)

        json_response = {
            "status": "Ok",
            "data": {
                "data_avg": data_avg,
                "data_min_max": data_min_max,
                "data_min": data_min,
                "data_max": data_max
            }
        }
        return (json.dumps(json_response))

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#16
0
def add_user():
    """
    Add a new user to the system (into the table users, users_auth, users_logins).

    POST arguments (all mandatory): username, name, organization, email, role,
    password, password_confirm.

    :return: Users as the table and operation result alert message
    """

    # Default alert
    alert_type = "success"
    alert_message = ""
    error = False

    # Check mandatory inputs
    if not (request.post_vars.username and request.post_vars.name
            and request.post_vars.organization and request.post_vars.email
            and request.post_vars.role and request.post_vars.password
            and request.post_vars.password_confirm):
        alert_type = "danger"
        alert_message = "Some mandatory input is missing!"
        error = True

    # Parse inputs (sanitized; left empty when the mandatory check failed)
    username = escape(request.post_vars.username) if not error else ""
    name = escape(request.post_vars.name) if not error else ""
    organization = escape(request.post_vars.organization) if not error else ""
    email = escape(request.post_vars.email) if not error else ""
    role = escape(request.post_vars.role) if not error else ""
    password = escape(request.post_vars.password) if not error else ""
    password_confirm = escape(
        request.post_vars.password_confirm) if not error else ""

    # Check if username exists
    if not error and check_username(db, username):
        alert_type = "danger"
        alert_message = "Given username \"" + username + "\" already exists in the system!"
        error = True

    # Compare passwords
    if not error and (password != password_confirm):
        alert_type = "danger"
        alert_message = "Given passwords are different!"
        error = True

    # Insert user into tables
    if not error:
        # Insert into users table
        db.users.insert(username=username,
                        name=name,
                        organization=organization,
                        email=email,
                        role=role)
        # Get new user id
        user_id = db(db.users.username == username).select(db.users.id)[0].id
        # Generate the salt from a cryptographically secure source —
        # plain random.choice() (Mersenne Twister) is predictable; SystemRandom
        # draws from os.urandom and needs no new import.
        secure_random = random.SystemRandom()
        salt = ''.join(
            secure_random.choice(
                '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
            ) for i in range(20))
        # NOTE(review): a single round of salted SHA-256 is weak for password
        # storage (PBKDF2/bcrypt/scrypt preferred), but changing it would break
        # hashes verified by check_password. Also assumes Python 2 str
        # semantics; under Python 3 (salt + password) must be encoded before
        # hashing — confirm the runtime.
        hash = hashlib.sha256(salt + password).hexdigest()
        # Insert into users_auth table
        db.users_auth.insert(user_id=user_id, salt=salt, password=hash)
        # Set last login to default
        db.users_logins.insert(user_id=user_id, last_login=datetime.now())
        # Set success message
        alert_message = "User \"" + username + "\" successfully added to the system."

    # Get all users join with last login datetime
    users = db(db.users.id == db.users_logins.user_id).select()
    # Use standard view
    response.view = request.controller + '/users_management.html'
    return dict(alert_type=alert_type,
                alert_message=alert_message,
                users=users)
示例#17
0
def get_heatmap_statistics():
    """
    Obtains data for heatmap chart in a time range.

    GET arguments (all mandatory): beginning, end, network (CIDR).

    :return: JSON with status "ok" or "error" and requested data.
    """
    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.network):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    network = escape(request.get_vars.network)

    # Get the first and last IP from given CIDR.
    # Fix: parse inside a try so an invalid CIDR yields the JSON error
    # contract instead of an unhandled exception (this ran unguarded).
    try:
        cidr = IPNetwork(network)
        cidr_first = cidr[0]
        cidr_last = cidr[-1]
    except Exception as e:
        json_response = '{"status": "Error", "data": "Invalid network argument: ' + escape(
            str(e)) + '"}'
        return json_response

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'src_ip': network}})

        qx = Q({'bool': {'must': elastic_bool}})
        s = Search(using=client, index='_all').query(qx)
        s.aggs.bucket('by_host', 'terms', field='src_ip', size=2147483647) \
              .bucket('sum_of_flows', 'sum', field='stats.total.flow')

        result = s.execute()

        # Generate zero values for all IP addresses so the chart has a cell
        # for every address in the range, even without observed flows.
        # NOTE(review): assumes an IPv4 network no wider than the last two
        # octets (C and D segments) — confirm the expected input range.
        empty_data = ""
        segment_first = str(cidr_first).split(".")
        segment_last = str(cidr_last).split(".")
        for c_segment in range(int(segment_first[2]),
                               int(segment_last[2]) + 1):  # Do at least once
            for d_segment in range(int(segment_first[3]),
                                   int(segment_last[3]) +
                                   1):  # Do at least once
                empty_data += str(d_segment) + "," + str(c_segment) + ",0;"

        data = ""
        for bucket in result.aggregations.by_host.buckets:
            ip = bucket.key.split(".")
            # switch D and C segment of IP to correct view in the chart
            data += ip[3] + "," + ip[2] + "," + str(
                bucket.sum_of_flows.value) + ";"

        # Create JSON response (combine empty_data and add values)
        json_response = '{"status": "Ok", "data": "' + empty_data + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#18
0
def get_attacks_list():
    """
    Obtains list of all detections by Pattern Finder in given time range.

    GET arguments: beginning, end, filter (mandatory); config_filter
    (optional, the value 'none' disables it).

    :return: JSON with status "ok" or "error" and requested data.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        # Fix: a bare "return" here sent None to the client instead of the
        # error JSON used by every other error path.
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    filter = escape(request.get_vars.filter)
    # config_filter is optional — treat a missing value as 'none' rather than
    # passing None to escape().
    config_filter = escape(request.get_vars.config_filter) \
        if request.get_vars.config_filter else 'none'

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'@type': 'pattern_finder'}})

        if config_filter != 'none':
            elastic_bool.append({'term': {'configuration.raw': config_filter}})

        # Set filter (matches either endpoint of the flow)
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'src_ip': filter}})
            elastic_should.append({'term': {'dst_ip': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})

        qx = Q({'bool': {'must': elastic_bool}})

        # Search with maximum size aggregations
        search = Search(using=client, index='_all').query(qx)
        search.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647) \
                   .bucket('by_dst', 'terms', field='dst_ip', size=2147483647) \
                   .bucket('top_src_dst', 'top_hits', size=1, sort=[{'@timestamp': {'order': 'desc'}}])
        results = search.execute()

        # Result Parsing into CSV in format: timestamp, source_ip, destination_ip, flows, duration
        data = ""
        for src_aggregations in results.aggregations.by_src.buckets:
            for result in src_aggregations.by_dst.buckets:
                record = result.top_src_dst.hits.hits[0]["_source"]
                timestamp = record['@timestamp'].replace('T',
                                                         ' ').replace('Z', '')
                closest_patterns = ""
                for pattern in record['closest_patterns']:
                    closest_patterns += str(pattern) + '; '

                # Default so a record whose closest pattern is missing from
                # data_array maps to "Very Low" instead of raising NameError.
                array_ratio = 0.0
                for data_array in record['data_array']:
                    if data_array['name'] == record['closest_patterns'][0]:
                        distribution = data_array['distribution']
                        # Fix: integer division — "/" yields a float under
                        # Python 3, which is not a valid slice index.
                        mid_index = len(distribution) // 2
                        array_ratio = sum(distribution[:mid_index]) / float(sum(distribution[mid_index:])) \
                            if sum(distribution[mid_index:]) else float('inf')

                # Map the first-half/second-half distribution ratio to a
                # human-readable confidence level.
                if array_ratio < 1.1:
                    confidence = "Very Low"
                elif array_ratio < 1.25:
                    confidence = "Low"
                elif array_ratio < 2:
                    confidence = "Medium"
                elif array_ratio < 5:
                    confidence = "High"
                else:
                    confidence = "Very high"

                closest_patterns = closest_patterns[:-2]
                data += timestamp + ',' + record["src_ip"] + ',' + record["dst_ip"] + ',' \
                        + (record.get('configuration') or 'unknown') + ',' + closest_patterns + ',' + confidence + ','
        data = data[:-1]

        json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
def get_top_n_statistics():
    """
    Obtains TOP N statistics about DNS external resolvers.

    GET arguments (all mandatory): beginning, end, type ("sources" counts
    distinct resolvers per source, otherwise sources per resolver), number,
    filter ('none' disables filtering).

    :return: JSON with status "Ok", "Empty", or "Error" and requested data.
    """

    # Check login
    if not session.logged:
        json_response = '{"status": "Error", "data": "You must be logged!"}'
        return json_response

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.type and request.get_vars.number
            and request.get_vars.filter):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    stat_type = escape(request.get_vars.type)  # local rename: avoid shadowing builtin "type"
    filter = escape(request.get_vars.filter)
    # Validate "number" here so a malformed value produces a JSON error
    # instead of an unhandled ValueError (the int() call was unguarded).
    try:
        number = int(escape(request.get_vars.number))
    except (TypeError, ValueError):
        json_response = '{"status": "Error", "data": "Argument number must be an integer!"}'
        return json_response

    try:
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'@type': 'external_dns_resolver'}})

        # Set filter (matches either the source or the resolver address)
        if filter != 'none':
            elastic_should = []
            elastic_should.append({'term': {'src_ip': filter}})
            elastic_should.append({'term': {'resolver_ip.raw': filter}})
            elastic_bool.append({'bool': {'should': elastic_should}})
        # Prepare query
        qx = Q({'bool': {'must': elastic_bool}})

        # Get ordered data (with maximum size aggregation)
        # NOTE(review): sibling queries sort top_hits on '@timestamp';
        # confirm whether 'timestamp' is intentional for this document type.
        search = Search(using=client, index='_all').query(qx)
        search.aggs.bucket('by_src', 'terms', field='src_ip', size=2147483647)\
              .bucket('by_dst', 'terms', field='resolver_ip.raw', size=2147483647)\
              .bucket('top_src_dst', 'top_hits', size=1, sort=[{'timestamp': {'order': 'desc'}}])
        results = search.execute()

        # Prepare ordered collection
        counter = collections.Counter()
        for src_buckets in results.aggregations.by_src.buckets:
            if stat_type == "sources":
                # Number of distinct resolvers contacted by this source.
                counter[src_buckets.key] = len(src_buckets.by_dst.buckets)
            else:
                # Number of distinct sources per resolver.
                for dst_buckets in src_buckets.by_dst.buckets:
                    counter[dst_buckets.key] += 1

        # Select first N (number) values
        data = ""
        for ip, count in counter.most_common(number):
            data += ip + "," + str(count) + ","

        # Remove trailing comma
        data = data[:-1]

        if data == "":
            json_response = '{"status": "Empty", "data": "No data found"}'
        else:
            json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response
示例#20
0
def get_statistics():
    """
    Obtains statistics about TCP, UDP and other protocols.

    GET arguments (all mandatory): beginning, end, aggregation (histogram
    interval), type (name of the field to sum, one of {flows, packets, bytes}).

    :return: JSON with status "ok" or "error" and requested data.
    """

    # NOTE(review): unlike the sibling endpoints this one performs no
    # session.logged check — confirm whether it is intentionally public.

    # Check mandatory inputs
    if not (request.get_vars.beginning and request.get_vars.end
            and request.get_vars.aggregation and request.get_vars.type):
        json_response = '{"status": "Error", "data": "Some mandatory argument is missing!"}'
        return json_response

    # Parse inputs and set correct format
    beginning = escape(request.get_vars.beginning)
    end = escape(request.get_vars.end)
    aggregation = escape(request.get_vars.aggregation)
    # Name of field to create sum from, one of {flows, packets, bytes}
    # (local rename: avoid shadowing builtin "type").
    sum_field = escape(request.get_vars.type)

    try:
        # Elastic query
        client = elasticsearch.Elasticsearch([{
            'host':
            myconf.get('consumer.hostname'),
            'port':
            myconf.get('consumer.port')
        }])
        elastic_bool = []
        elastic_bool.append(
            {'range': {
                '@timestamp': {
                    'gte': beginning,
                    'lte': end
                }
            }})
        elastic_bool.append({'term': {'@type': 'protocols_statistics'}})

        qx = Q({'bool': {'must': elastic_bool}})
        s = Search(using=client, index='_all').query(qx)
        # Fix: size=0 is inconsistent with every other terms aggregation in
        # this file (which use an explicit maximum) and is rejected by
        # Elasticsearch 5+.
        s.aggs.bucket('by_time', 'date_histogram', field='@timestamp', interval=aggregation)\
              .bucket('by_type', 'terms', field='protocol.raw', size=2147483647)\
              .bucket('sum_of_flows', 'sum', field=sum_field)
        # Fix: Search.sort() returns a modified copy; the original discarded
        # the return value, leaving the query unsorted.
        s = s.sort('@timestamp')
        result = s.execute()

        # Result Parsing into CSV in format: timestamp, tcp protocol value, udp protocol value, other protocols value
        data_raw = {}
        data = "Timestamp, TCP protocol, UDP protocol, Other protocols;"  # CSV header
        for interval in result.aggregations.by_time.buckets:
            timestamp = interval.key
            # 'null' placeholders for protocols missing in this interval.
            timestamp_values = ['null'] * 3
            data_raw[timestamp] = timestamp_values
            for bucket in interval.by_type.buckets:
                value = bucket.sum_of_flows.value
                if bucket.key == "tcp":
                    data_raw[timestamp][0] = str(int(value))
                elif bucket.key == "udp":
                    data_raw[timestamp][1] = str(int(value))
                elif bucket.key == "other":
                    data_raw[timestamp][2] = str(int(value))

            data += str(timestamp) + ", " + str(
                data_raw[timestamp][0]) + ", " + str(
                    data_raw[timestamp][1]) + ", " + str(
                        data_raw[timestamp][2]) + ";"

        json_response = '{"status": "Ok", "data": "' + data + '"}'
        return json_response

    except Exception as e:
        json_response = '{"status": "Error", "data": "Elasticsearch query exception: ' + escape(
            str(e)) + '"}'
        return json_response