def test_invalid_message_data(migrate_table, mocker):
    """Reject a message whose metadata is missing a required field.

    The mocked Kafka payload's metadata JSON carries no ``vnocAlarmID``
    entry — presumably the missing required data point (confirm against
    the consumer's validation) — so nothing must reach the database.
    """
    # Metadata JSON copied verbatim; note it contains no vnocAlarmID key.
    metadata_json = (
        "{\"affectedNode\":\"LX000191\",\"affectedEquipment\":\"RRU-B8-S2\","
        "\"affectedSite\":\"LX000191\",\"alarmCategory\":\"FAULT\","
        "\"alarmGroup\":\"003--1143978760-SubNetwork=Rijeka,MeContext=LX000191,"
        "ManagedElement=LX000191,Equipment=1,FieldReplaceableUnit=RRU-B8-S2,"
        "RfPort=A-1460123\",\"alarmCSN\":\"1460123\",\"alarmID\":\"0\","
        "\"alarmMO\":\"SubNetwork=Rijeka,MeContext=LX000191,ManagedElement=LX000191,"
        "Equipment=1,FieldReplaceableUnit=RRU-B8-S2,RfPort=A\","
        "\"alarmNotificationType\":\"Minor\",\"alarmLastSeqNo\":\"1460123\","
        "\"alarmEventTime\":\"2020-01-21T15:45:15+02:00\"}"
    )
    mocker.patch(
        "consumer.consumer.KafkaConsumer",
        return_value=[MagicMock(value={"metadata": metadata_json})],
    )

    consume_messages()

    with Session() as session:
        row_count = session.fetch_one("SELECT COUNT(*) FROM metrics;")

    # Message is not saved into db because it has invalid format
    assert row_count == (0, )
# Beispiel #2
def test_create_drop_table():
    """Migrating and then dropping the metrics table leaves no relation behind."""
    migrate_metrics_table()
    drop_metrics_table()

    with Session() as session:
        # to_regclass() returns NULL when the named relation does not exist,
        # which proves the drop actually removed the table.
        lookup = session.fetch_one("SELECT to_regclass('metrics');")

    assert lookup == (None, )
def test_most_affected_nodes_histo(populate_table_with_test_data):
    """_most_affected_nodes ranks nodes by descending alarm frequency."""
    # Fixture seeds three alarms for LX00018.
    assert _most_affected_nodes() == [('LX00018', 3)]  # node_id, times affected

    # Seed two additional alarms for a second node, LX00012.
    extra_row = (
        "LX00012",
        "RRU-B1B3-S1-2",
        "LX00012",
        "FAULT",
        "003--1143982548-SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015",
        "1460077",
        "9175144",
        "SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015bleUnit=RRU-B1B3-S1-2",
        "Minor",
        "1460077",
        "2020-01-24T09:19:50+02:00",
        "ERA005",
    )
    with Session() as session:
        for _ in range(2):
            session.execute(
                """
                INSERT INTO metrics(
                    affected_node, affected_equipment, affected_site, alarm_category,
                    alarm_group, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                    alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, extra_row)
        session.commit()

    # Histogram now lists both nodes, most frequent first.
    assert _most_affected_nodes() == [('LX00018', 3), ('LX00012', 2)]
# Beispiel #4
def populate_table_with_test_data(migrate_table):
    """Seed the metrics table with three identical LX00018 alarm rows.

    NOTE(review): presumably a pytest fixture (depends on ``migrate_table``);
    the decorator is not visible in this chunk — confirm at the call sites.
    """
    seed_row = (
        "LX00018",
        "RRU-B1B3-S1-2",
        "LX00015",
        "FAULT",
        "003--1143982548-SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015",
        "1460077",
        "9175147",
        "SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015bleUnit=RRU-B1B3-S1-2",
        "Minor",
        "1460077",
        "2020-01-24T09:19:50+02:00",
        "ERA005",
    )
    with Session() as session:
        for _ in range(3):
            session.execute(
                """
                INSERT INTO metrics(
                    affected_node, affected_equipment, affected_site, alarm_category,
                    alarm_group, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                    alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, seed_row)
        session.commit()
def test_connection_is_called_on_session_open(psycopg2_connect_fixture):
    """Entering/exiting a Session creates a cursor and closes both handles."""
    with Session() as session:
        pass

    # One cursor opened on enter; connection and cursor closed on exit.
    observed = (
        session._connection.cursor.call_count,
        session._connection.close.call_count,
        session._cursor.close.call_count,
    )
    assert observed == (1, 1, 1)
# Beispiel #6
def test_fetch_one(populate_table_with_test_data):
    """fetch_one returns a single row — here the newest alarm's vnoc id."""
    # This ERA015 row is dated 2020-01-29, later than the fixture's
    # 2020-01-24 rows, so it sorts first on alarm_event_time DESC.
    newest_row = (
        "LX00018",
        "RRU-B1B3-S1-2",
        "LX00015",
        "FAULT",
        "003--1143982548-SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015",
        "1460077",
        "9175147",
        "SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015bleUnit=RRU-B1B3-S1-2",
        "Minor",
        "1460077",
        "2020-01-29T09:19:50+02:00",
        "ERA015",
    )
    with Session() as session:
        session.execute(
            """
            INSERT INTO metrics(
                affected_node, affected_equipment, affected_site, alarm_category,
                alarm_group, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """, newest_row)
        session.commit()
        newest = session.fetch_one(
            "SELECT vnoc_alarm_id FROM metrics ORDER BY alarm_event_time DESC;")

    assert newest == ('ERA015', )
def test_consume_message_data_saved_to_db(consumer_mocked, migrate_table):
    """One valid mocked Kafka message yields exactly one persisted row."""
    consume_messages()

    with Session() as session:
        newest = session.fetch_one(
            "SELECT vnoc_alarm_id FROM metrics ORDER BY alarm_event_time DESC;")
        total = session.fetch_one("SELECT COUNT(*) FROM metrics;")

    # The mocked message carries vnoc alarm id ERA021.
    assert newest == ('ERA021', )
    assert total == (1, )
def test_consume_multiple_messages(migrate_table,
                                   consumer_mocked_multiple_messages):
    """Two valid mocked messages produce two correctly-stored rows."""
    consume_messages()

    with Session() as session:
        total = session.fetch_one("SELECT COUNT(*) FROM metrics;")
    assert total == (2, )

    # Check that messages were created properly
    with Session() as session:
        era015 = session.fetch_one(
            "SELECT vnoc_alarm_id FROM metrics WHERE vnoc_alarm_id = 'ERA015';"
        )
        era021 = session.fetch_one(
            "SELECT vnoc_alarm_id FROM metrics WHERE vnoc_alarm_id = 'ERA021';"
        )
        ordered_ids = session.fetch_all(
            "SELECT vnoc_alarm_id FROM metrics ORDER BY alarm_event_time DESC;"
        )

    assert era015 == ("ERA015", )
    assert era021 == ("ERA021", )
    assert ordered_ids == [("ERA021", ), ("ERA015", )]
# Beispiel #9
def test_raises_insert_invalid_type(migrate_table):
    """INSERT into a misspelled column name must raise.

    The column list deliberately says ``alarm_grou`` (not ``alarm_group``)
    so the database rejects the statement.

    Bug fix: the original values tuple was missing a comma after the sixth
    ``"something"``, so two adjacent string literals were implicitly
    concatenated and only 11 parameters were supplied for 12 ``%s``
    placeholders — the test then passed for the wrong reason (parameter
    count mismatch) instead of the intended invalid column. A 12-element
    tuple is now supplied explicitly.
    """
    with pytest.raises(Exception):
        with Session() as session:
            session.execute(
                """
                INSERT INTO metrics(
                    affected_node, affected_equipment, affected_site, alarm_category,
                    alarm_grou, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                    alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, ("something", ) * 12)
# Beispiel #10
def _most_frequent_histogram() -> List[Tuple[Any]]:
    """Print a table of alarm ids ordered by occurrence and return the rows.

    Returns (alarm_id, count) tuples, most frequent first.
    """
    with Session() as session:
        alert_counts = session.fetch_all("""
            SELECT alarm_id, COUNT(alarm_id) 
            FROM metrics
            GROUP BY alarm_id
            ORDER BY COUNT(alarm_id) DESC;
            """)

    print("Most Frequent Alerts")
    table = BeautifulTable()
    for alarm_id, occurrences in alert_counts:
        table.rows.append([alarm_id, occurrences])
    table.columns.header = ["alert id", "alert count"]
    print(table)
    return alert_counts
# Beispiel #11
def _most_affected_nodes() -> List[Tuple[Any]]:
    """Print and return the top nodes ranked by alarm count.

    Returns at most five (affected_node, count) tuples, most affected first.
    """
    limit = 5  # how many nodes to report
    with Session() as session:
        # limit is a local int constant, so the f-string interpolation here
        # is not an injection risk.
        node_counts = session.fetch_all(f"""
            SELECT affected_node, COUNT(affected_node) 
            FROM metrics
            GROUP BY affected_node
            ORDER BY COUNT(affected_node) DESC
            LIMIT {limit};
            """)

    print(f"Top {limit} most affected nodes")
    table = BeautifulTable()
    for node, hit_count in node_counts:
        table.rows.append([node, hit_count])
    table.columns.header = ["node", "times affected"]
    print(table)
    return node_counts
# Beispiel #12
def _timeline_era015() -> List[Tuple[Any]]:
    """Print and return per-hour alarm counts, newest hour first.

    NOTE(review): despite the name, the query aggregates ALL metrics rows —
    there is no WHERE filter on vnoc_alarm_id = 'ERA015'. Confirm whether
    that is intended (the existing tests rely on the unfiltered behavior).
    """
    with Session() as session:
        hourly_counts = session.fetch_all("""
            SELECT date_trunc('hour', alarm_event_time), count(1)
            FROM metrics
            GROUP BY 1
            ORDER BY date_trunc('hour', alarm_event_time) DESC
            """)

    print("ERA015 generated per hour")
    table = BeautifulTable()
    for hour, alarm_count in hourly_counts:
        table.rows.append([f"{hour}", alarm_count])
    table.columns.header = ["date", "count"]
    print(table)
    return hourly_counts
# Beispiel #13
def consume_messages() -> None:
    """Consume alarm messages from Kafka and persist each to the metrics table.

    Iterates the consumer until it is exhausted (indefinitely against a real
    broker). Invalid messages are logged and skipped; each valid message is
    inserted and committed individually.
    """
    logger.warning(f"Started listener {threading.get_ident()}")
    consumer = KafkaConsumer(
        os.getenv(KAFKA_TOPIC, 'metrics'),
        bootstrap_servers=os.getenv(KAFKA_HOST, ['kafka:9092']),
        enable_auto_commit=True,
        value_deserializer=lambda raw: json.loads(raw.decode('utf8')),
    )

    with Session() as session:
        for message in consumer:
            if not is_valid_consumer_message(message.value):
                logger.warning(f"Received invalid message: {message.value}")
                continue

            # The payload's "metadata" field is itself a JSON document.
            metadata = json.loads(message.value['metadata'])
            # affected* fields are optional (.get); the remaining keys raise
            # KeyError if absent.
            row = (
                metadata.get('affectedNode'),
                metadata.get('affectedEquipment'),
                metadata.get('affectedSite'),
                metadata['alarmCategory'],
                metadata['alarmGroup'],
                metadata['alarmCSN'],
                metadata['alarmID'],
                metadata['alarmMO'],
                metadata['alarmNotificationType'],
                metadata['alarmLastSeqNo'],
                metadata['alarmEventTime'],
                metadata['vnocAlarmID'],
            )
            session.execute(
                """
                INSERT INTO metrics(
                    affected_node, affected_equipment, affected_site, alarm_category,
                    alarm_group, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                    alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, row)
            # Commit per message so a later failure cannot roll this one back.
            session.commit()
def test_era015_timeline(populate_table_with_test_data):
    """_timeline_era015 groups alarms per hour, newest hour first."""
    # Fixture seeds three rows, all within the 2020-01-24 09:00 hour.
    assert _timeline_era015() == [
        (datetime(2020, 1, 24, 9), 3)
    ]  # grouped date by hour, count of entities created

    # Add two rows in an earlier hour (2020-01-22 09:00).
    older_row = (
        "LX00012",
        "RRU-B1B3-S1-2",
        "LX00012",
        "FAULT",
        "003--1143982548-SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015",
        "1460077",
        "9175144",
        "SubNetwork=Zagreb,MeContext=LX00015,ManagedElement=LX00015bleUnit=RRU-B1B3-S1-2",
        "Minor",
        "1460077",
        "2020-01-22T09:19:50+02:00",
        "ERA005",
    )
    with Session() as session:
        for _ in range(2):
            session.execute(
                """
                INSERT INTO metrics(
                    affected_node, affected_equipment, affected_site, alarm_category,
                    alarm_group, alarm_csn, alarm_id, alarm_mo, alarm_notification_type,
                    alarm_last_seq_no, alarm_event_time, vnoc_alarm_id
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                """, older_row)
        session.commit()

    # Buckets come back in descending hour order.
    assert _timeline_era015() == [(datetime(2020, 1, 24, 9), 3),
                                  (datetime(2020, 1, 22, 9), 2)]
# Beispiel #15
def test_raises_cursor_already_closed(migrate_table):
    """Using a Session after its context manager exits raises InterfaceError."""
    # Enter and leave immediately; the cursor is closed on exit.
    with Session() as session:
        pass

    with pytest.raises(InterfaceError):
        session.execute(
            "SELECT * FROM metrics ORDER BY alarm_event_time DESC;")
# Beispiel #16
def test_raises_invalid_sql(migrate_table):
    """Malformed SQL ("INSERT INSERT ...") must make execute raise."""
    with pytest.raises(Exception), Session() as session:
        session.execute(
            "INSERT INSERT INTO metrics(url, content, response_time, code) VALUES (%s, %s, %s, %s)",
            ("http://github.com", "another on", 3.33, 201))
def test_connection_commit_is_called(psycopg2_connect_fixture):
    """Session.commit delegates once to the underlying connection's commit."""
    with Session() as db_session:
        db_session.commit()

    # Exactly one pass-through call on the mocked connection.
    assert db_session._connection.commit.call_count == 1
def test_cursor_execute_is_called__on_fetchone(psycopg2_connect_fixture):
    """fetch_one issues exactly one execute on the underlying cursor."""
    with Session() as db_session:
        db_session.fetch_one("SELECT * FROM imaginary_table")

    assert db_session._cursor.execute.call_count == 1