def test_parse_incident_with_B_record():
    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(create_db=True)
    incident = parse_incident(session, result[0], result[1], include_parts=True)
    assert incident.host == u"somehostname.at", "unexpected host, was: %r" % incident.host
    assert incident.path == u"/fubar/sr/10/SomeAction.do", "unexpected path, was: %r" % incident.path
    assert incident.method == u"GET", "unexpected HTTP method, was: %r" % incident.method
def import_log_to_database():
    parser = argparse.ArgumentParser(description="Import Log-Files into Database.")
    parser.add_argument('database', help="Database to import to")
    parser.add_argument('files', metavar='File', type=argparse.FileType('r'), nargs='+')
    parser.add_argument('--import-parts', help="import raw parts", action="store_true")

    args = parser.parse_args()

    if args.import_parts:
        print("also adding parts!")
    else:
        print("not adding parts")

    # open database
    session = setup_connection(create_db=True, path=args.database)

    # add files
    for f in args.files:
        print("parsing " + f.name)
        tmp = read_from_file(f)
        incident = parse_incident(session, tmp[0], tmp[1], include_parts=args.import_parts)

        print("adding " + f.name + " to db")
        session.add(incident)
        session.commit()

    # close database
    session.close()
def output_overview():
    """ just outputs a (formatted) dump of the original data """
    parser = argparse.ArgumentParser(description="Give a high-level overview of the database.")
    parser.add_argument('database', help="Database to analyze")

    group = parser.add_mutually_exclusive_group()
    group.add_argument("-t", "--sort-by-time", action="store_true")
    group.add_argument("-s", "--sort-by-source-ip", action="store_true")
    group.add_argument("-d", "--sort-by-destination-ip", action="store_true")

    args = parser.parse_args()

    # open database
    session = setup_connection(create_db=False, path=args.database)

    # get results
    results = session.query(Incident)

    if args.sort_by_source_ip:
        results = results.join(Incident.source).order_by(Source.ip, Source.port).all()
    elif args.sort_by_destination_ip:
        results = results.join(Incident.destination).order_by(Destination.ip, Destination.port).all()
    else:
        results = results.order_by(Incident.timestamp).all()

    # show all incidents
    for incident in results:
        for detail in incident.details:
            output_details(incident, detail)

    # close database
    session.close()
def test_parse_incident_with_H_record():
    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(create_db=True)
    incident = parse_incident(session, result[0], result[1], include_parts=True)

    expected_ids = sorted([960024, 981203])
    found_ids = sorted([i.incident_catalog.catalog_id for i in incident.details])

    assert found_ids == expected_ids
def create_database():
    parser = argparse.ArgumentParser(description="Create Database.")
    parser.add_argument('database', help="Database to import to")

    args = parser.parse_args()

    session = setup_connection(create_db=True, path=args.database)

    session.connection()  # force the session to open a connection so the database is actually created
    session.close()
def test_parse_incident():
    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(create_db=True)
    incident = parse_incident(session, result[0], result[1])

    assert incident.fragment_id == u'7cf8df3f'
    assert incident.timestamp == datetime.datetime(2015, 3, 30, 21, 10, 38) # should be in UTC
    assert incident.unique_id == u'VRm7zgr5AlMAAClwIZoAAAAU'
    assert incident.source.ip == u'10.199.23.1'
    assert incident.source.port == 40889
    assert incident.destination.ip == u'1.2.3.4'
    assert incident.destination.port == 18060
    assert not incident.parts
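
# The mapped classes themselves are not part of this excerpt; the assertions
# above pin down their shape, so here is a minimal sketch of what Incident,
# Source and Destination could look like under a standard SQLAlchemy
# declarative setup. Column types, table names and foreign keys are
# assumptions, not the real model.
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class Source(Base):
    __tablename__ = 'sources'
    id = Column(Integer, primary_key=True)
    ip = Column(String)
    port = Column(Integer)

class Destination(Base):
    __tablename__ = 'destinations'
    id = Column(Integer, primary_key=True)
    ip = Column(String)
    port = Column(Integer)

class Incident(Base):
    __tablename__ = 'incidents'
    id = Column(Integer, primary_key=True)
    fragment_id = Column(String)
    unique_id = Column(String)
    timestamp = Column(DateTime)  # stored in UTC
    host = Column(String)
    path = Column(String)
    method = Column(String)
    source_id = Column(Integer, ForeignKey('sources.id'))
    destination_id = Column(Integer, ForeignKey('destinations.id'))
    source = relationship(Source)
    destination = relationship(Destination)
    # details and parts are one-to-many collections in the real model
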
def test_insert_and_query():
    """ data commited to the database should be readable afterwards"""
    session = setup_connection(create_db=True)

    destination = Destination(ip=u'127.0.0.1', port=80)
    session.add(destination)
    session.commit()

    result = session.query(Destination).filter(Destination.ip == u'127.0.0.1').all()

    assert result[0].ip == destination.ip

    session.close()
def test_import_with_parts():
    """ import file while saving (optional) parts. """

    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(create_db=True)
    incident = parse_incident(session, result[0], result[1], include_parts=True)

    session.add(incident)
    session.commit()

    # reload from db
    i = session.query(Incident).filter(Incident.id == incident.id).first()

    common_data(i, incident)
    assert len(i.parts) == 6
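
# common_data is a shared test helper that is not shown in this excerpt.
# A plausible sketch, assuming it simply compares the columns the reloaded
# and the original incident have in common:
def common_data(loaded, original):
    assert loaded.unique_id == original.unique_id
    assert loaded.timestamp == original.timestamp
    assert loaded.source.ip == original.source.ip
    assert loaded.destination.ip == original.destination.ip
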
def output_summary():
    parser = argparse.ArgumentParser(description="Give a high-level overview of the database.")
    parser.add_argument('database', help="Database to analyze")

    args = parser.parse_args()

    # open database
    session = setup_connection(create_db=False, path=args.database)

    # show all incidents
    for source, ipaddress, port, path, method, msg, cnt in retrieve_data(session):
        print("%5d: %s -> %s %s:%5d%s %s" % (cnt, source, method, ipaddress, port, path, msg))

    # close database
    session.close()
def output_incident_types():
    """ group database by <ip, port and error-message>
        and output group error counts. """

    parser = argparse.ArgumentParser(description="List all known incident types.")
    parser.add_argument('database', help="Database to analyze")
    args = parser.parse_args()

    # open database
    session = setup_connection(create_db=False, path=args.database)

    # show the different catalog types
    for i in retrieve_data(session):
        print("id %3d: %s:%d %s" % (i.catalog_id, i.config_file, i.config_line, i.message))

    # close database
    session.close()
def output_destinations():
    """ group database by <ip, port and error-message>
        and output group error counts. """

    parser = argparse.ArgumentParser(description="Give a high-level overview of the database.")
    parser.add_argument('database', help="Database to analyze")

    args = parser.parse_args()

    # open database
    session = setup_connection(create_db=False, path=args.database)

    # show all incidents
    for ipaddress, port, msg, cnt in retrieve_data(session):
        print("%3d: %s:%5d %s" % (cnt, ipaddress, port, msg))

    # close database
    session.close()
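
# retrieve_data is defined per command module and is also not shown here.
# For the grouping described in the docstring, the query could look roughly
# like the sketch below; IncidentDetail and IncidentCatalog are hypothetical
# class names inferred from the attribute access in the tests above.
from sqlalchemy import func

def retrieve_data(session):
    # one row per <destination ip, port, catalog message> group, plus the
    # number of incidents that fell into that group
    return (session.query(Destination.ip, Destination.port,
                          IncidentCatalog.message, func.count(Incident.id))
            .select_from(Incident)
            .join(Incident.destination)
            .join(Incident.details)
            .join(IncidentDetail.incident_catalog)
            .group_by(Destination.ip, Destination.port,
                      IncidentCatalog.message)
            .all())
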
def test_import_with_parts():
    """ import file while saving (optional) parts. """

    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(True, "postgresql://modsec@localhost/modsec")

    cache_destination = DestinationCache(session)
    cache_source = SourceCache(session)
    cache_details = IncidentDetailCache(session)
    incident_counter = IncidentCount(session)
    incident_cache = IncidentCache()

    incident = parse_incident(result, include_parts=True)
    incidentObject = forward_to_db(session, incident, incident_counter,
                                   incident_cache, cache_destination,
                                   cache_source, cache_details, diff=1)

    # reload from db
    i = session.query(Incident).filter(Incident.id == incidentObject['id']).first()

    common_data(i, incident)
    assert len(i.parts) == 6
def import_log_to_database():
    parser = argparse.ArgumentParser(description="Import Log-Files into Database.")
    parser.add_argument('database', help="Database to import to")
    parser.add_argument('files', metavar='File', type=argparse.FileType('r'), nargs='+')
    parser.add_argument('--import-parts', help="import raw parts", action="store_true")

    args = parser.parse_args()

    if args.import_parts:
        print("also adding parts!")
    else:
        print("not adding parts")

    # open database to calculate num_worker
    session = setup_connection(create_db=True, path=args.database)

    cache_destination = DestinationCache(session)
    cache_source = SourceCache(session)
    cache_details = IncidentDetailCache(session)
    incident_counter = IncidentCount(session)
    incident_cache = IncidentCache()

    files = [f.name for f in args.files]
    with futures.ProcessPoolExecutor(max_workers=max(1, cpu_count()-1)) as executor:
        for incident in executor.map(tmp, files):
            try:
                forward_to_db(session, incident, incident_counter,
                              incident_cache, cache_destination,
                              cache_source, cache_details)
            except KeyError as e:
                print("ERROR: key error {0}: {1}".format(e.errno, e.strerror))

    # close database
    conn = session.connection()
    cache_source.sync_to_db(conn)
    cache_destination.sync_to_db(conn)
    incident_cache.writeIncidents(conn)
    incident_cache.writeParts(conn)
    cache_details.sync_to_db(conn)
    session.commit()
    session.close()
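
# executor.map hands each file name to a separate worker process, so the
# callable it maps (tmp above) must be picklable, i.e. defined at module
# level. Its body is not shown in this excerpt; given how parse_incident is
# called in the cache-based test above, it could look roughly like this
# (include_parts is hard-coded only to keep the sketch short):
def tmp(filename):
    # runs in a worker process: read one log file and parse it into an
    # incident structure that forward_to_db can consume in the parent
    result = read_file(filename)
    return parse_incident(result, include_parts=True)
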
def test_parse_incident_with_parts():
    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    session = setup_connection(create_db=True)
    incident = parse_incident(session, result[0], result[1], include_parts=True)
    assert len(incident.parts) == 6
def test_create_inmemory_db():
    """ a simple in-memory database-initialization should work """
    setup_connection(create_db=True)
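
# setup_connection itself is outside this excerpt as well. It accepts an
# optional path (a full SQLAlchemy URL in the PostgreSQL test above) and
# falls back to an in-memory database, so a minimal sketch could be:
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

def setup_connection(create_db=False, path=None):
    # no path -> throwaway in-memory SQLite database (handy for tests);
    # otherwise path is treated as a SQLAlchemy connection URL
    engine = create_engine(path or 'sqlite://')
    if create_db:
        Base.metadata.create_all(engine)  # Base from the model sketch above
    return sessionmaker(bind=engine)()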