def test_parse_incident_with_H_record():
    """Both expected rule IDs must be extracted from the H record (dict API)."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    incident = parse_incident(parsed, include_parts=True)
    expected_ids = sorted([960024, 981203])
    # compare order-insensitively: sort both sides before asserting
    found_ids = sorted(int(detail['id']) for detail in incident['details'])
    assert found_ids == expected_ids
def test_parse_incident_with_B_record():
    """Host, path and HTTP method from the B record (ORM API)."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(create_db=True)
    incident = parse_incident(db_session, parsed[0], parsed[1], include_parts=True)
    assert incident.host == u"somehostname.at", "unexpected host, was: %r" % incident.host
    assert incident.path == u"/fubar/sr/10/SomeAction.do", "invalid path, was:%r" % incident.path
    assert incident.method == u"GET", "unexpected HTTP method, was: %r" % incident.method
def import_log_to_database():
    """CLI entry point: parse log files and import them as incidents.

    Positional arguments: the target database path and one or more log
    files. With ``--import-parts`` the raw message parts are stored too.

    Fixes two resource leaks in the original: file handles opened by
    ``argparse.FileType('r')`` were never closed, and the DB session was
    not closed when parsing a file raised.
    """
    parser = argparse.ArgumentParser(description="Import Log-Files into Database.")
    parser.add_argument('database', help="Database to import to")
    parser.add_argument('files', metavar='File', type=argparse.FileType('r'), nargs='+')
    parser.add_argument('--import-parts', help="import raw parts", action="store_true")
    args = parser.parse_args()
    if args.import_parts:
        print("also adding parts!")
    else:
        print("not adding parts")
    # open database
    session = setup_connection(create_db=True, path=args.database)
    try:
        # add files; commit after each so one bad file loses at most itself
        for f in args.files:
            print("parsing " + f.name)
            try:
                tmp = read_from_file(f)
            finally:
                # argparse.FileType leaves handles open; close explicitly
                f.close()
            incident = parse_incident(session, tmp[0], tmp[1], include_parts=args.import_parts)
            print("adding " + f.name + " to db")
            session.add(incident)
            session.commit()
    finally:
        # close database even if a file failed to parse
        session.close()
def test_parse_incident_with_H_record():
    """Catalog IDs referenced by the incident details must match (ORM API)."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(create_db=True)
    incident = parse_incident(db_session, parsed[0], parsed[1], include_parts=True)
    expected_ids = sorted([960024, 981203])
    # sort both sides so the comparison ignores detail ordering
    found_ids = sorted(detail.incident_catalog.catalog_id for detail in incident.details)
    assert found_ids == expected_ids
def test_parse_incident():
    """Basic fields of a parsed incident dict match the fixture file."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    incident = parse_incident(parsed)
    assert incident['fragment_id'] == u'7cf8df3f'
    # timestamp should be in UTC
    assert incident['timestamp'] == datetime.datetime(2015, 3, 30, 21, 10, 38)
    assert incident['unique_id'] == u'VRm7zgr5AlMAAClwIZoAAAAU'
    source = incident['source']
    destination = incident['destination']
    assert source[0] == u'10.199.23.1'
    assert source[1] == 40889
    assert destination[0] == u'1.2.3.4'
    assert destination[1] == 18060
    # without include_parts no raw parts are kept
    assert len(incident['parts']) == 0
def test_parse_incident():
    """Basic fields of a parsed ORM incident match the fixture file."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(create_db=True)
    incident = parse_incident(db_session, parsed[0], parsed[1])
    assert incident.fragment_id == u'7cf8df3f'
    # timestamp should be in UTC
    assert incident.timestamp == datetime.datetime(2015, 3, 30, 21, 10, 38)
    assert incident.unique_id == u'VRm7zgr5AlMAAClwIZoAAAAU'
    src = incident.source
    dst = incident.destination
    assert src.ip == u'10.199.23.1'
    assert src.port == 40889
    assert dst.ip == u'1.2.3.4'
    assert dst.port == 18060
    # without include_parts the parts collection stays empty
    assert not incident.parts
def test_import_with_parts():
    """import file while saving (optional) parts."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(create_db=True)
    incident = parse_incident(db_session, parsed[0], parsed[1], include_parts=True)
    db_session.add(incident)
    db_session.commit()
    # reload from db and compare against the in-memory object
    reloaded = db_session.query(Incident).filter(Incident.id == incident.id).first()
    common_data(reloaded, incident)
    assert len(reloaded.parts) == 6
def test_import_with_parts():
    """import file while saving (optional) parts."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(True, "postgresql://modsec@localhost/modsec")
    # caches passed to forward_to_db so repeated rows are reused, not recreated
    dest_cache = DestinationCache(db_session)
    src_cache = SourceCache(db_session)
    detail_cache = IncidentDetailCache(db_session)
    counter = IncidentCount(db_session)
    inc_cache = IncidentCache()
    incident = parse_incident(parsed, include_parts=True)
    stored = forward_to_db(db_session, incident, counter, inc_cache,
                           dest_cache, src_cache, detail_cache, diff=1)
    # reload from db
    reloaded = db_session.query(Incident).filter(Incident.id == stored['id']).first()
    common_data(reloaded, incident)
    assert len(reloaded.parts) == 6
def test_parse_incident_with_B_record():
    """Host, path and HTTP method from the B record (dict API).

    Bug fix: the assert failure messages used attribute access
    (``incident.host``) on a dict, which would raise ``AttributeError``
    instead of reporting the offending value; they now use the same
    dict subscription as the asserts themselves.
    """
    result = read_file('log_importer/tests/test_files/file_read_test.txt')
    incident = parse_incident(result, include_parts=True)
    assert incident['host'] == u"somehostname.at", "unexpected host, was: %r" % incident['host']
    assert incident['path'] == u"/fubar/sr/10/SomeAction.do", "invalid path, was:%r" % incident['path']
    assert incident['method'] == u"GET", "unexpected HTTP method, was: %r" % incident['method']
def test_parse_incident_with_parts():
    """With include_parts the parsed dict carries all six raw parts."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    incident = parse_incident(parsed, include_parts=True)
    parts = incident['parts']
    assert len(parts) == 6
def tmp(f):
    """Parse the log file at path ``f`` and return the parsed incident.

    Bug fix: the original opened the file and never closed it, leaking
    the handle; a ``with`` block now guarantees it is released.
    """
    with open(f, 'r') as handle:
        return parse_incident(read_from_file(handle))
def test_parse_incident_with_parts():
    """An ORM incident parsed with include_parts exposes six parts."""
    parsed = read_file('log_importer/tests/test_files/file_read_test.txt')
    db_session = setup_connection(create_db=True)
    incident = parse_incident(db_session, parsed[0], parsed[1], include_parts=True)
    assert len(incident.parts) == 6