Example #1
def test_submission_namespace(datastore, sio):
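    # Subscribe to the /submissions SocketIO namespace, publish one message of
    # each submission lifecycle type on the 'submissions' comms queue, and
    # check that every handler receives the matching payload.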
    submission_queue = CommsQueue('submissions', private=True)
    monitoring = get_random_id()

    ingested = random_model_obj(SubmissionMessage).as_primitives()
    ingested['msg_type'] = "SubmissionIngested"
    received = random_model_obj(SubmissionMessage).as_primitives()
    received['msg_type'] = "SubmissionReceived"
    queued = random_model_obj(SubmissionMessage).as_primitives()
    queued['msg_type'] = "SubmissionQueued"
    started = random_model_obj(SubmissionMessage).as_primitives()
    started['msg_type'] = "SubmissionStarted"

    test_res_array = []

    @sio.on('monitoring', namespace='/submissions')
    def on_monitoring(data):
        # Confirmation that we are waiting for status messages
        test_res_array.append(('on_monitoring', data == monitoring))

    @sio.on('SubmissionIngested', namespace='/submissions')
    def on_submission_ingested(data):
        test_res_array.append(
            ('on_submission_ingested', data == ingested['msg']))

    @sio.on('SubmissionReceived', namespace='/submissions')
    def on_submission_received(data):
        test_res_array.append(
            ('on_submission_received', data == received['msg']))

    @sio.on('SubmissionQueued', namespace='/submissions')
    def on_submission_queued(data):
        test_res_array.append(('on_submission_queued', data == queued['msg']))

    @sio.on('SubmissionStarted', namespace='/submissions')
    def on_submission_started(data):
        test_res_array.append(
            ('on_submission_started', data == started['msg']))

    try:
        sio.emit('monitor', monitoring, namespace='/submissions')
        sio.sleep(1)

        submission_queue.publish(ingested)
        submission_queue.publish(received)
        submission_queue.publish(queued)
        submission_queue.publish(started)

        start_time = time.time()

        while len(test_res_array) < 5 and time.time() - start_time < 5:
            sio.sleep(0.1)

        assert len(test_res_array) == 5

        for test, result in test_res_array:
            if not result:
                pytest.fail(f"{test} failed.")
    finally:
        sio.disconnect()
Example #2
def test_alert_created(datastore, client):
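    # Publish AlertCreated/AlertUpdated messages from a background thread and
    # check that the client's alert listener routes each one to the matching
    # callback.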
    alert_queue = CommsQueue('alerts', private=True)

    created = random_model_obj(AlertMessage).as_primitives()
    created['msg_type'] = "AlertCreated"

    updated = random_model_obj(AlertMessage).as_primitives()
    updated['msg_type'] = "AlertUpdated"

    test_res_array = []

    def alerter_created_callback(data):
        test_res_array.append(('created', created['msg'] == data))

    def alerter_updated_callback(data):
        test_res_array.append(('updated', updated['msg'] == data))

    def publish_thread():
        time.sleep(1)
        alert_queue.publish(created)
        alert_queue.publish(updated)

    threading.Thread(target=publish_thread).start()
    client.socketio.listen_on_alerts_messages(
        alert_created_callback=alerter_created_callback,
        alert_updated_callback=alerter_updated_callback,
        timeout=2)
    assert len(test_res_array) == 2

    for test, result in test_res_array:
        if not result:
            pytest.fail("{} failed.".format(test))
Example #3
def datastore(datastore_connection):
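    # Fixture: seed users plus matching file, error and result documents, then
    # wipe everything once the tests are done.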
    ds = datastore_connection
    try:
        create_users(ds)

        for x in range(TEST_RESULTS):
            f = random_model_obj(File)
            ds.file.save(f.sha256, f)
            file_list.append(f.sha256)
        ds.file.commit()

        for x in range(TEST_RESULTS):
            e = random_model_obj(Error)
            e.sha256 = file_list[x]
            ds.error.save(e.build_key(), e)
            error_key_list.append(e.build_key())
        ds.error.commit()

        for x in range(TEST_RESULTS):
            r = random_model_obj(Result)
            r.sha256 = file_list[x]
            ds.result.save(r.build_key(), r)
            result_key_list.append(r.build_key())
        ds.result.commit()
        yield ds
    finally:
        ds.error.wipe()
        ds.file.wipe()
        ds.result.wipe()
        wipe_users(ds)
def test_finish_heuristic(client, dispatch_client, heuristics):
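    # Posting a result whose sections carry heuristics should make the service
    # API recompute the section scores from the (mocked) heuristics store.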
    heuristics.get.return_value = None
    task = random_minimal_obj(Task)

    result: Result = random_model_obj(Result)
    while not any(sec.heuristic for sec in result.result.sections):
        result: Result = random_model_obj(Result)

    heuristics_count = sum(
        int(sec.heuristic is not None) for sec in result.result.sections)

    result.result.score = 99999
    result.response.extracted = []
    result.response.supplementary = []

    message = {
        'task': task.as_primitives(),
        'result': result.as_primitives(),
        'freshen': True
    }
    resp = client.post('/api/v1/task/', headers=headers, json=message)
    assert resp.status_code == 200
    assert dispatch_client.service_finished.call_count == 1
    assert dispatch_client.service_finished.call_args[0][0] == task.sid
    # heuristics.get is mocked to return None, so recomputing the score from
    # heuristics yields 0. Seeing 0 here (rather than the submitted 99999, or
    # the 1 a raw Mock coerces to as an int) proves the mocked heuristics were
    # consulted to reload the score.
    assert dispatch_client.service_finished.call_args[0][2].result.score == 0
    assert heuristics.get.call_count == heuristics_count
def datastore(datastore_connection):
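    # Fixture: seed users along with favorites and avatars for a set of test
    # accounts, then wipe the users afterwards.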
    global user_list
    ds = datastore_connection
    try:
        create_users(ds)

        data = {
            'alert': [],
            'error': [],
            'search': [],
            'signature': [],
            'submission': [],
        }
        for x in range(NUM_FAVS):
            f = random_model_obj(Favorite)
            f.name = f"test_{x+1}"
            for key in data:
                data[key].append(f)

        ds.user_favorites.save('admin', data)
        ds.user_favorites.save('user', data)

        for x in range(NUM_USERS):
            u = random_model_obj(User)
            u.uname = f"test_{x+1}"
            ds.user.save(u.uname, u)
            ds.user_favorites.save(u.uname, data)
            ds.user_avatar.save(u.uname, AVATAR)
            user_list.append(u.uname)

        yield ds
    finally:
        wipe_users(ds)
def datastore(datastore_connection, filestore):
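    # Fixture: seed files (keeping the first as test_file) with 1 to 3 results
    # each, in both the datastore and the filestore, then clean everything up.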
    global test_file, file_res_list
    ds = datastore_connection
    try:
        create_users(ds)
        # noinspection PyUnusedLocal
        for _f in range(NUM_FILES):
            f = random_model_obj(File)
            if test_file is None:
                test_file = f
            ds.file.save(f.sha256, f)

            filestore.put(f.sha256, f.sha256)

            # noinspection PyUnusedLocal
            for _r in range(random.randint(1, 3)):
                r = random_model_obj(Result)
                r.sha256 = f.sha256
                file_res_list.append(r.build_key())
                ds.result.save(r.build_key(), r)

        ds.file.commit()
        ds.result.commit()
        yield ds
    finally:
        wipe_users(ds)
        ds.file.wipe()
        ds.result.wipe()
        for key in file_res_list:
            filestore.delete(key[:64])
def datastore(datastore_connection):
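    # Fixture: seed matching file, alert and result documents, then wipe the
    # collections afterwards.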
    ds = datastore_connection
    try:
        create_users(ds)

        for _ in range(NUM_ITEMS):
            f = random_model_obj(File)
            f_hash_list.append(f.sha256)
            ds.file.save(f.sha256, f)

        for x in range(NUM_ITEMS):
            a = random_model_obj(Alert)
            a.file.sha256 = f_hash_list[x]
            ds.alert.save(a.alert_id, a)

        for x in range(NUM_ITEMS):
            r = random_model_obj(Result)
            r.sha256 = f_hash_list[x]
            ds.result.save(r.build_key(), r)

        ds.alert.commit()
        ds.file.commit()
        ds.result.commit()
        ds.submission.commit()

        yield ds
    finally:
        ds.alert.wipe()
        ds.file.wipe()
        ds.result.wipe()
        ds.submission.wipe()
        wipe_users(ds)
def test_dispatch_extracted(clean_redis, clean_datastore):
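    # Walk a submission through the dispatcher: dispatch one file, have the
    # 'extract' service report an extracted file, and check that the new file
    # is scheduled for service work in turn.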
    redis = clean_redis
    ds = clean_datastore

    # Setup the fake datastore
    file_hash = get_random_hash(64)
    second_file_hash = get_random_hash(64)

    for fh in [file_hash, second_file_hash]:
        obj = random_model_obj(models.file.File)
        obj.sha256 = fh
        ds.file.save(fh, obj)

    # Inject the fake submission
    submission = random_model_obj(models.submission.Submission)
    submission.files = [dict(name='./file', sha256=file_hash)]
    sid = submission.sid = 'first-submission'

    disp = Dispatcher(ds, redis, redis)
    disp.running = ToggleTrue()
    client = DispatchClient(ds, redis, redis)
    client.dispatcher_data_age = time.time()
    client.dispatcher_data.append(disp.instance_id)

    # Launch the submission
    client.dispatch_submission(submission)
    disp.pull_submissions()
    disp.service_worker(disp.process_queue_index(sid))

    # Finish one service extracting a file
    job = client.request_work('0', 'extract', '0')
    assert job.fileinfo.sha256 == file_hash
    assert job.filename == './file'
    new_result: Result = random_minimal_obj(Result)
    new_result.sha256 = file_hash
    new_result.response.service_name = 'extract'
    new_result.response.extracted = [
        dict(sha256=second_file_hash,
             name='second-*',
             description='abc',
             classification='U')
    ]
    client.service_finished(sid, 'extracted-done', new_result)

    # process the result
    disp.pull_service_results()
    disp.service_worker(disp.process_queue_index(sid))
    disp.service_worker(disp.process_queue_index(sid))

    # The newly extracted file should now be dispatched to the extract service
    job = client.request_work('0', 'extract', '0')
    assert job.fileinfo.sha256 == second_file_hash
    assert job.filename == 'second-*'
Example #9
def test_get_message_list(datastore, client):
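    # Push two submission messages onto a private notification queue and check
    # that get_message_list returns both, in order.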
    notification_queue = get_random_id()
    queue = NamedQueue("nq-%s" % notification_queue,
                       host=config.core.redis.persistent.host,
                       port=config.core.redis.persistent.port)
    queue.delete()
    msg_0 = random_model_obj(Submission).as_primitives()
    queue.push(msg_0)
    msg_1 = random_model_obj(Submission).as_primitives()
    queue.push(msg_1)

    res = client.ingest.get_message_list(notification_queue)
    assert len(res) == 2
    assert res[0] == msg_0
    assert res[1] == msg_1
def test_service_changes(updater: run_updater.ServiceUpdater):
    ds: MockDatastore = updater.datastore.ds
    # Base conditions, nothing anywhere
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services()) == 0

    # Nothing does nothing
    updater.sync_services()
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services()) == 0

    # Any non-disabled services should be picked up by the updater
    create_services(updater.datastore, limit=1)
    for data in ds._collections['service']._docs.values():
        data.enabled = True
        updater._service_stage_hash.set(data.name, ServiceStage.Update)
        data.update_config = random_model_obj(UpdateConfig)
    assert len(updater.datastore.list_all_services(full=True)) == 1
    updater.sync_services()
    assert updater.services.length() == 1
    assert len(updater.datastore.list_all_services(full=True)) == 1

    # It should be scheduled to update ASAP
    for data in updater.services.items().values():
        assert data['next_update'] <= now_as_iso()

    # Disable the service and it will disappear from redis
    for data in ds._collections['service']._docs.values():
        data.enabled = False
    updater.sync_services()
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services(full=True)) == 1
Example #11
def test_service_message(redis_connection):
    try:
        _test_message_through_queue('status', random_model_obj(ServiceMessage),
                                    redis_connection)
    except (ValueError, TypeError, KeyError):
        pytest.fail(
            "Could not generate 'ServiceMessage' object and validate it.")
Example #12
def test_alert_message(redis_connection):
    try:
        _test_message_through_queue('alerts', random_model_obj(AlertMessage),
                                    redis_connection)
    except (ValueError, TypeError, KeyError):
        pytest.fail(
            "Could not generate 'AlertMessage' object and validate it.")
Example #13
def test_task_message(redis_connection):
    try:
        _test_message_through_queue('submissions',
                                    random_model_obj(TaskMessage),
                                    redis_connection)
    except (ValueError, TypeError, KeyError):
        pytest.fail("Could not generate 'TaskMessage' object and validate it.")
def test_finish_missing_file(client, dispatch_client, heuristics):
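    # Posting a result that references extracted or supplementary files absent
    # from the filestore should fail and report exactly those missing hashes.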
    heuristics.get.return_value = None
    task = random_minimal_obj(Task)
    fs = forge.get_filestore()

    result: Result = random_minimal_obj(Result)
    while not result.response.extracted:
        result: Result = random_model_obj(Result)
        result.response.extracted = [
            x for x in result.response.extracted if not fs.exists(x.sha256)
        ]
    missing = {
        x.sha256
        for x in result.response.extracted if not fs.exists(x.sha256)
    }
    missing |= {
        x.sha256
        for x in result.response.supplementary if not fs.exists(x.sha256)
    }

    message = {
        'task': task.as_primitives(),
        'result': result.as_primitives(),
        'freshen': True
    }
    resp = client.post('/api/v1/task/', headers=headers, json=message)
    assert resp.status_code == 200
    assert resp.json['api_response']['success'] is False
    assert set(resp.json['api_response']['missing_files']) == missing
def test_create_single_alert(config, datastore):
    persistent_redis = get_client(
        host=config.core.redis.persistent.host,
        port=config.core.redis.persistent.port,
        private=False,
    )
    alerter = Alerter()
    # Swap our alerter onto a private queue so our test doesn't get intercepted
    alerter.alert_queue = alert_queue = NamedQueue(uuid.uuid4().hex,
                                                   persistent_redis)

    # Get a random submission
    submission = random.choice(all_submissions)
    all_submissions.remove(submission)

    # Generate a task for the submission
    ingest_msg = random_model_obj(IngestTask)
    ingest_msg.submission.sid = submission.sid
    ingest_msg.submission.metadata = submission.metadata
    ingest_msg.submission.params = submission.params
    ingest_msg.submission.files = submission.files

    alert_queue.push(ingest_msg.as_primitives())
    alert_type = alerter.run_once()
    assert alert_type == 'create'
    datastore.alert.commit()

    res = datastore.alert.search("id:*", as_obj=False)
    assert res['total'] == 1

    alert = datastore.alert.get(res['items'][0]['alert_id'])
    assert alert.sid == submission.sid
Example #16
def test_heuristics_valid():
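    # Build a raw service heuristic with attack ids, signatures and a score
    # map, convert it to a result heuristic, and check the mapping.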
    heuristic_list = [random_model_obj(Heuristic) for _ in range(4)]
    heuristics = {x.heur_id: x for x in heuristic_list}

    attack_ids = list({
        random.choice(list(attack_map.keys()))
        for _ in range(random.randint(1, 3))
    })
    signatures = {}
    score_map = {}
    for x in range(random.randint(2, 4)):
        name = get_random_word()
        if x >= 2:
            score_map[name] = random.randint(10, 100)

        signatures[name] = random.randint(1, 3)

    service_heur = dict(heur_id=random.choice(list(heuristics.keys())),
                        score=0,
                        attack_ids=attack_ids,
                        signatures=signatures,
                        frequency=0,
                        score_map=score_map)

    result_heur = service_heuristic_to_result_heuristic(
        deepcopy(service_heur), heuristics)
    assert result_heur is not None
    assert service_heur['heur_id'] == result_heur['heur_id']
    assert service_heur['score'] != result_heur['score']
    for attack in result_heur['attack']:
        assert attack['attack_id'] in attack_ids
    for signature in result_heur['signature']:
        assert signature['name'] in signatures
        assert signature['frequency'] == signatures[signature['name']]
Example #17
def test_add_workflow(datastore, login_session):
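    # Adding a workflow with an invalid query must raise an APIError; with a
    # valid query it must be stored exactly as submitted.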
    _, session, host = login_session

    workflow = random_model_obj(Workflow).as_primitives()
    workflow['query'] = "sha256:[1 AND 'This is invalid!'"
    workflow['creator'] = 'admin'
    workflow['edited_by'] = 'admin'

    with pytest.raises(APIError):
        resp = get_api_data(session,
                            f"{host}/api/v4/workflow/",
                            method="PUT",
                            data=json.dumps(workflow))

    workflow['query'] = "file.sha256:*"
    resp = get_api_data(session,
                        f"{host}/api/v4/workflow/",
                        method="PUT",
                        data=json.dumps(workflow))
    assert resp['success']
    workflow['workflow_id'] = resp['workflow_id']
    workflow_list.append(resp['workflow_id'])

    datastore.workflow.commit()

    new_workflow = datastore.workflow.get(resp['workflow_id'], as_obj=False)
    assert new_workflow == workflow
Example #18
def test_get_message(datastore, login_session):
    _, session, host = login_session

    r = random_model_obj(Result)
    wq.push({'status': "OK", 'cache_key': r.build_key()})
    resp = get_api_data(session, f"{host}/api/v4/live/get_message/{wq_id}/")
    assert resp['msg'] == r.build_key()
Example #19
def datastore(datastore_connection, filestore):
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)

        for _ in range(NUM_FILES):
            f = random_model_obj(File)
            ds.file.save(f.sha256, f)
            file_hashes.append(f.sha256)
            filestore.put(f.sha256, f.sha256)

        ds.file.commit()
        yield ds
    finally:
        # Cleanup Elastic
        ds.file.wipe()
        wipe_services(ds)
        wipe_users(ds)

        # Cleanup Minio
        for f in file_hashes:
            filestore.delete(f)

        # Cleanup Redis
        nq.delete()
        iq.delete()
Example #20
def test_alert_namespace(datastore, sio):
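    # Subscribe to the /alerts SocketIO namespace, publish created/updated
    # alert messages, and check that each handler receives the matching
    # payload.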
    alert_queue = CommsQueue('alerts', private=True)
    test_id = get_random_id()

    created = random_model_obj(AlertMessage)
    created.msg_type = "AlertCreated"

    updated = random_model_obj(AlertMessage)
    updated.msg_type = "AlertUpdated"

    test_res_array = []

    @sio.on('monitoring', namespace='/alerts')
    def on_monitoring(data):
        # Confirmation that we are waiting for alerts
        test_res_array.append(('on_monitoring', data == test_id))

    @sio.on('AlertCreated', namespace='/alerts')
    def on_alert_created(data):
        test_res_array.append(
            ('on_alert_created', data == created.as_primitives()['msg']))

    @sio.on('AlertUpdated', namespace='/alerts')
    def on_alert_updated(data):
        test_res_array.append(
            ('on_alert_updated', data == updated.as_primitives()['msg']))

    try:
        sio.emit('alert', test_id, namespace='/alerts')
        sio.sleep(1)

        alert_queue.publish(created.as_primitives())
        alert_queue.publish(updated.as_primitives())

        start_time = time.time()

        while len(test_res_array) < 3 and time.time() - start_time < 5:
            sio.sleep(0.1)

        assert len(test_res_array) == 3

        for test, result in test_res_array:
            if not result:
                pytest.fail(f"{test} failed.")

    finally:
        sio.disconnect()
Example #21
def test_service_delta_to_service_model():
    try:
        data = random_model_obj(ServiceDelta).as_primitives()
        Service(data).as_primitives()
    except (ValueError, TypeError, KeyError):
        pytest.fail(
            "Could not use a 'ServiceDelta' object to create a 'Service' object."
        )
def create_workflows(ds, log=None):
    for _ in range(20):
        w_id = get_random_id()
        ds.workflow.save(w_id, random_model_obj(Workflow))
        if log:
            log.info(f'\t{w_id}')

    ds.workflow.commit()
def test_submission_ingested(datastore, client):
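    # Publish one message of each submission lifecycle type from a background
    # thread and check that the client's submission listener routes them all.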
    submission_queue = CommsQueue('submissions', private=True)
    test_res_array = []

    completed = random_model_obj(SubmissionMessage).as_primitives()
    completed['msg_type'] = "SubmissionCompleted"
    ingested = random_model_obj(SubmissionMessage).as_primitives()
    ingested['msg_type'] = "SubmissionIngested"
    received = random_model_obj(SubmissionMessage).as_primitives()
    received['msg_type'] = "SubmissionReceived"
    started = random_model_obj(SubmissionMessage).as_primitives()
    started['msg_type'] = "SubmissionStarted"

    def completed_callback(data):
        test_res_array.append(('completed', completed['msg'] == data))

    def ingested_callback(data):
        test_res_array.append(('ingested', ingested['msg'] == data))

    def received_callback(data):
        test_res_array.append(('received', received['msg'] == data))

    def started_callback(data):
        test_res_array.append(('started', started['msg'] == data))

    def publish_thread():
        time.sleep(1)
        submission_queue.publish(completed)
        submission_queue.publish(ingested)
        submission_queue.publish(received)
        submission_queue.publish(started)

    threading.Thread(target=publish_thread).start()
    client.socketio.listen_on_submissions(
        completed_callback=completed_callback,
        ingested_callback=ingested_callback,
        received_callback=received_callback,
        started_callback=started_callback,
        timeout=2)

    assert len(test_res_array) == 4

    for test, result in test_res_array:
        if not result:
            pytest.fail("{} failed.".format(test))
def make_error(file_hash, service, recoverable=True):
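    # Build a random Error for the given file and service, with either a
    # recoverable or a non-recoverable status.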
    new_error: Error = random_model_obj(Error)
    new_error.response.service_name = service
    new_error.sha256 = file_hash
    if recoverable:
        new_error.response.status = 'FAIL_RECOVERABLE'
    else:
        new_error.response.status = 'FAIL_NONRECOVERABLE'
    return new_error
Example #25
def test_safelist_exist(client, storage):
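    # With the storage layer mocked to return a known safelist entry, the API
    # should return that entry when queried by its hash.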
    valid_hash = randomizer.get_random_hash(64)
    valid_resp = randomizer.random_model_obj(Safelist, as_json=True)
    valid_resp['hashes']['sha256'] = valid_hash
    storage.safelist.get_if_exists.return_value = valid_resp

    resp = client.get(f'/api/v1/safelist/{valid_hash}/', headers=headers)
    assert resp.status_code == 200
    assert resp.json['api_response'] == valid_resp
Example #26
def create_heuristics(ds, log=None, heuristics_count=40):
    for _ in range(heuristics_count):
        h = random_model_obj(Heuristic)
        h.name = get_random_phrase()
        ds.heuristic.save(h.heur_id, h)
        if log:
            log.info(f'\t{h.heur_id}')

    ds.heuristic.commit()
Example #27
def test_add_update_signature_many(datastore, login_session):
    _, session, host = login_session
    ds = datastore

    # Insert a dummy signature
    source = "source"
    s_type = "type"
    sig_list = []
    for x in range(10):
        data = random_model_obj(Signature).as_primitives()
        data['signature_id'] = f"test_sig_{x}"
        data['name'] = f"sig_name_{x}"
        data['status'] = "DEPLOYED"
        data['source'] = source
        data['type'] = s_type
        sig_list.append(data)

    uri = f"{host}/api/v4/signature/add_update_many/?source={source}&sig_type={s_type}"
    resp = get_api_data(session, uri, data=json.dumps(sig_list), method="PUT")
    assert resp == {'errors': False, 'success': 10, 'skipped': []}

    # Test the signature data
    ds.signature.commit()
    data = random.choice(sig_list)
    key = f"{data['type']}_{data['source']}_{data['signature_id']}"
    added_sig = ds.signature.get(key, as_obj=False)
    assert data == added_sig

    # Change the signature status
    resp = get_api_data(
        session, f"{host}/api/v4/signature/change_status/{key}/DISABLED/")
    ds.signature.commit()
    assert resp['success']

    # Update signature data
    new_sig_data = "NEW SIGNATURE DATA"
    data['data'] = new_sig_data
    uri = f"{host}/api/v4/signature/add_update_many/?source={source}&sig_type={s_type}"
    resp = get_api_data(session, uri, data=json.dumps([data]), method="POST")
    assert resp == {'errors': False, 'success': 1, 'skipped': []}

    # Remove state change data
    data.pop('status', None)
    data.pop('state_change_date', None)
    data.pop('state_change_user', None)

    # Test the signature data
    ds.signature.commit()
    modded_sig = ds.signature.get(key, as_obj=False)

    modded_sig.pop('state_change_date')
    # Was state kept?
    assert "DISABLED" == modded_sig.pop('status')
    # Was state_change_user kept?
    assert "admin" == modded_sig.pop('state_change_user')
    assert data == modded_sig
Example #28
def test_get_message(datastore, login_session):
    _, session, host = login_session

    nq.delete()
    test_message = random_model_obj(Submission).as_primitives()
    nq.push(test_message)

    resp = get_api_data(session,
                        f"{host}/api/v4/ingest/get_message/{TEST_QUEUE}/")
    assert resp == test_message
def create_heuristics(ds, log=None):
    for srv in SERVICES.keys():
        for x in range(5):
            h = random_model_obj(Heuristic)
            h.heur_id = f"{srv.upper()}.{x + 1}"
            h.name = get_random_phrase()
            ds.heuristic.save(h.heur_id, h)
            if log:
                log.info(f'\t{h.heur_id}')

    ds.heuristic.commit()
Example #30
def main():
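    # Benchmark: generate a random Submission dataset, run the same workload
    # against each datastore flavour, and print a timing comparison table.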
    datastores = {}
    try:
        data = {}

        print(f"\nGenerating random dataset of {DATASET_SIZE} documents...")
        for ii in range(DATASET_SIZE):
            data[str(ii)] = random_model_obj(Submission, as_json=True)

        print("Creating indexes...")
        log.setLevel(logging.ERROR)
        datastores = {
            'es': es_connection(Submission, False),
            'es_model': es_connection(Submission),
        }
        log.setLevel(logging.INFO)

        result = {}
        for name, ds in datastores.items():
            print(f"\nPerforming benchmarks for datastore: {name}")
            result[name] = {}
            run(ds, result[name], data)

        data = [
            [
                k, v['get_all'],
                v['insertion'],
                v[f'delete_{int(DATASET_SIZE/10)}'],
                v['search'],
                v['range_searches_50'],
                v['histogram'],
                v['facet'],
                v['groups']
            ] for k, v in result.items()]

        print("\n\n")
        print(tabulate(data, headers=['Datastore',
                                      f'GETs {DATASET_SIZE}',
                                      f'PUTs {DATASET_SIZE}',
                                      f'DEL {int(DATASET_SIZE/10)}',
                                      f'Search {DATASET_SIZE} docs',
                                      f'Search {50*DATASET_SIZE} docs',
                                      'histogram',
                                      'facet',
                                      'groups']))
        print("\n\n")

    finally:
        log.setLevel(logging.ERROR)
        print("Wiping data on all datastores...")
        for store in datastores.values():
            store.wipe()