def datastore(datastore_connection, filestore):
    """Fixture: seed NUM_FILES random files (each with 1-3 results) into the
    datastore and filestore, then tear everything down afterwards."""
    global test_file, file_res_list
    ds = datastore_connection
    try:
        create_users(ds)
        for _ in range(NUM_FILES):
            file_obj = random_model_obj(File)
            if test_file is None:
                # Keep the first generated file around for tests that need a known sample
                test_file = file_obj
            ds.file.save(file_obj.sha256, file_obj)
            filestore.put(file_obj.sha256, file_obj.sha256)
            # Attach between one and three random results to every file
            for _ in range(random.randint(1, 3)):
                result_obj = random_model_obj(Result)
                result_obj.sha256 = file_obj.sha256
                result_key = result_obj.build_key()
                file_res_list.append(result_key)
                ds.result.save(result_key, result_obj)
        ds.file.commit()
        ds.result.commit()
        yield ds
    finally:
        wipe_users(ds)
        ds.file.wipe()
        ds.result.wipe()
        for res_key in file_res_list:
            # First 64 chars of a result key are assumed to be the file sha256
            filestore.delete(res_key[:64])
def datastore(datastore_connection):
    """Fixture: build a shared favorites blob and a batch of test users
    (each with favorites and an avatar), tracked in the global user_list."""
    global user_list
    ds = datastore_connection
    try:
        create_users(ds)
        data = {name: [] for name in ('alert', 'error', 'search', 'signature', 'submission')}
        for idx in range(NUM_FAVS):
            fav = random_model_obj(Favorite)
            fav.name = f"test_{idx+1}"
            # The same favorite object is shared across every favorite type
            for fav_list in data.values():
                fav_list.append(fav)
        ds.user_favorites.save('admin', data)
        ds.user_favorites.save('user', data)
        for idx in range(NUM_USERS):
            user = random_model_obj(User)
            user.uname = f"test_{idx+1}"
            ds.user.save(user.uname, user)
            ds.user_favorites.save(user.uname, data)
            ds.user_avatar.save(user.uname, AVATAR)
            user_list.append(user.uname)
        yield ds
    finally:
        wipe_users(ds)
def datastore(datastore_connection):
    """Fixture: NUM_ITEMS files, each with a matching alert and result.

    FIX: results were saved to ds.result but the code then committed and wiped
    ds.submission (to which nothing was ever saved), so result records were
    never committed for search and leaked into subsequent tests. The result
    collection is now committed and wiped; the submission calls are kept since
    wiping an empty collection is harmless and other tests may rely on it.
    """
    ds = datastore_connection
    try:
        create_users(ds)
        for _ in range(NUM_ITEMS):
            f = random_model_obj(File)
            f_hash_list.append(f.sha256)
            ds.file.save(f.sha256, f)
        for x in range(NUM_ITEMS):
            a = random_model_obj(Alert)
            a.file.sha256 = f_hash_list[x]
            ds.alert.save(a.alert_id, a)
        for x in range(NUM_ITEMS):
            r = random_model_obj(Result)
            r.sha256 = f_hash_list[x]
            ds.result.save(r.build_key(), r)
        ds.alert.commit()
        ds.file.commit()
        ds.result.commit()  # FIX: results were saved above but never committed
        ds.submission.commit()
        yield ds
    finally:
        ds.alert.wipe()
        ds.file.wipe()
        ds.result.wipe()  # FIX: results were never wiped, leaking between tests
        ds.submission.wipe()
        wipe_users(ds)
def datastore(datastore_connection, filestore):
    """Fixture: users, services, and NUM_FILES random files stored in both
    the datastore and the filestore; cleans up Elastic, Minio and Redis."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        for _ in range(NUM_FILES):
            new_file = random_model_obj(File)
            ds.file.save(new_file.sha256, new_file)
            file_hashes.append(new_file.sha256)
            filestore.put(new_file.sha256, new_file.sha256)
        ds.file.commit()
        yield ds
    finally:
        # Cleanup Elastic
        ds.file.wipe()
        wipe_services(ds)
        wipe_users(ds)
        # Cleanup Minio
        for sha in file_hashes:
            filestore.delete(sha)
        # Cleanup Redis
        nq.delete()
        iq.delete()
def test_wipe(datastore, cli):
    """Exercise the 'wipe' CLI command.

    This needs to run last as it deletes important data for other tests.
    """
    try:
        cli.do_wipe('bucket user')
        datastore.user.commit()
        assert datastore.user.search("id:user")['total'] == 0
    finally:
        # Restore the users other tests rely on
        create_users(datastore)

    non_system = [
        'alert', 'cached_file', 'emptyresult', 'error', 'file', 'filescore',
        'result', 'submission', 'submission_tree', 'submission_summary', 'workflow'
    ]
    reset_logger()
    cli.do_wipe('non_system')
    # One info log line is expected per wiped bucket, in order
    for idx, bucket_name in enumerate(non_system):
        assert bucket_name.upper() in LOGS['info'][idx]

    submission_data_buckets = [
        'emptyresult', 'error', 'file', 'filescore',
        'result', 'submission', 'submission_tree', 'submission_summary'
    ]
    reset_logger()
    cli.do_wipe('submission_data')
    for idx, bucket_name in enumerate(submission_data_buckets):
        assert bucket_name.upper() in LOGS['info'][idx]
def create_basic_data(log=None, ds=None, svc=True, sigs=True, reset=False):
    """Seed a datastore with baseline test data.

    Always creates user objects; optionally creates services, imports test
    signatures, and creates random heuristics.

    Args:
        log: logger used for progress output. FIX: the previous code called
            log.info() unconditionally, so the documented default of log=None
            raised AttributeError on the first call; it now falls back to a
            standard module logger.
        ds: datastore to populate; defaults to forge.get_datastore().
        svc: when True, also create services and random heuristics.
        sigs: when True, also import test signatures.
        reset: when True, wipe every model collection before seeding.
    """
    if log is None:
        # Fall back to a real logger so the None default is actually usable
        import logging
        log = logging.getLogger(__name__)
    ds = ds or forge.get_datastore()
    if reset:
        log.info("Wiping all collections...")
        for name in ds.ds._models:
            collection = ds.ds.__getattr__(name)
            collection.wipe()
            log.info(f"\t{name}")

    log.info("\nCreating user objects...")
    create_users(ds, log=log)

    if svc:
        log.info("\nCreating services...")
        create_services(ds, log=log)

    if sigs:
        log.info("\nImporting test signatures...")
        signatures = create_signatures(ds)
        for s in signatures:
            log.info(f"\t{s}")

    if svc:
        log.info("\nCreating random heuristics...")
        create_heuristics(ds, log=log)
def datastore(datastore_connection, filestore):
    """Fixture: a full random dataset — heuristics, services, signatures,
    users, workflows, two submissions and ten alerts."""
    ds = datastore_connection
    try:
        random_data.create_heuristics(ds, heuristics_count=10)
        random_data.create_services(ds)
        random_data.create_signatures(ds)
        random_data.create_users(ds)
        random_data.create_workflows(ds)
        submissions = [random_data.create_submission(ds, filestore) for _ in range(2)]
        random_data.create_alerts(ds, alert_count=10, submission_list=submissions)
        yield ds
    finally:
        # Cleanup test data
        random_data.wipe_alerts(ds)
        random_data.wipe_heuristics(ds)
        random_data.wipe_services(ds)
        random_data.wipe_signatures(ds)
        random_data.wipe_submissions(ds, filestore)
        random_data.wipe_users(ds)
        random_data.wipe_workflows(ds)
def datastore(datastore_connection):
    """Fixture: TEST_RESULTS files, each paired with one error and one result
    record; keys are tracked in the module-global key lists."""
    ds = datastore_connection
    try:
        create_users(ds)
        for _ in range(TEST_RESULTS):
            new_file = random_model_obj(File)
            ds.file.save(new_file.sha256, new_file)
            file_list.append(new_file.sha256)
        ds.file.commit()

        for sha in file_list[:TEST_RESULTS]:
            err = random_model_obj(Error)
            err.sha256 = sha
            err_key = err.build_key()
            ds.error.save(err_key, err)
            error_key_list.append(err_key)
        ds.error.commit()

        for sha in file_list[:TEST_RESULTS]:
            res = random_model_obj(Result)
            res.sha256 = sha
            res_key = res.build_key()
            ds.result.save(res_key, res)
            result_key_list.append(res_key)
        ds.result.commit()

        yield ds
    finally:
        ds.error.wipe()
        ds.file.wipe()
        ds.result.wipe()
        wipe_users(ds)
def test_user(datastore, cli):
    """Exercise every 'user' CLI sub-command against the test user."""
    reset_logger()
    cli.do_user("list")
    assert len(LOGS.get('info', [])) == 2

    reset_logger()
    cli.do_user("show user")
    shown = yaml.safe_load(io.StringIO(LOGS['info'][0]))
    assert shown == datastore.user.get("user", as_obj=False)

    cli.do_user("disable user")
    assert not datastore.user.get('user').is_active
    cli.do_user("enable user")
    assert datastore.user.get('user').is_active

    cli.do_user("set_admin user")
    assert 'admin' in datastore.user.get('user').type
    cli.do_user("unset_admin user")
    assert 'admin' not in datastore.user.get('user').type

    cli.do_user("set_otp user")
    assert datastore.user.get('user').otp_sk is not None
    cli.do_user("unset_otp user")
    assert datastore.user.get('user').otp_sk is None

    try:
        cli.do_user("remove user")
        datastore.user.commit()
        assert datastore.user.search("id:user")['total'] == 0
    finally:
        # Put the removed user back for the rest of the suite
        create_users(datastore)
def datastore(datastore_connection, filestore):
    """Fixture: users plus a single random submission."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_submission(ds, filestore)
        yield ds
    finally:
        wipe_users(ds)
        wipe_submissions(ds, filestore)
def datastore(datastore_connection):
    """Fixture: users and services, wiped on teardown."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        yield ds
    finally:
        wipe_users(ds)
        wipe_services(ds)
def datastore(datastore_connection):
    """Fixture: users and heuristics, wiped on teardown."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_heuristics(ds)
        yield ds
    finally:
        wipe_users(ds)
        wipe_heuristics(ds)
def datastore(datastore_connection, filestore):
    """Fixture: users plus NUM_SUBMISSIONS random submissions."""
    ds = datastore_connection
    try:
        create_users(ds)
        for _ in range(NUM_SUBMISSIONS):
            create_submission(ds, filestore)
        yield ds
    finally:
        wipe_users(ds)
        wipe_submissions(ds, filestore)
def datastore(datastore_connection, filestore):
    """Fixture: users plus one submission, kept in the module-global
    `submission`; also clears the sq queue on teardown."""
    global submission
    ds = datastore_connection
    try:
        create_users(ds)
        submission = create_submission(ds, filestore)
        yield ds
    finally:
        wipe_users(ds)
        wipe_submissions(ds, filestore)
        sq.delete()
def datastore(datastore_connection):
    """Fixture: populate every searchable collection (signatures, files,
    alerts, results, submissions, heuristics, workflows) with TEST_SIZE
    random records, wiping them all on teardown."""
    ds = datastore_connection
    try:
        create_users(ds)
        signatures.extend(create_signatures(ds))
        ds.signature.commit()

        for _ in range(TEST_SIZE):
            new_file = random_model_obj(File)
            ds.file.save(new_file.sha256, new_file)
            file_list.append(new_file.sha256)
        ds.file.commit()

        for sha in file_list[:TEST_SIZE]:
            alert = random_model_obj(Alert)
            alert.file.sha256 = sha
            ds.alert.save(alert.alert_id, alert)
        ds.alert.commit()

        for sha in file_list[:TEST_SIZE]:
            result = random_model_obj(Result)
            result.sha256 = sha
            ds.result.save(result.build_key(), result)
        ds.result.commit()

        for sha in file_list[:TEST_SIZE]:
            sub = random_model_obj(Submission)
            # Point every file in the submission at a known sha256
            for sub_file in sub.files:
                sub_file.sha256 = sha
            ds.submission.save(sub.sid, sub)
        ds.submission.commit()

        for idx in range(TEST_SIZE):
            heur = random_model_obj(Heuristic)
            heur.heur_id = f"AL_TEST_{idx}"
            ds.heuristic.save(heur.heur_id, heur)
        ds.heuristic.commit()

        for _ in range(TEST_SIZE):
            workflow_id = get_random_id()
            workflow = random_model_obj(Workflow)
            ds.workflow.save(workflow_id, workflow)
        ds.workflow.commit()

        yield ds
    finally:
        ds.alert.wipe()
        ds.file.wipe()
        ds.result.wipe()
        ds.signature.wipe()
        ds.submission.wipe()
        ds.heuristic.wipe()
        ds.workflow.wipe()
        wipe_users(ds)
def datastore(datastore_connection):
    """Fixture: a single random submission, kept in the module-global
    `test_submission`."""
    global test_submission
    ds = datastore_connection
    try:
        create_users(ds)
        sub = random_model_obj(Submission)
        test_submission = sub
        ds.submission.save(sub.sid, sub)
        ds.submission.commit()
        yield ds
    finally:
        ds.submission.wipe()
        wipe_users(ds)
def datastore(request, datastore_connection, fs):
    """Fixture: a fully-populated datastore, purged both before the test
    (to start clean) and after it via a pytest finalizer."""
    ds = datastore_connection
    purge_data(ds, fs)
    create_alerts(ds, alert_count=1)
    create_heuristics(ds)
    create_services(ds)
    create_signatures(ds)
    create_submission(ds, fs)
    create_users(ds)
    create_workflows(ds)
    request.addfinalizer(lambda: purge_data(ds, fs))
    return ds
def datastore(datastore_connection, filestore):
    """Fixture: users, one submission, and one alert (ALERT_ID) tied to it."""
    ds = datastore_connection
    try:
        create_users(ds)
        submission = create_submission(ds, filestore)
        alert = random_model_obj(Alert)
        alert.alert_id = ALERT_ID
        alert.sid = submission.sid
        ds.alert.save(ALERT_ID, alert)
        yield ds
    finally:
        wipe_users(ds)
        wipe_submissions(ds, filestore)
        ds.alert.delete(ALERT_ID)
def datastore(datastore_connection):
    """Fixture: NUM_ERRORS random error records; the first one generated is
    kept in the module-global `test_error`."""
    global test_error
    ds = datastore_connection
    try:
        create_users(ds)
        for _ in range(NUM_ERRORS):
            err = random_model_obj(Error)
            if test_error is None:
                test_error = err
            ds.error.save(err.build_key(), err)
        ds.error.commit()
        yield ds
    finally:
        wipe_users(ds)
        ds.error.wipe()
def datastore(request, datastore_connection, filestore):
    """Fixture: one submission and one alert referencing it, kept in the
    module globals `test_submission` and `test_alert`."""
    global test_alert, test_submission
    ds = datastore_connection
    create_users(ds)
    test_submission = create_submission(ds, filestore)
    alert = random_model_obj(Alert)
    alert.sid = test_submission.sid
    alert.file.sha256 = test_submission.files[0].sha256
    test_alert = alert
    ds.alert.save(alert.alert_id, alert)
    ds.alert.commit()
    request.addfinalizer(lambda: cleanup(ds, filestore))
    return ds
def datastore(datastore_connection):
    """Fixture: users, services, and NUM_WORKFLOWS random workflows whose ids
    are tracked in the module-global workflow_list."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        for _ in range(NUM_WORKFLOWS):
            wf = random_model_obj(Workflow)
            workflow_list.append(wf.workflow_id)
            ds.workflow.save(wf.workflow_id, wf)
        ds.workflow.commit()
        yield ds
    finally:
        wipe_users(ds)
        wipe_services(ds)
        ds.workflow.wipe()
def datastore(request, datastore_connection, filestore):
    """Fixture: NUM_ALERTS unowned alerts attached to one submission; the
    first alert generated is kept in the module-global `test_alert`."""
    global test_alert
    ds = datastore_connection
    create_users(ds)
    submission = create_submission(ds, filestore)
    for _ in range(NUM_ALERTS):
        alert = random_model_obj(Alert)
        if test_alert is None:
            test_alert = alert
        alert.owner = None
        alert.sid = submission.sid
        ds.alert.save(alert.alert_id, alert)
    ds.alert.commit()
    request.addfinalizer(lambda: purge_alert(ds, filestore))
    return ds
def test_delete(datastore, cli, fs):
    """Exercise the 'delete' CLI command on users and submissions."""
    try:
        # delete all users and check if there are still users
        cli.do_delete("user force id:*")
        datastore.user.commit()
        assert datastore.user.search("id:*")['total'] == 0
    finally:
        # Restore users ...
        create_users(datastore)

    try:
        # Delete random submission and check if still there
        sub_id = datastore.submission.search("id:*", fl="id", rows=1, as_obj=False)['items'][0]['id']
        cli.do_delete(f"submission full force id:{sub_id}")
        datastore.submission.commit()
        # FIX: this assertion previously queried datastore.user, which can
        # never contain a submission id, so it passed vacuously and did not
        # actually verify the deletion. Query the submission collection.
        assert datastore.submission.search(f"id:{sub_id}")['total'] == 0
    finally:
        # Re-create a submission
        create_submission(datastore, fs)