import pytest

# Helpers used below (random_data, create_*/wipe_*, random_model_obj, model
# classes, NUM_* constants, and the nq/iq queues) come from the surrounding
# test modules; the fixture decorator and its scope are assumed, since the
# yield/finally pattern implies a pytest yield-fixture.


@pytest.fixture(scope="module")
def datastore(datastore_connection, filestore):
    ds = datastore_connection
    try:
        # Seed the datastore with randomized test data
        random_data.create_heuristics(ds, heuristics_count=10)
        random_data.create_services(ds)
        random_data.create_signatures(ds)
        random_data.create_users(ds)
        random_data.create_workflows(ds)
        submissions = []
        for _ in range(2):
            submissions.append(random_data.create_submission(ds, filestore))
        random_data.create_alerts(ds, alert_count=10, submission_list=submissions)
        yield ds
    finally:
        # Cleanup test data
        random_data.wipe_alerts(ds)
        random_data.wipe_heuristics(ds)
        random_data.wipe_services(ds)
        random_data.wipe_signatures(ds)
        random_data.wipe_submissions(ds, filestore)
        random_data.wipe_users(ds)
        random_data.wipe_workflows(ds)
file_hashes = []  # Module level (assumed) so tests can reference the generated hashes


@pytest.fixture(scope="module")
def datastore(datastore_connection, filestore):
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        for _ in range(NUM_FILES):
            f = random_model_obj(File)
            ds.file.save(f.sha256, f)
            file_hashes.append(f.sha256)
            # The hash doubles as the file body in the filestore
            filestore.put(f.sha256, f.sha256)
        ds.file.commit()
        yield ds
    finally:
        # Cleanup Elastic
        ds.file.wipe()
        wipe_services(ds)
        wipe_users(ds)

        # Cleanup Minio
        for f in file_hashes:
            filestore.delete(f)

        # Cleanup Redis (nq/iq are queue objects defined elsewhere in the module)
        nq.delete()
        iq.delete()
@pytest.fixture(scope="module")
def datastore(datastore_connection):
    try:
        create_users(datastore_connection)
        create_services(datastore_connection)
        yield datastore_connection
    finally:
        wipe_users(datastore_connection)
        wipe_services(datastore_connection)
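# A minimal sketch of how a test consumes the fixture above: pytest matches
# the parameter name and injects the yielded datastore. The test name and
# the search() call are assumptions for illustration, not part of the
# fixtures in this file.
def test_users_exist(datastore):
    # The fixture yields the live connection after seeding users/services
    assert datastore.user.search("*:*")['total'] > 0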
def purge_data(ds, fs):
    wipe_alerts(ds)
    wipe_heuristics(ds)
    wipe_services(ds)
    wipe_signatures(ds)
    wipe_submissions(ds, fs)
    wipe_users(ds)
    wipe_workflows(ds)
workflow_list = []  # Module level (assumed): collects the IDs of the generated workflows


@pytest.fixture(scope="module")
def datastore(datastore_connection):
    try:
        create_users(datastore_connection)
        create_services(datastore_connection)
        for _ in range(NUM_WORKFLOWS):
            workflow = random_model_obj(Workflow)
            workflow_list.append(workflow.workflow_id)
            datastore_connection.workflow.save(workflow.workflow_id, workflow)
        datastore_connection.workflow.commit()
        yield datastore_connection
    finally:
        wipe_users(datastore_connection)
        wipe_services(datastore_connection)
        datastore_connection.workflow.wipe()
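# Hypothetical usage sketch: because workflow_list lives at module scope, a
# test can cross-check the seeded workflows against the datastore. The test
# name and the get() call are assumptions, shown only to illustrate why the
# fixture records the IDs outside its own body.
def test_seeded_workflows_are_retrievable(datastore):
    for workflow_id in workflow_list:
        assert datastore.workflow.get(workflow_id) is not None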