def datastore(datastore_connection, filestore):
    """Fixture: seed users, services and NUM_FILES random files into the
    datastore and filestore, then tear everything down afterwards.

    Yields the populated datastore connection. Cleanup covers Elastic
    (file collection, services, users), Minio (every hash recorded in the
    module-level ``file_hashes`` list) and the Redis queues ``nq``/``iq``.
    """
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        for _ in range(NUM_FILES):
            file_obj = random_model_obj(File)
            ds.file.save(file_obj.sha256, file_obj)
            # Remember the hash so the Minio object can be deleted on teardown
            file_hashes.append(file_obj.sha256)
            filestore.put(file_obj.sha256, file_obj.sha256)
        ds.file.commit()
        yield ds
    finally:
        # Cleanup Elastic
        ds.file.wipe()
        wipe_services(ds)
        wipe_users(ds)
        # Cleanup Minio
        for sha in file_hashes:
            filestore.delete(sha)
        # Cleanup Redis
        nq.delete()
        iq.delete()
def datastore(datastore_connection, filestore):
    """Fixture: build a full random dataset (heuristics, services,
    signatures, users, workflows, two submissions and ten alerts) and wipe
    every piece of it on teardown.
    """
    ds = datastore_connection
    try:
        random_data.create_heuristics(ds, heuristics_count=10)
        random_data.create_services(ds)
        random_data.create_signatures(ds)
        random_data.create_users(ds)
        random_data.create_workflows(ds)
        # Alerts are linked to these submissions, so create them first
        submissions = [random_data.create_submission(ds, filestore) for _ in range(2)]
        random_data.create_alerts(ds, alert_count=10, submission_list=submissions)
        yield ds
    finally:
        # Cleanup test data
        random_data.wipe_alerts(ds)
        random_data.wipe_heuristics(ds)
        random_data.wipe_services(ds)
        random_data.wipe_signatures(ds)
        random_data.wipe_submissions(ds, filestore)
        random_data.wipe_users(ds)
        random_data.wipe_workflows(ds)
def create_basic_data(log=None, ds=None, svc=True, sigs=True, reset=False):
    """Populate a datastore with the baseline objects used by the test suite.

    Args:
        log: Optional logger used for progress messages. May be ``None``,
            in which case progress output is skipped.
        ds: Datastore to populate; defaults to ``forge.get_datastore()``.
        svc: When True, create services and random heuristics.
        sigs: When True, import the test signatures.
        reset: When True, wipe every collection before creating data.
    """
    ds = ds or forge.get_datastore()

    # BUG FIX: log defaults to None but the original called log.info(...)
    # unconditionally, raising AttributeError whenever no logger was passed.
    def _info(msg):
        if log is not None:
            log.info(msg)

    if reset:
        _info("Wiping all collections...")
        for name in ds.ds._models:
            # getattr() is the idiomatic form of ds.ds.__getattr__(name)
            collection = getattr(ds.ds, name)
            collection.wipe()
            _info(f"\t{name}")

    _info("\nCreating user objects...")
    create_users(ds, log=log)

    if svc:
        _info("\nCreating services...")
        create_services(ds, log=log)

    if sigs:
        _info("\nImporting test signatures...")
        signatures = create_signatures(ds)
        for s in signatures:
            _info(f"\t{s}")

    if svc:
        _info("\nCreating random heuristics...")
        create_heuristics(ds, log=log)
def test_service_changes(updater: run_updater.ServiceUpdater):
    """Verify sync_services() mirrors enabled services into redis and drops
    disabled ones, while the datastore keeps the service record either way."""
    ds: MockDatastore = updater.datastore.ds

    # Base conditions, nothing anywhere
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services()) == 0

    # Nothing does nothing
    updater.sync_services()
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services()) == 0

    # Any non-disabled services should be picked up by the updater
    create_services(updater.datastore, limit=1)
    for svc in ds._collections['service']._docs.values():
        svc.enabled = True
        updater._service_stage_hash.set(svc.name, ServiceStage.Update)
        svc.update_config = random_model_obj(UpdateConfig)
    assert len(updater.datastore.list_all_services(full=True)) == 1

    updater.sync_services()
    assert updater.services.length() == 1
    assert len(updater.datastore.list_all_services(full=True)) == 1

    # It should be scheduled to update ASAP
    for entry in updater.services.items().values():
        assert entry['next_update'] <= now_as_iso()

    # Disable the service and it will disappear from redis
    for svc in ds._collections['service']._docs.values():
        svc.enabled = False
    updater.sync_services()
    assert updater.services.length() == 0
    assert len(updater.datastore.list_all_services(full=True)) == 1
def datastore(datastore_connection):
    """Fixture: create users and services, wiping both on teardown."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        yield ds
    finally:
        wipe_users(ds)
        wipe_services(ds)
def datastore(request, datastore_connection, fs):
    """Fixture: start from a clean slate, create one of each data type,
    and register a finalizer that purges everything again."""
    ds = datastore_connection
    purge_data(ds, fs)
    create_alerts(ds, alert_count=1)
    create_heuristics(ds)
    create_services(ds)
    create_signatures(ds)
    create_submission(ds, fs)
    create_users(ds)
    create_workflows(ds)
    # Teardown via finalizer rather than yield/finally
    request.addfinalizer(lambda: purge_data(ds, fs))
    return ds
def datastore(datastore_connection):
    """Fixture: create users, services and NUM_WORKFLOWS random workflows,
    recording workflow ids in the module-level ``workflow_list``; wipes
    everything on teardown."""
    ds = datastore_connection
    try:
        create_users(ds)
        create_services(ds)
        for _ in range(NUM_WORKFLOWS):
            wf = random_model_obj(Workflow)
            # Track the id so tests can look the workflow back up
            workflow_list.append(wf.workflow_id)
            ds.workflow.save(wf.workflow_id, wf)
        ds.workflow.commit()
        yield ds
    finally:
        wipe_users(ds)
        wipe_services(ds)
        ds.workflow.wipe()