def test_get_service(datastore, login_session):
    """A single-service GET must mirror the datastore's service-with-delta view."""
    _, session, host = login_session

    # Any seeded service will do
    target = random.choice(list(SERVICES.keys()))
    api_response = get_api_data(session, f"{host}/api/v4/service/{target}/")

    expected = datastore.get_service_with_delta(target, as_obj=False)
    assert api_response == expected
def test_delete_service(datastore, login_session):
    """Deleting a service removes it from both the delta and versioned service indices."""
    _, session, host = login_session
    ds = datastore

    doomed = random.choice(list(SERVICES.keys()))
    resp = get_api_data(session, f"{host}/api/v4/service/{doomed}/", method="DELETE")
    assert resp['success']

    # Delta index: exactly one fewer entry, none of them the deleted service
    ds.service_delta.commit()
    delta_data = ds.service_delta.search("id:*", rows=100, as_obj=False)
    assert delta_data['total'] == (len(SERVICES) - 1)
    assert all(item['id'] != doomed for item in delta_data['items'])

    # Versioned index stores two docs per service, hence the division by 2
    ds.service.commit()
    svc_data = ds.service.search("id:*", rows=100, as_obj=False)
    assert (svc_data['total'] / 2) == (len(SERVICES) - 1)
    assert all(item['id'] != doomed for item in svc_data['items'])

    # Keep the module-level service map in sync for subsequent tests
    SERVICES.pop(doomed, None)
def test_get_all_services(datastore, login_session):
    """The service listing endpoint returns one entry per known service."""
    _, session, host = login_session

    known_names = sorted(list(SERVICES.keys()))
    listing = get_api_data(session, f"{host}/api/v4/service/all/")

    assert len(listing) == len(known_names)
    for entry in listing:
        assert entry['name'] in known_names
def create_heuristics(ds, log=None):
    """Seed five random heuristics per service into the heuristic index.

    Heuristic IDs follow the "<SERVICE>.<n>" convention with a 1-based counter.
    Commits the index once after all saves.
    """
    for srv in SERVICES.keys():
        for idx in range(5):
            heur = random_model_obj(Heuristic)
            heur.heur_id = f"{srv.upper()}.{idx + 1}"
            heur.name = get_random_phrase()
            ds.heuristic.save(heur.heur_id, heur)
            if log:
                log.info(f'\t{heur.heur_id}')
    ds.heuristic.commit()
def _create_errors_for_file(ds, f, services_done, log=None):
    """Save zero or one random error for file *f* and return the saved keys.

    *services_done* is mutated in place so callers can enforce at most one
    error per service per file across repeated calls.
    """
    saved_keys = []
    for _ in range(random.randint(0, 1)):
        err = random_model_obj(Error)

        # Only one error per service per file: re-roll until the service is unused
        while err.response.service_name in services_done:
            err.response.service_name = random.choice(list(SERVICES.keys()))
        services_done.append(err.response.service_name)

        # Tie the error to the file being processed
        err.sha256 = f

        e_key = err.build_key()
        saved_keys.append(e_key)
        if log:
            log.info(f"\t\t\t{e_key}")
        ds.error.save(e_key, err)
    return saved_keys
def test_edit_service(datastore, login_session):
    """Editing a service changes its delta while leaving versioned docs untouched."""
    _, session, host = login_session
    ds = datastore

    # Snapshot both indices before the edit
    delta_before = ds.service_delta.search("id:*", rows=100, as_obj=False)
    svc_before = ds.service.search("id:*", rows=100, as_obj=False)

    service = random.choice(list(SERVICES.keys()))
    service_data = Service({
        "name": service,
        "enabled": True,
        "category": SERVICES[service][0],
        "stage": SERVICES[service][1],
        "version": "3.3.0",
        "docker_config": {
            "image": f"cccs/alsvc_{service.lower()}:latest",
        },
    }).as_primitives()
    resp = get_api_data(session, f"{host}/api/v4/service/{service}/",
                        method="POST", data=json.dumps(service_data))
    assert resp['success']

    ds.service_delta.commit()
    ds.service.commit()

    # Only the delta index should have changed
    delta_after = ds.service_delta.search("id:*", rows=100, as_obj=False)
    svc_after = ds.service.search("id:*", rows=100, as_obj=False)
    assert delta_after != delta_before
    assert svc_after == svc_before

    # The edited service now pins 3.3.0; everything else stays at 4.0.0
    for svc in delta_after['items']:
        expected_version = '3.3.0' if svc['id'] == service else '4.0.0'
        assert svc['version'] == expected_version
def test_get_versions(datastore, login_session):
    """Every seeded service exposes exactly the two seeded versions."""
    _, session, host = login_session

    svc_name = random.choice(list(SERVICES.keys()))
    versions = get_api_data(session, f"{host}/api/v4/service/versions/{svc_name}/")
    assert versions == ['3.3.0', '4.0.0']
def _create_results_for_file(ds, fs, f, possible_childs=None, log=None):
    """Generate and save 2-5 random service results for file *f*.

    For each result: picks a service not yet used for this file, randomizes
    section depth/body format/heuristic IDs, and ~20% of the time stores an
    empty result instead of a full one. Full results may reference extracted
    children from *possible_childs* and get their first supplementary file
    replaced by a randomly generated ontology file, which is also saved to
    the file index (*ds.file*) and the filestore (*fs*).

    Returns the list of result (or empty-result) keys that were saved.
    """
    r_list = []
    services_done = []
    section_body_format = ["TEXT", "MEMORY_DUMP", "GRAPH_DATA", "URL", "JSON", "KEY_VALUE"]
    # Pre-baked depth sequences; a section's depth cycles through the chosen one
    section_depth_list = [[1, 1, 2, 3, 1], [1, 2, 1], [1, 2, 3, 1], [1, 2]]
    section_depth = random.choice(section_depth_list)
    for _ in range(random.randint(2, 5)):
        r = random_model_obj(Result)
        # Only one result per service per file: re-roll until the service is unused
        while r.response.service_name in services_done:
            r.response.service_name = random.choice(list(SERVICES.keys()))
        for depth_id, section in enumerate(r.result.sections):
            section.depth = section_depth[depth_id % len(section_depth)]
            section.body_format = random.choice(section_body_format)
            # Heuristic IDs must match those seeded by create_heuristics ("<SERVICE>.<1-5>")
            section.heuristic.heur_id = random.choice(
                [f"{r.response.service_name.upper()}.{x+1}" for x in range(5)])
            # Give structured body formats a body that parses as the expected JSON shape
            if section.body_format == "GRAPH_DATA":
                cmap_min = 0
                cmap_max = random.choice([5, 10, 20])
                color_map_data = {
                    'type': 'colormap',
                    'data': {
                        'domain': [cmap_min, cmap_max],
                        'values': [random.random() * cmap_max for _ in range(50)]
                    }
                }
                section.body = json.dumps(color_map_data)
            elif section.body_format == "URL":
                data = [{"url": get_random_uri()} for _ in range(random.randint(1, 4))]
                section.body = json.dumps(data)
            elif section.body_format in ["JSON", "KEY_VALUE"]:
                data = {get_random_word(): get_random_id() for _ in range(random.randint(3, 9))}
                section.body = json.dumps(data)
        services_done.append(r.response.service_name)
        # Set the sha256 so the result belongs to file *f*
        r.sha256 = f
        if random.randint(1, 10) > 8:
            # ~20% of the time, generate an empty result (key suffixed with ".e")
            r_key = f"{r.build_key()}.e"
            ds.emptyresult.save(r_key, random_model_obj(EmptyResult))
        else:
            r_key = r.build_key()
            # Set random extracted files that are not top level
            if not possible_childs:
                r.response.extracted = []
            else:
                for e in r.response.extracted:
                    e.sha256 = random.choice(possible_childs)
            # Set random supplementary files that are not top level
            if r.response.supplementary:
                # Edit the first supplementary file to be an ontology file
                s = r.response.supplementary[0]

                # Create a random ontology tied to this file/service/version
                onto = random_minimal_obj(ResultOntology).as_primitives(strip_null=True)
                onto['header']['sha256'] = f
                onto['header']['service_name'] = r.response.service_name
                onto['header']['service_version'] = r.response.service_version
                onto['header']['service_tool_version'] = r.response.service_tool_version

                # Create its file record; the sha256 is computed from the ontology bytes
                supp_file = random_model_obj(File)
                byte_str = json.dumps(onto).encode('utf-8')
                sha256 = hashlib.sha256(byte_str).hexdigest()
                supp_file.sha256 = sha256
                ds.file.save(sha256, supp_file)
                fs.put(sha256, byte_str)

                # Point the supplementary entry at the ontology file just stored
                s.sha256 = sha256
                s.name = "random.ontology"
                s.description = f"Random Ontology file for: {f}"
                # Keep only the ontology as supplementary
                r.response.supplementary = [s]
            ds.result.save(r_key, r)
        if log:
            log.info(f"\t\t\t{r_key}")
        r_list.append(r_key)
    return r_list
# NOTE(review): this redefines _create_results_for_file with a different
# signature (no *fs* filestore argument) and would shadow the earlier
# definition if both live in the same module — confirm which one is intended.
def _create_results_for_file(ds, f, possible_childs=None, log=None):
    """Generate and save 2-5 random service results for file *f*.

    Simpler variant without filestore/ontology handling: randomizes section
    depth and body content per body_format, enforces one result per service
    per file, and ~20% of the time stores an empty result instead of a full
    one. Extracted and supplementary files are drawn from *possible_childs*
    (or cleared when none are given).

    Returns the list of result (or empty-result) keys that were saved.
    """
    r_list = []
    services_done = []
    # Pre-baked depth sequences; a section's depth cycles through the chosen one
    section_depth_list = [[1, 1, 2, 3, 1], [1, 2, 1], [1, 2, 3, 1], [1, 2]]
    section_depth = random.choice(section_depth_list)
    for _ in range(random.randint(2, 5)):
        r = random_model_obj(Result)
        for depth_id, section in enumerate(r.result.sections):
            section.depth = section_depth[depth_id % len(section_depth)]
            # Give structured body formats a body that parses as the expected JSON shape
            # (body_format itself is whatever random_model_obj produced)
            if section.body_format == "GRAPH_DATA":
                cmap_min = 0
                cmap_max = random.choice([5, 10, 20])
                color_map_data = {
                    'type': 'colormap',
                    'data': {
                        'domain': [cmap_min, cmap_max],
                        'values': [random.random() * cmap_max for _ in range(50)]
                    }
                }
                section.body = json.dumps(color_map_data)
            elif section.body_format == "URL":
                data = [{
                    "url": get_random_uri()
                } for _ in range(random.randint(1, 4))]
                section.body = json.dumps(data)
            elif section.body_format in ["JSON", "KEY_VALUE"]:
                data = {
                    get_random_word(): get_random_id() for _ in range(random.randint(3, 9))
                }
                section.body = json.dumps(data)
        # Only one result per service per file: re-roll until the service is unused
        while r.response.service_name in services_done:
            r.response.service_name = random.choice(list(SERVICES.keys()))
        services_done.append(r.response.service_name)
        # Set the sha256 so the result belongs to file *f*
        r.sha256 = f
        if random.randint(1, 10) > 8:
            # ~20% of the time, generate an empty result (key suffixed with ".e")
            r_key = f"{r.build_key()}.e"
            ds.emptyresult.save(r_key, random_model_obj(EmptyResult))
        else:
            r_key = r.build_key()
            # Set random extracted files that are not top level
            if not possible_childs:
                r.response.extracted = []
            else:
                for e in r.response.extracted:
                    e.sha256 = random.choice(possible_childs)
            # Set random supplementary files that are not top level
            if not possible_childs:
                r.response.supplementary = []
            else:
                for s in r.response.supplementary:
                    s.sha256 = random.choice(possible_childs)
            ds.result.save(r_key, r)
        if log:
            log.info(f"\t\t\t{r_key}")
        r_list.append(r_key)
    return r_list