def test_azure_sentinel_editor(mp_conf_ctrl):
    """Items edit controls."""
    edit_comp = CEAzureSentinel(mp_controls=mp_conf_ctrl)
    n_opts = len(edit_comp.select_item.options)
    # Adding a new workspace should add one entry to the item selector
    edit_comp.edit_buttons.btn_add.click()
    check.equal(n_opts + 1, len(edit_comp.select_item.options))
    new_ws = edit_comp.current_workspace
    # Validation is expected to fail before the ID fields are populated
    result, _ = _validate_ws(new_ws, mp_conf_ctrl, edit_comp._COMP_PATH)
    check.is_false(result)
    # Fill the ID controls with valid UUIDs, save, and re-validate
    edit_comp.edit_ctrls.children[1].value = "40dcc8bf-0478-4f3b-b275-ed0a94f2c013"
    edit_comp.edit_ctrls.children[2].value = "40dcc8bf-0478-4f3b-b275-ed0a94f2c013"
    edit_comp.edit_buttons.btn_save.click()
    result, _ = _validate_ws(new_ws, mp_conf_ctrl, edit_comp._COMP_PATH)
    check.is_true(result)
    # Save the current item
    edit_comp.edit_buttons.btn_save.click()
    check.is_not_none(mp_conf_ctrl.get_value(f"{edit_comp._COMP_PATH}.{new_ws}"))
    # Rename
    edit_comp.edit_ctrls.children[0].value = "TestWS"
    edit_comp.edit_buttons.btn_save.click()
    ren_workspace_settings = mp_conf_ctrl.get_value(f"{edit_comp._COMP_PATH}.TestWS")
    check.is_not_none(ren_workspace_settings)
    # Setting the current workspace as default should make the "Default"
    # entry equal to the renamed workspace settings
    edit_comp.btn_set_default.click()
    def_ws = mp_conf_ctrl.get_value(f"{edit_comp._COMP_PATH}.Default")
    check.equal(def_ws, ren_workspace_settings)
def test_ip_summary_notebooklet_internal(monkeypatch):
    """Run the IP summary notebooklet against an internal-origin address."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    monkeypatch.setattr(data_providers, "TILookup", TILookupMock)
    data_path = str(Path(TEST_DATA_PATH).absolute())
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[data_path],
        LocalData_query_paths=[data_path],
        providers=["tilookup", "geolitelookup"],
    )
    notebooklet = nblts.azsent.network.IpAddressSummary()
    # Make the mock provider's schema claim all the default tables exist
    notebooklet.query_provider.schema.update({table: {} for table in DEF_PROV_TABLES})
    nb_result = notebooklet.run(value="40.76.43.124", timespan=TimeSpan(period="1D"))
    check.is_not_none(nb_result.ip_entity)
    check.equal(nb_result.ip_type, "Public")
    check.equal(nb_result.ip_origin, "Internal")
    check.is_not_none(nb_result.whois)
    check.is_instance(nb_result.related_alerts, pd.DataFrame)
    check.is_instance(nb_result.heartbeat, pd.DataFrame)
    check.is_instance(nb_result.az_network_if, pd.DataFrame)
    # Passive DNS / TI are not populated for internal addresses by default
    check.is_none(nb_result.passive_dns)
    check.is_none(nb_result.ti_results)
def test_notebooklet_create(monkeypatch): """Test method.""" # Should run because required providers are loaded monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock) data_providers.init(query_provider="LocalData", providers=["tilookup", "geolitelookup"]) for _, nblt in nblts.iter_classes(): new_nblt = nblt() check.is_instance(new_nblt, Notebooklet) check.is_none(new_nblt.result) # Should throw a warning because of unrecognized provider data_providers.init(query_provider="LocalData") with pytest.raises(MsticnbDataProviderError) as err: for _, nblt in nblts.iter_classes(): curr_provs = nblt.metadata.req_providers bad_provs = [*curr_provs, "bad_provider"] try: nblt.metadata.req_providers = bad_provs new_nblt = nblt() check.is_instance(new_nblt, Notebooklet) check.is_none(new_nblt.result) finally: nblt.metadata.req_providers = curr_provs check.is_in("bad_provider", err.value.args[0]) test_nb = TstNBSummary() check.is_not_none(test_nb.get_provider("LocalData")) with pytest.raises(MsticnbDataProviderError): test_nb.get_provider("otherprovider")
def test_ip_summary_notebooklet(monkeypatch):
    """Run the IP summary notebooklet against an external address."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    monkeypatch.setattr(data_providers, "TILookup", TILookupMock)
    local_data = str(Path(TEST_DATA_PATH).absolute())
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[local_data],
        LocalData_query_paths=[local_data],
        providers=["tilookup", "geolitelookup"],
    )
    notebooklet = nblts.azsent.network.IpAddressSummary()
    nb_result = notebooklet.run(value="11.1.2.3", timespan=TimeSpan(period="1D"))
    check.is_not_none(nb_result.ip_entity)
    check.equal(nb_result.ip_type, "Public")
    check.equal(nb_result.ip_origin, "External")
    check.is_in("CountryCode", nb_result.geoip)
    check.is_not_none(nb_result.location)
    check.is_not_none(nb_result.notebooklet)
    check.is_not_none(nb_result.whois)
    check.is_instance(nb_result.related_alerts, pd.DataFrame)
    check.is_not_none(notebooklet.browse_alerts())
    check.is_instance(nb_result.passive_dns, pd.DataFrame)
    check.is_instance(nb_result.ti_results, pd.DataFrame)
def test_live_connect():
    """Use this to do live testing.

    Connects to a live Splunk instance - credentials are redacted and must
    be supplied manually before running interactively.
    """
    sp_driver = SplunkDriver()
    www = "splunk-mstic.westus2.cloudapp.azure.com"
    # Placeholder credentials - replace before running
    sp_driver.connect(host=www, port=8089, username="******", password="******") # nosec
    # Well-formed query - expect a non-None result
    query = """index="botsv2" earliest=08/25/2017:00:00:00 latest=08/26/2017:00:00:00 source="WinEventLog:Microsoft-Windows-Sysmon/Operational" | table TimeCreated, host, EventID, EventDescription, User, process | head 10 """
    res_df = sp_driver.query(query)
    check.is_not_none(res_df)
    # Deliberately broken query (stray "+ '") - driver should return an
    # empty list rather than raising
    query0 = """index="botsv2" earliest=08/25/2020:00:00:00 + 'source="WinEventLog:Microsoft-Windows-Sysmon/Operational" | table TimeCreated, host, EventID, EventDescription, User, process | head 10 """
    res_df = sp_driver.query(query0)
    check.is_instance(res_df, list)
    check.is_false(res_df)
    # Second valid query against a different index
    query1 = """ index=blackhat sourcetype=network earliest=0 | table TimeGenerated, TotalBytesSent """
    res_df = sp_driver.query(query1)
    check.is_not_none(res_df)
def test_it_gets_total_times_and_distances(ors_client, member_list, meeting_location_list):
    """getTotals returns both a 'times' and a 'distances' entry."""
    calculator = GroupDistanceCalculator(ors_client, member_list, meeting_location_list)
    totals = calculator.getTotals()
    for key in ("times", "distances"):
        check.is_not_none(totals[key])
def test_boardview():
    """BoardView initializes colors, board shape, and agent bookkeeping."""
    positions = [(4, 4)]
    board_view = view.BoardView(board_data, positions)
    check.equal(board_view.hill_color, [128, 255, 180])
    check.equal(board_view.agent_color, [0, 255, 0])
    check.equal(board_view.board_data.shape, (12, 12, 3))
    check.equal(board_view.agent_positions, positions)
    check.equal(board_view.new_agent_positions, [])
    check.is_false(board_view.agents_need_update)
    check.is_not_none(board_view.screen)
def test_auditd_cluster():
    """Cluster auditd execve events from the sample linux event log."""
    source_df = pd.read_csv(os.path.join(_TEST_DATA, "linux_events.csv"))
    # The AuditdMessage column holds stringified dicts - eval them back
    source_df["AuditdMessage"] = source_df.apply(
        lambda row: ast.literal_eval(row.AuditdMessage), axis=1
    )
    events_df = extract_events_to_df(data=source_df)
    execve_events = get_event_subset(events_df, event_type="SYSCALL_EXECVE")
    clustered = cluster_auditd_processes(execve_events, app=None)
    check.is_not_none(clustered)
    # The sample data clusters into exactly two process groups
    check.equal(len(clustered), 2)
def test_nbresult():
    """HostSummaryResult renders text, properties, and well-formed HTML."""
    host_result = HostSummaryResult()
    host_result.host_entity = {"host_name": "myhost"}
    host_result.related_alerts = pd.DataFrame()
    host_result.related_bookmarks = pd.DataFrame()
    text_repr = str(host_result)
    check.is_in("host_entity:", text_repr)
    check.is_in("DataFrame:", text_repr)
    check.is_in("host_entity", host_result.properties)
    # recover=False makes the parser raise on malformed HTML
    strict_parser = etree.HTMLParser(recover=False)
    parsed = etree.parse(StringIO(host_result._repr_html_()), strict_parser)
    check.is_not_none(parsed)
def test_class_doc():
    """Every notebooklet exposes HTML and markdown help that agree."""
    for _, nblt in nblts.iter_classes():
        html_doc = nblt.get_help()
        check.not_equal(html_doc, "No documentation available.")
        check.greater(len(html_doc), 100)
        # Rendering the markdown help should reproduce the HTML help
        md_doc = nblt.get_help(fmt="md")
        check.equal(html_doc, markdown(md_doc))
        # recover=False: parse raises if the HTML is malformed
        strict_parser = etree.HTMLParser(recover=False)
        check.is_not_none(etree.parse(StringIO(html_doc), strict_parser))
def test_template_notebooklet(monkeypatch):
    """Basic run of the template notebooklet plus optional operations."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_path = str(Path(TEST_DATA_PATH).absolute())
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[data_path],
        LocalData_query_paths=[data_path],
    )
    template_nb = TemplateNB()
    time_span = TimeSpan(period="1D")
    run_result = template_nb.run(value="myhost", timespan=time_span)
    for attr in ("all_events", "description", "plot"):
        check.is_not_none(getattr(run_result, attr))
    # The "+get_metadata" option populates additional_info
    run_result = template_nb.run(
        value="myhost", timespan=time_span, options=["+get_metadata"]
    )
    check.is_not_none(run_result.additional_info)
    events = template_nb.run_additional_operation(
        ["4679", "5058", "5061", "5059", "4776"]
    )
    check.is_instance(events, pd.DataFrame)
def test_init_data_providers(monkeypatch):
    """DataProviders creates and registers the expected provider set."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    providers = data_providers.DataProviders(query_provider="LocalData")
    check.is_not_none(providers)
    # The new instance becomes the "current" singleton
    check.equal(providers, data_providers.DataProviders.current())
    for name in ("LocalData", "geolitelookup", "tilookup"):
        check.is_in(name, providers.providers)
    check.is_instance(providers.providers["LocalData"], QueryProvider)
    check.is_instance(providers.providers["geolitelookup"], GeoIPLiteMock)
    check.is_instance(providers.providers["tilookup"], TILookup)
def test_winhostevents_notebooklet(monkeypatch):
    """Test basic run of the WinHostEvents notebooklet.

    Fix: the original asserted `result.event_pivot` not-none/is-DataFrame
    twice (exact duplicate pair); the redundant assertions are folded into
    a single loop over the three result attributes.
    """
    test_data = str(Path(TEST_DATA_PATH).absolute())
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[test_data],
        LocalData_query_paths=[test_data],
    )
    test_nb = nblts.azsent.host.WinHostEvents()
    tspan = TimeSpan(period="1D")
    result = test_nb.run(value="myhost", timespan=tspan)
    # Each result attribute should be a populated DataFrame
    for attr in ("all_events", "event_pivot", "account_events"):
        value = getattr(result, attr)
        check.is_not_none(value)
        check.is_instance(value, pd.DataFrame)
    # check.is_not_none(result.account_timeline)
    exp_events = test_nb.expand_events(["5058", "5061"])
    check.is_instance(exp_events, pd.DataFrame)
def test_pivot_providers_namespace(_create_pivot_ns, test_case):
    """
    Test pivot initialized from globals/namespace.

    Notes
    -----
    Test that the expected number of functions have been added to entities.
    """
    container = getattr(getattr(entities, test_case.entity), test_case.container)
    check.is_not_none(container)
    # The container repr lists one attribute per line
    attr_lines = repr(container).split("\n")
    check.greater_equal(len(attr_lines), test_case.funcs)
def test_pipeline_objects(_create_pivot):
    """Parse pipelines from YAML and verify printing and round-tripping."""
    pipelines = list(Pipeline.from_yaml(_TEST_PIPELINES))
    check.equal(len(pipelines), 2)
    first_pl = pipelines[0]
    check.equal(first_pl.name, "pipeline1")
    check.equal(first_pl.description, "Pipeline 1 description")
    check.equal(len(first_pl.steps), 5)
    for step in first_pl.steps:
        check.is_not_none(step.name)
        check.is_not_none(step.comment)
        check.is_not_none(step.params)
        if step.step_type in ("pivot", "pivot_tee_exec", "pd_accessor"):
            check.is_not_none(step.function)
        if step.step_type == "pivot":
            check.is_not_none(step.entity)
    pl_repr = repr(first_pl)
    check.is_in("Pipeline(name='pipeline1'", pl_repr)
    check.is_in("steps=[PipelineStep(name='get_logons", pl_repr)
    printed = first_pl.print_pipeline(df_name="input_df")
    check.equal(printed, _EXPECTED_OUTPUT)
    # With comments=False, output should match the expected text minus
    # all comment lines
    printed_no_comments = first_pl.print_pipeline(df_name="input_df", comments=False)
    expected_no_comments = "\n".join(
        line
        for line in _EXPECTED_OUTPUT.split("\n")
        if not line.strip().startswith("#")
    )
    check.equal(printed_no_comments, expected_no_comments)
    single_pl = Pipeline.parse_pipeline(yaml.safe_load(_TEST_SINGLE_PIPELINE))
    check.equal(single_pl.name, "pipeline1")
    check.equal(single_pl.description, "Pipeline 1 description")
    check.equal(len(single_pl.steps), 5)
    # Test to_yaml - the raw yaml text won't match (None serializes as
    # null) but parsing it back should give an identical object
    rebuilt = Pipeline.parse_pipeline(yaml.safe_load(single_pl.to_yaml()))
    check.equal(single_pl.name, rebuilt.name)
    check.equal(single_pl.description, rebuilt.description)
    for orig_step, new_step in zip(single_pl.steps, rebuilt.steps):
        check.equal(orig_step, new_step)
def test_experiment_popup_plotter_graph(time, voltage, baseline):
    """The popup plotter draws one trace plus a labelled baseline line."""
    graph = ExperimentPopupPlotter(time, voltage, baseline)()
    traces = graph.getPlotItem().listDataItems()
    check.equal(len(traces), 1)
    check_allclose(traces[0].xData, time)
    check_allclose(traces[0].yData, voltage)
    # Locate the infinite line whose label reads "baseline"
    baseline_line = None
    for plot_item in graph.getPlotItem().items:
        if isinstance(plot_item, InfiniteLine) and plot_item.label.format == "baseline":
            baseline_line = plot_item
    check.is_not_none(baseline_line)
    check.equal(baseline_line.y(), baseline)
def compare_tables(table, ref):
    """Compare each cell's formula in *table* against the *ref* matrix.

    Parameters
    ----------
    table : table object with num_rows/num_cols and cell(row, col).formula
    ref : 2-D sequence of expected formula strings (None = no formula)

    Fixes: failure-message typo "!existsy@" -> "!exists@"; the cell is
    fetched once per position instead of up to twice.
    """
    for row_num in range(table.num_rows):
        for col_num in range(table.num_cols):
            cell_formula = table.cell(row_num, col_num).formula
            expected = ref[row_num][col_num]
            if expected is None:
                # No reference formula: the cell must not have one either
                check.is_none(
                    cell_formula,
                    f"!exists@[{row_num},{col_num}]",
                )
            else:
                check.is_not_none(
                    cell_formula,
                    f"exists@[{row_num},{col_num}]",
                )
                check.equal(
                    cell_formula,
                    expected,
                    f"formula@[{row_num},{col_num}]",
                )
def test_check_config():
    """Test config check.

    Points MSTICPYCONFIG at the test config file, runs _check_config()
    and verifies the error/warning contract, then restores the original
    environment.

    Fixes: the original assigned ``os.environ["MSTICPYCONFIG"] = mp_var``
    unconditionally, which raises TypeError when the variable was not set
    (``get`` returns None), and skipped restoration entirely if a check
    raised. Restoration now runs in a ``finally`` and removes the variable
    when it was initially absent.
    """
    saved_mp_var = os.environ.get("MSTICPYCONFIG")
    os.environ["MSTICPYCONFIG"] = TEST_DATA_PATH + "/msticpyconfig.yaml"
    try:
        result, err_warn = _check_config()
        if not result:
            # If failed - err_warn should be set
            # and item 0 should be populated with errors
            check.is_not_none(err_warn)
            check.is_true(err_warn[0])
        elif err_warn:
            # otherwise we have no errors or warnings or just warnings
            check.is_false(err_warn[0])
            check.is_true(err_warn[1])
    finally:
        if saved_mp_var is None:
            os.environ.pop("MSTICPYCONFIG", None)
        else:
            os.environ["MSTICPYCONFIG"] = saved_mp_var
def test_query_functions_methods(azure_sentinel):
    """Test attributes of retrieved functions."""
    az_qry_funcs = PivotQueryFunctions(azure_sentinel)
    ip_queries = list(az_qry_funcs.get_queries_and_types_for_param("ip_address"))
    host_queries = list(az_qry_funcs.get_queries_for_param("host_name"))
    check.greater_equal(len(ip_queries), 4)
    check.greater_equal(len(host_queries), 20)
    hostname_matches = [tup for tup in host_queries if tup[0] == "get_info_by_hostname"]
    func_name, func_family, func = hostname_matches[0]
    check.is_instance(func, partial)
    check.is_true(callable(func))
    q_params = az_qry_funcs.query_params.get(f"{func_family}.{func_name}")
    # expected results
    # all=['table', 'query_project', 'start', 'end', 'subscription_filter',
    # 'add_query_items', 'host_name', 'host_op'],
    # required=['host_name'],
    # full_required=['start', 'end', 'host_name']
    check.is_in("start", q_params.all)
    check.is_in("host_name", q_params.required)
    for param_name in ("host_name", "start", "end"):
        check.is_in(param_name, q_params.full_required)
    param_attrs = az_qry_funcs.get_param_attrs("ip_address")
    # Expected return
    # [ParamAttrs(type='str', query='get_info_by_ipaddress', family='Heartbeat',
    # required=True),
    # ParamAttrs(type='str', query='list_logons_for_source_ip', family='LinuxSyslog',
    # required=True),
    # ParamAttrs(type='str', query='get_host_for_ip', family='Network',
    # required=True),
    # ParamAttrs(type='str', query='get_heartbeat_for_ip', family='Network',
    # required=True)]
    first_attr = param_attrs[0]
    check.is_in(first_attr.type, ("str", "list", "datetime"))
    check.is_true(first_attr.required)
    check.is_not_none(first_attr.query)
    check.is_not_none(first_attr.family)
def test_key_vault_editor(mp_conf_ctrl):
    """Items edit controls."""
    editor = CEKeyVault(mp_controls=mp_conf_ctrl)
    check.is_not_none(editor.help.html_help.value)
    check.is_not_none(editor._DESCRIPTION)
    check.is_not_none(editor._COMP_PATH)
    check.greater_equal(len(editor._HELP_URI), 1)
    # First pass: three invalid values -> three validation failures
    invalid_values = {
        "TenantId": "tenant",  # invalid UUID
        "SubscriptionId": "sub",  # invalid UUID
        "ResourceGroup": "",  # OK to have empty
        "AzureRegion": "",  # OK to have empty
        "VaultName": "",  # invalid to have empty
    }
    for ctrl_name, ctrl_value in invalid_values.items():
        editor.controls[ctrl_name].value = ctrl_value
    editor.btn_save.click()
    check.equal(len(mp_conf_ctrl.validate_setting(f"{editor._COMP_PATH}")), 3)
    # Second pass: all valid -> no validation failures
    valid_values = {
        "TenantId": "40dcc8bf-0478-4f3b-b275-ed0a94f2c013",
        "SubscriptionId": "40dcc8bf-0478-4f3b-b275-ed0a94f2c013",
        "ResourceGroup": "resgroup",
        "AzureRegion": "Europe",
        "VaultName": "MyVault",
        "Authority": "global",
    }
    for ctrl_name, ctrl_value in valid_values.items():
        editor.controls[ctrl_name].value = ctrl_value
    editor.btn_save.click()
    check.equal(len(mp_conf_ctrl.validate_setting(f"{editor._COMP_PATH}")), 0)
def test_notebooklet_options(monkeypatch):
    """Test option logic for notebooklet."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_providers.init(
        query_provider="LocalData", providers=["tilookup", "geolitelookup"]
    )
    nb_test = TstNBSummary()
    # default options
    nb_res = nb_test.run()
    check.is_not_none(nb_res.default_property)
    check.is_none(nb_res.optional_property)
    # "+" prefix adds an optional option on top of the defaults
    nb_res = nb_test.run(options=["+optional_opt"])
    check.is_not_none(nb_res.default_property)
    check.is_not_none(nb_res.optional_property)
    # "-" prefix removes a default option
    nb_res = nb_test.run(options=["-default_opt"])
    check.is_none(nb_res.default_property)
    check.is_none(nb_res.optional_property)
    # Unprefixed names replace the default option set entirely.
    # NOTE(review): "heartbest" looks like a typo for "heartbeat" - confirm;
    # the assertions hold either way since neither sets these properties.
    nb_res = nb_test.run(options=["heartbest", "azure_net"])
    check.is_none(nb_res.default_property)
    check.is_none(nb_res.optional_property)
    # An invalid option name is reported on stdout, not raised
    captured = StringIO()
    with redirect_stdout(captured):
        nb_test.run(options=["invalid_opt"])
    check.is_in("Invalid options ['invalid_opt']", str(captured.getvalue()))
def compare_table_functions(sheet_name, filename=DOCUMENT):
    """Check formula text and results in the 'Tests' table of *sheet_name*."""
    table = Document(filename).sheets[sheet_name].tables["Tests"]
    for row_num, row in enumerate(table.rows()):
        if row_num == 0 or not row[3].value:
            # Skip header and invalid test rows
            continue
        if isinstance(row[0], BoolCell):
            # Test value is true/false
            expected_formula = str(row[0].value).upper()
        else:
            expected_formula = row[0].value
        actual_result = row[1].value
        reference_value = row[2].value
        check.is_not_none(row[1].formula, f"exists@{row_num}")
        check.equal(row[1].formula, expected_formula, f"formula@{row_num}")
        if isinstance(row[1], ErrorCell):
            continue
        if isinstance(row[1], TextCell) and reference_value is None:
            continue
        check.equal(actual_result, reference_value, f"value@{row_num}")
def test_mp_config_controls_load(mp_conf_ctrl: MpConfigControls):
    """Loading MpConfigControls."""
    for attr in ("mp_config", "config_defn", "controls"):
        check.is_not_none(getattr(mp_conf_ctrl, attr))
    validation_results = mp_conf_ctrl.validate_all_settings()
    # We have one bad setting due to our test mpconfig
    check.equal(len(validation_results), 1)
    check.equal(
        validation_results[0][1],
        "Validation failed for path 'TIProviders.AzureSentinel.Provider'",
    )
    get_or_create_mpc_section(mp_conf_ctrl, "TestSettings", "TestSubkey")
    section_value = mp_conf_ctrl.get_value("TestSettings")
    check.is_not_none(section_value)
    check.is_in("TestSubkey", section_value)
def test_mp_config_file_show_kv(kv_client):
    """Test view secrets."""
    del kv_client
    mpc_file = MpConfigFile()
    mpc_file.show_kv_secrets()
    check.is_not_none(mpc_file.kv_client)
    # set up mocked kv_client - include one secret with no stored value
    secret_names = [*_KV_SECS, "url/MissingValue"]
    type(mpc_file.kv_client).secrets = PropertyMock(return_value=secret_names)
    mpc_file.kv_client.get_secret = lambda sec: _KV_SECS[f"url/{sec}"]
    # run show secrets again with mocked client
    mpc_file.show_kv_secrets()
    viewer_text = mpc_file.txt_viewer.value
    for sec_name, sec_value in _KV_SECS.items():
        check.is_in(sec_name.split("/")[-1], viewer_text)
        check.is_in(sec_value, viewer_text)
    # The secret with no value should show the fallback message
    check.is_in("MissingValue", viewer_text)
    check.is_in("Value: Could not display secret", viewer_text)
def _execute_item_editor_test(edit_comp, cases):
    """Exercise add/save/delete on an item editor for each option in *cases*.

    Maintains a running expected option count (n_opts) and checks it after
    every widget interaction.
    """
    check.is_not_none(edit_comp.help.html_help.value)
    check.is_not_none(edit_comp._DESCRIPTION)
    check.is_not_none(edit_comp._COMP_PATH)
    check.greater_equal(len(edit_comp._HELP_URI), 1)
    for test_opt in cases:
        print(f"Testing {edit_comp.__class__.__name__}, {test_opt}")
        opts = edit_comp.select_item.options
        n_opts = len(opts)
        # If this control has an options list - select the first of these
        prov_opts = getattr(edit_comp, "prov_options", None)
        if prov_opts and prov_opts.options:
            edit_comp.prov_options.value = test_opt
        # If there is an existing item, delete this
        if _is_current_option(test_opt, edit_comp.select_item):
            edit_comp.select_item.label = test_opt
            edit_comp.edit_buttons.btn_del.click()
            n_opts -= 1
        # Add a new one
        edit_comp.edit_buttons.btn_add.click()
        # Save the current item
        edit_comp.edit_buttons.btn_save.click()
        check.equal(len(edit_comp.select_item.options), n_opts + 1, "Item added")
        if isinstance(edit_comp, CEAzureSentinel):
            # Azure Sentinel editors also maintain a "Default" entry -
            # remove any existing one, then set the new item as default
            if _is_current_option("Default", edit_comp.select_item):
                edit_comp.select_item.label = "Default"
                edit_comp.edit_buttons.btn_del.click()
                n_opts -= 1
            edit_comp.btn_set_default.click()
            edit_comp.edit_buttons.btn_save.click()
            n_opts += 1
            check.equal(
                len(edit_comp.select_item.options), n_opts + 1, "AzSent default added"
            )
        if prov_opts and prov_opts.options:
            edit_comp.prov_options.value = test_opt
            edit_comp.edit_buttons.btn_add.click()
            # check that we didn't add a duplicate
            check.equal(
                len(edit_comp.select_item.options), n_opts + 1, "Dup item not added"
            )
        # delete whatever we've just added
        edit_comp.edit_buttons.btn_del.click()
        check.equal(len(edit_comp.select_item.options), n_opts, "New item deleted")
def test_host_summary_notebooklet(monkeypatch):
    """Test basic run of notebooklet."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_path = str(Path(TEST_DATA_PATH).absolute())
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[data_path],
        LocalData_query_paths=[data_path],
    )
    host_nb = nblts.azsent.host.HostSummary()
    nb_result = host_nb.run(value="myhost", timespan=TimeSpan(period="1D"))
    check.is_not_none(nb_result.host_entity)
    check.is_not_none(nb_result.related_alerts)
    check.is_instance(nb_result.related_alerts, pd.DataFrame)
    check.is_not_none(nb_result.alert_timeline)
    check.is_not_none(nb_result.related_bookmarks)
    check.is_instance(nb_result.related_bookmarks, pd.DataFrame)
def test_class_methods():
    """Class-level metadata accessors work on every notebooklet."""
    for _, nblt in nblts.iter_classes():
        check.is_not_none(nblt.description())
        check.is_not_none(nblt.name())
        # Defaults are a subset of all options
        check.greater_equal(len(nblt.all_options()), len(nblt.default_options()))
        check.greater(len(nblt.keywords()), 0)
        check.greater(len(nblt.entity_types()), 0)
        settings = nblt.get_settings(print_settings=False)
        check.is_not_none(settings)
        for key in ("mod_name", "default_options", "keywords"):
            check.is_in(key, settings)
def test_resolver_funcs():
    """Test domain utils functions.

    NOTE(review): dns_resolve/ip_rev_resolve appear to perform live DNS
    lookups - presumably requires network access; confirm for CI use.
    """
    resolved = domain_utils.dns_resolve("www.microsoft.com")
    check.is_not_none(resolved["qname"])
    check.is_true(resolved["rrset"])
    first_ip = resolved["rrset"][0]
    # A nonsense TLD still returns a result dict, just with no rrset
    bogus = domain_utils.dns_resolve("www.contoso.garbage")
    check.is_not_none(bogus)
    check.is_false(bogus.get("rrset"))
    check.is_not_none(domain_utils.ip_rev_resolve(first_ip))
    dns_parts = domain_utils.dns_components("www.microsoft.com")
    check.equal(dns_parts["subdomain"], "www")
    check.equal(dns_parts["domain"], "microsoft")
    check.equal(dns_parts["suffix"], "com")
    url_parts = domain_utils.url_components("http://www.microsoft.com")
    check.equal(url_parts["scheme"], "http")
    check.equal(url_parts["host"], "www.microsoft.com")
def test_ip_summary_notebooklet_all(monkeypatch):
    """Run the IP summary notebooklet with all optional data sets enabled."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    monkeypatch.setattr(data_providers, "TILookup", TILookupMock)
    data_path = str(Path(TEST_DATA_PATH).absolute())
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[data_path],
        LocalData_query_paths=[data_path],
        providers=["tilookup", "geolitelookup"],
    )
    notebooklet = nblts.azsent.network.IpAddressSummary()
    # Make the mock provider's schema claim all the default tables exist
    notebooklet.query_provider.schema.update({tbl: {} for tbl in DEF_PROV_TABLES})
    all_options = ["+az_netflow", "+passive_dns", "+az_activity", "+office_365", "+ti"]
    nb_result = notebooklet.run(
        value="40.76.43.124", timespan=TimeSpan(period="1D"), options=all_options
    )
    check.is_not_none(nb_result.ip_entity)
    check.is_not_none(nb_result.host_entity)
    check.equal(nb_result.host_entity.HostName, "MSTICAlertsWin1")
    check.equal(nb_result.host_entity.OSFamily.name, "Linux")
    check.equal(nb_result.ip_type, "Public")
    check.equal(nb_result.ip_origin, "Internal")
    # All tabular results should be DataFrames
    dataframe_attrs = (
        "heartbeat",
        "az_network_if",
        "az_network_flows",
        "az_network_flow_summary",
        "aad_signins",
        "office_activity",
        "vmcomputer",
        "related_alerts",
        "passive_dns",
        "ti_results",
    )
    for attr in dataframe_attrs:
        check.is_instance(getattr(nb_result, attr), pd.DataFrame)
    # Plot outputs are bokeh layouts
    check.is_instance(nb_result.az_network_flows_timeline, LayoutDOM)
    check.is_instance(notebooklet.netflow_total_by_protocol(), LayoutDOM)
    check.is_instance(notebooklet.netflow_by_direction(), LayoutDOM)
    check.is_not_none(nb_result.whois)
def test_is_not_none():
    """A bound non-None value satisfies check.is_not_none."""
    value = 1
    check.is_not_none(value)