Example #1
def test_enable_disable_memmap(tmpdir):
    f = os.path.join(tmpdir, 'npn_empty.npy')
    arr = [[0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5]]
    a = MemMapArray(arr, filename=f, dtype=None, memmap=False)
    check.is_false(a.memmap)
    check.is_false(os.path.exists(f))

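    # Enabling memmap should create the backing file and swap in an np.memmap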
    a.enable_memmap()
    check.is_true(a.memmap)
    check.is_true(os.path.exists(f))
    check.is_instance(a._contained, np.memmap)

    # First keep the file
    a.disable_memmap(remove=False)
    check.is_false(a.memmap)
    check.is_true(os.path.exists(f))
    check.is_not_instance(a._contained, np.memmap)

    a.enable_memmap()
    check.is_true(a.memmap)
    check.is_true(os.path.exists(f))
    check.is_instance(a._contained, np.memmap)

    # Remove the file
    a.disable_memmap(remove=True)
    check.is_false(a.memmap)
    check.is_false(os.path.exists(f))
    check.is_not_instance(a._contained, np.memmap)

    with pytest.raises(ValueError):
        # raises error if name is locked
        a.enable_memmap('not_the_same_name.npy')
Example #2
def test_network_flow_summary_notebooklet(monkeypatch):
    """Test basic run of notebooklet."""
    test_data = str(Path(TEST_DATA_PATH).absolute())
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    monkeypatch.setattr(data_providers, "TILookup", TILookupMock)
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[test_data],
        LocalData_query_paths=[test_data],
    )

    test_nb = nblts.azsent.network.NetworkFlowSummary()
    tspan = TimeSpan(period="1D")

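    # Register the default provider tables in the query provider schema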
    test_nb.query_provider.schema.update({tab: {} for tab in DEF_PROV_TABLES})
    options = ["+geo_map"]
    result = test_nb.run(value="myhost", timespan=tspan, options=options)
    check.is_not_none(result.host_entity)
    check.is_not_none(result.network_flows)
    check.is_instance(result.network_flows, pd.DataFrame)
    check.is_not_none(result.plot_flows_by_protocol)
    check.is_instance(result.plot_flows_by_protocol, LayoutDOM)
    check.is_not_none(result.plot_flows_by_direction)
    check.is_instance(result.plot_flows_by_direction, LayoutDOM)
    check.is_not_none(result.plot_flow_values)
    check.is_instance(result.plot_flow_values, LayoutDOM)
    check.is_not_none(result.flow_index)
    check.is_instance(result.flow_summary, pd.DataFrame)

    result.select_asns()
    result.lookup_ti_for_asn_ips()
    result.show_selected_asn_map()
Example #3
def test_notebooklet_create(monkeypatch):
    """Test method."""
    # Should run because required providers are loaded
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_providers.init(query_provider="LocalData",
                        providers=["tilookup", "geolitelookup"])
    for _, nblt in nblts.iter_classes():
        new_nblt = nblt()
        check.is_instance(new_nblt, Notebooklet)
        check.is_none(new_nblt.result)

    # Should throw a warning because of unrecognized provider
    data_providers.init(query_provider="LocalData")
    with pytest.raises(MsticnbDataProviderError) as err:
        for _, nblt in nblts.iter_classes():
            curr_provs = nblt.metadata.req_providers
            bad_provs = [*curr_provs, "bad_provider"]
            try:
                nblt.metadata.req_providers = bad_provs
                new_nblt = nblt()
                check.is_instance(new_nblt, Notebooklet)
                check.is_none(new_nblt.result)
            finally:
                nblt.metadata.req_providers = curr_provs
    check.is_in("bad_provider", err.value.args[0])
    test_nb = TstNBSummary()
    check.is_not_none(test_nb.get_provider("LocalData"))
    with pytest.raises(MsticnbDataProviderError):
        test_nb.get_provider("otherprovider")
Example #4
def test_splunk_saved_searches(splunk_client):
    """Check saved searches."""
    splunk_client.connect = cli_connect
    sp_driver = SplunkDriver()

    # before connecting, the driver should report not connected
    check.is_false(sp_driver.connected)
    check.is_none(sp_driver._saved_searches)

    # trying to get these before connecting should throw
    with pytest.raises(MsticpyNotConnectedError) as mp_ex:
        sp_driver._get_saved_searches()
    check.is_in("not connected to Splunk.", mp_ex.value.args)

    # [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Test code")]
    sp_driver.connect(host="localhost", username="******", password=_FAKE_STRING)  # nosec
    check.is_true(sp_driver.connected)

    check.is_instance(sp_driver._saved_searches, pd.DataFrame)
    for _, search in sp_driver._saved_searches.iterrows():
        check.is_true(search["name"].startswith("query"))
        check.equal(search["query"], "get stuff from somewhere")

    queries, name = sp_driver.service_queries
    check.equal(name, "SavedSearches")
    check.is_instance(queries, dict)
    for name, query in queries.items():
        check.is_true(name.startswith("query"))
        check.equal(query, "search get stuff from somewhere")
Example #5
def test_template_notebooklet(monkeypatch):
    """Test basic run of notebooklet."""
    test_data = str(Path(TEST_DATA_PATH).absolute())
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[test_data],
        LocalData_query_paths=[test_data],
    )

    test_nb = TemplateNB()
    tspan = TimeSpan(period="1D")

    result = test_nb.run(value="myhost", timespan=tspan)
    check.is_not_none(result.all_events)
    check.is_not_none(result.description)
    check.is_not_none(result.plot)

    result = test_nb.run(value="myhost",
                         timespan=tspan,
                         options=["+get_metadata"])
    check.is_not_none(result.additional_info)

    evts = test_nb.run_additional_operation(
        ["4679", "5058", "5061", "5059", "4776"])
    check.is_instance(evts, pd.DataFrame)
Example #6
def test_mp_config_controls_ctrls(mp_conf_ctrl: MpConfigControls):
    """Loading MpConfigControls."""
    val = mp_conf_ctrl.get_value("AzureSentinel.Workspaces")
    check.is_instance(val, dict)
    check.equal(len(val), 2)

    mp_conf_ctrl.set_value(TEST_PATH_WS, NEW_WS)

    ctrl = TestCtrl(value=NEW_WS)

    mp_conf_ctrl.set_control(TEST_PATH_WS, ctrl)
    ctrl2 = mp_conf_ctrl.get_control(TEST_PATH_WS)
    check.equal(ctrl, ctrl2)

    # Change the value of the dict and set the control value to this
    NEW_WS["TenantId"] = "TestGUID---1"
    ctrl.value = NEW_WS
    ctrl2 = mp_conf_ctrl.get_control(TEST_PATH_WS)
    check.equal(ctrl2.value, NEW_WS)
    # Test saving control value
    mp_conf_ctrl.save_ctrl_values(TEST_PATH_WS)
    check.equal(mp_conf_ctrl.get_value(TEST_PATH_WS), NEW_WS)

    # Change dict and set the settings value
    NEW_WS["TenantId"] = "TestGUID---2"
    mp_conf_ctrl.set_value(TEST_PATH_WS, NEW_WS)

    mp_conf_ctrl.populate_ctrl_values(TEST_PATH_WS)
    ctrl2 = mp_conf_ctrl.get_control(TEST_PATH_WS)
    check.equal(ctrl2.value, NEW_WS)
Example #7
def test_splunk_saved_searches(splunk_client):
    """Check saved searches."""
    splunk_client.connect = cli_connect
    sp_driver = SplunkDriver()

    # before connecting, the driver should report not connected
    check.is_false(sp_driver.connected)
    check.is_none(sp_driver._saved_searches)

    # trying to get these before connecting should throw
    with pytest.raises(MsticpyNotConnectedError) as mp_ex:
        sp_driver._get_saved_searches()
    check.is_in("not connected to Splunk.", mp_ex.value.args)

    sp_driver.connect(host="localhost", username="******",
                      password="******")  # nosec
    check.is_true(sp_driver.connected)

    check.is_instance(sp_driver._saved_searches, pd.DataFrame)
    for _, search in sp_driver._saved_searches.iterrows():
        check.is_true(search["name"].startswith("query"))
        check.equal(search["query"], "get stuff from somewhere")

    queries, name = sp_driver.service_queries
    check.equal(name, "SavedSearches")
    check.is_instance(queries, dict)
    for name, query in queries.items():
        check.is_true(name.startswith("query"))
        check.equal(query, "search get stuff from somewhere")
Example #8
def test_winhostevents_notebooklet(monkeypatch):
    """Test basic run of notebooklet."""
    test_data = str(Path(TEST_DATA_PATH).absolute())
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)
    data_providers.init(
        query_provider="LocalData",
        LocalData_data_paths=[test_data],
        LocalData_query_paths=[test_data],
    )

    test_nb = nblts.azsent.host.WinHostEvents()
    tspan = TimeSpan(period="1D")

    result = test_nb.run(value="myhost", timespan=tspan)
    check.is_not_none(result.all_events)
    check.is_instance(result.all_events, pd.DataFrame)
    check.is_not_none(result.event_pivot)
    check.is_instance(result.event_pivot, pd.DataFrame)
    check.is_not_none(result.account_events)
    check.is_instance(result.account_events, pd.DataFrame)
    check.is_not_none(result.event_pivot)
    check.is_instance(result.event_pivot, pd.DataFrame)
    # check.is_not_none(result.account_timeline)

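    # expand_events should return the expanded event data as a DataFrame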
    exp_events = test_nb.expand_events(["5058", "5061"])
    check.is_instance(exp_events, pd.DataFrame)
Example #9
def test_emitted_tuple_format(error_objects: Tuple[Tuple, error_codes.Error]) -> None:
    """
    Test that the emitted message is a tuple with the appropriate information.

    The tuple should be formatted with the following information:
      (line number: int, column number: int, message: str, checker: the TypeHintChecker class)
    """
    emitted_error = error_objects[0]

    # Emitted warning should be a tuple
    check.is_instance(emitted_error, Tuple)

    # Tuple should be of length 4
    check.equal(len(emitted_error), 4)

    # First two values should be integers
    check.is_instance(emitted_error[0], int)
    check.is_instance(emitted_error[1], int)

    # Third value should be a string
    check.is_instance(emitted_error[2], str)

    # Fourth value should be a type (not an instance) and the same as TypeHintChecker
    check.is_instance(emitted_error[3], type)
    check.equal(emitted_error[3], TypeHintChecker)
Example #10
def test_live_connect():
    """Use this to do live testing."""
    sp_driver = SplunkDriver()
    www = "splunk-mstic.westus2.cloudapp.azure.com"
    sp_driver.connect(host=www, port=8089, username="******",
                      password="******")  # nosec

    query = """index="botsv2" earliest=08/25/2017:00:00:00 latest=08/26/2017:00:00:00
    source="WinEventLog:Microsoft-Windows-Sysmon/Operational"
    | table TimeCreated, host, EventID, EventDescription, User, process | head 10
    """
    res_df = sp_driver.query(query)
    check.is_not_none(res_df)

    query0 = """index="botsv2" earliest=08/25/2020:00:00:00
    + 'source="WinEventLog:Microsoft-Windows-Sysmon/Operational"
    | table TimeCreated, host, EventID, EventDescription, User, process | head 10
    """
    res_df = sp_driver.query(query0)
    check.is_instance(res_df, list)
    check.is_false(res_df)

    query1 = """
    index=blackhat sourcetype=network earliest=0 | table TimeGenerated, TotalBytesSent
    """
    res_df = sp_driver.query(query1)
    check.is_not_none(res_df)
Example #11
def test_new_init_data_providers(monkeypatch):
    """Test creating new provider with new provider list."""
    monkeypatch.setattr(data_providers, "GeoLiteLookup", GeoIPLiteMock)

    data_providers.init(query_provider="LocalData", providers=[])
    dprov = data_providers.DataProviders.current()
    data_providers.init(query_provider="LocalData", providers=[])
    dprov2 = data_providers.DataProviders.current()
    check.equal(dprov2, dprov)

    # specify provider
    dprov = data_providers.DataProviders(query_provider="LocalData")
    data_providers.init(query_provider="LocalData", providers=["tilookup"])
    msticnb = sys.modules["msticnb"]
    dprov2 = data_providers.DataProviders.current()
    pkg_providers = getattr(msticnb, "data_providers")
    check.not_equal(dprov2, dprov)
    check.is_in("LocalData", dprov2.providers)
    check.is_in("tilookup", dprov2.providers)
    check.is_not_in("geolitelookup", dprov2.providers)
    check.is_not_in("ipstacklookup", dprov2.providers)
    check.is_in("LocalData", pkg_providers)
    check.is_in("tilookup", pkg_providers)
    check.is_not_in("geolitelookup", pkg_providers)
    check.is_not_in("ipstacklookup", pkg_providers)

    check.is_instance(dprov2.providers["tilookup"], TILookup)
Example #12
def test_extract_header_nowcs():
    header = fits.Header.fromstring(_base_header, sep='\n')
    h, wcs = extract_header_wcs(header)
    check.is_none(wcs)
    check.is_instance(h, fits.Header)
    check.equal(h, header)
    check.is_false(h is header)
Example #13
def test_guess_coords_skycoord_hexa():
    ra = "1:00:00"
    dec = "00:00:00"
    sk = guess_coordinates(ra, dec, skycoord=True)
    check.is_instance(sk, SkyCoord)
    check.is_true(abs(sk.ra.degree - 15) < 1e-8)
    check.is_true(abs(sk.dec.degree - 0) < 1e-8)
Example #14
def test_guess_coords_skycoord_float():
    ra = 10.0
    dec = 0.0
    sk = guess_coordinates(ra, dec, skycoord=True)
    check.is_instance(sk, SkyCoord)
    check.equal(sk.ra.degree, ra)
    check.equal(sk.dec.degree, dec)
Example #15
def test_logger_no_loglist():
    mylog = logger.getChild('testing')
    msg = 'Some error happened here.'
    lh = ListHandler()
    check.is_instance(lh.log_list, list)
    mylog.addHandler(lh)
    mylog.error(msg)
    check.equal(lh.log_list[0], msg)
Example #16
def test_kql_query_success(get_ipython):
    """Check loaded true."""
    get_ipython.return_value = _MockIPython()
    kql_driver = KqlDriver()
    kql_driver.connect(connection_str="la://connection")

    result_df = kql_driver.query("test query")
    check.is_instance(result_df, pd.DataFrame)
Example #17
def test_data_query_value(_create_pivot, test_case):
    """Test calling function with value."""
    func = getattr(getattr(test_case.entity, test_case.provider), test_case.pivot_func)
    # Test value input
    val = next(iter(test_case.value))
    params = {test_case.func_param: val}
    result_df = func(**params)
    check.is_instance(result_df, pd.DataFrame)
Example #18
def test_extract_invalid_wcs_header():
    # It should no raise, just return empty wcs
    # No header change too
    header = fits.Header.fromstring(_base_header + _invalid_wcs, sep='\n')
    h, wcs = extract_header_wcs(header)
    check.is_none(wcs)
    check.is_instance(h, fits.Header)
    check.equal(h, header)
    check.is_false(h is header)
Example #19
def test_indexeddict_create():
    d = dict(a=1, b=2, c=3)
    i = IndexedDict(a=1, b=2, c=3)
    check.is_instance(i, dict)
    check.equal(len(d), len(i))
    # Python 3.6 and above ensure items order
    check.equal(list(d.keys()), list(i.keys()))
    check.equal(list(d.values()), list(i.values()))
    check.equal(i, d)
Example #20
def test_solve_astrometry_image(tmpdir):
    data, index = get_image_index()
    hdu = fits.open(data)[0]
    header, wcs = _generate_wcs_and_update_header(hdu.header)
    hdu.header = header
    name = tmpdir.join('testimage.fits').strpath
    hdu.writeto(name)
    nwcs = solve_astrometry_image(name, return_wcs=True)
    check.is_instance(nwcs, WCS)
    check.equal(nwcs.naxis, 2)
    compare_wcs(wcs, nwcs)
Example #21
def test_kql_query_partial(get_ipython):
    """Check loaded true."""
    get_ipython.return_value = _MockIPython()
    kql_driver = KqlDriver()
    kql_driver.connect(connection_str="la://connection")

    output = io.StringIO()
    with redirect_stdout(output):
        result_df = kql_driver.query("test query_partial")
    check.is_instance(result_df, pd.DataFrame)
    check.is_in("Warning - query returned partial", output.getvalue())
Example #22
def test_build_callback_no_logger(self):
    """Should work if logger is None."""
    callbacks = [Callback(), Callback()]
    logger = None
    params = {'param123': 123}
    callbacklist = self.playground._build_callbacks(
        callbacks, logger, params)
    check.is_instance(callbacklist, CallbackList)
    for callback in callbacklist.callbacks:
        check.equal(callback.params, params)
        check.equal(callback.playground, self.playground)
Example #23
def test_data_query_entity(_create_pivot, test_case):
    """Test calling function with entity attributes."""
    # Test entity
    first_val = next(iter(test_case.value))
    init_args = {test_case.attrib: first_val}
    entity = test_case.entity(**init_args)
    func = getattr(getattr(entity, test_case.provider), test_case.pivot_func)
    # Test entity input
    # result_df = entity.LocalData.list_logons_by_account()
    result_df = func(entity)
    check.is_instance(result_df, pd.DataFrame)
Example #24
def test_mordor_download(mdr_driver: MordorDriver):
    """Test file download."""
    entry_id = "SDWIN-190319021158"
    entry = mdr_driver.mordor_data[entry_id]
    files = entry.get_file_paths()

    file_path = files[0]["file_path"]
    d_frame = download_mdr_file(file_path, save_folder=_SAVE_FOLDER2)

    check.is_instance(d_frame, pd.DataFrame)
    check.greater_equal(len(d_frame), 10)
    _cleanup_temp_files(_SAVE_FOLDER2)
Example #25
def test_extract_header_nosip():
    header = fits.Header.fromstring(_base_header + _wcs_no_sip, sep='\n')
    h, wcs = extract_header_wcs(header)
    check.is_instance(wcs, WCS)
    check.equal(wcs.wcs.ctype[0], 'RA---TAN')
    check.equal(wcs.wcs.ctype[1], 'DEC--TAN')
    check.is_instance(h, fits.Header)
    for i in _comon_wcs_keys:
        check.is_not_in(f'{i}1', h.keys())
        check.is_not_in(f'{i}2', h.keys())
    check.is_in('DATE-OBS', h.keys())
    check.is_false(h is header)
    check.not_equal(h, header)
Example #26
def test_mordor_download(mdr_driver: MordorDriver):
    """Test file download."""
    global _SAVE_PATH
    entry_id = "SDWIN-190319021158"
    entry = mdr_driver.mordor_data[entry_id]
    files = entry.get_file_paths()

    file_path = files[0]["file_path"]
    d_frame = download_mdr_file(file_path, save_folder="mordor_test")
    _SAVE_PATH = file_path.split("/")[-1]

    check.is_instance(d_frame, pd.DataFrame)
    check.greater_equal(len(d_frame), 10)
Example #27
def test_fit_wcs(tmpdir):
    data, index = get_image_index()
    hdu = fits.open(data)[0]
    imw, imh = hdu.data.shape
    header, wcs = _generate_wcs_and_update_header(hdu.header)
    hdu.header = header
    sources = starfind(hdu.data, 10, np.median(hdu.data), np.std(hdu.data), 4)
    sources['ra'], sources['dec'] = wcs.all_pix2world(sources['x'],
                                                      sources['y'], 1)
    nwcs = fit_wcs(sources['x'], sources['y'], sources['ra'], sources['dec'],
                   imw, imh)
    check.is_instance(nwcs, WCS)
    check.equal(nwcs.naxis, 2)
    compare_wcs(wcs, nwcs)
Example #28
def test_logger_remove_handler():
    mylog = logger.getChild('testing')
    msg = 'Some error happened here.'
    logs = []
    lh = log_to_list(mylog, logs)
    mylog.setLevel('DEBUG')
    mylog.error(msg)
    check.is_instance(lh, ListHandler)
    check.is_in(lh, mylog.handlers)
    mylog.removeHandler(lh)
    check.is_not_in(lh, mylog.handlers)
    check.equal(logs[0], msg)
    check.equal(lh.log_list[0], msg)
    check.equal(lh.log_list, logs)
Example #29
def test_user_config(settings, mp_settings):
    """Test user config."""
    settings.get = Mock()
    settings.get.return_value = mp_settings.get("UserDefaults")
    prov_dict = user_config.load_user_defaults()

    check.is_in("qry_asi", prov_dict)
    check.is_instance(prov_dict["qry_asi"], QueryProvider)
    check.equal(prov_dict["qry_asi"].environment, "AzureSentinel")
    check.is_in("qry_soc", prov_dict)
    check.is_instance(prov_dict["qry_soc"], QueryProvider)
    check.equal(prov_dict["qry_asi"].environment, "AzureSentinel")
    check.is_in("qry_splunk", prov_dict)
    check.is_instance(prov_dict["qry_splunk"], QueryProvider)
    check.equal(prov_dict["qry_splunk"].environment, "Splunk")
    check.is_in("qry_local", prov_dict)
    check.is_instance(prov_dict["qry_local"], QueryProvider)
    check.is_true(prov_dict["qry_local"].connected)
    check.equal(prov_dict["qry_local"].environment, "LocalData")

    check.is_in("ti_lookup", prov_dict)
    check.is_in("geoip", prov_dict)
    check.is_in("az_data", prov_dict)
    check.is_in("azs_api", prov_dict)

    check.is_true(hasattr(msticpy, "current_providers"))
Example #30
def test_user_config(mp_settings):
    """Test user config."""
    mpcfg_path = os.environ.get("MSTICPYCONFIG")
    with custom_mp_config(mp_path=mpcfg_path):
        settings["UserDefaults"] = mp_settings.get("UserDefaults")
        prov_dict = user_config.load_user_defaults()

    check.is_in("qry_asi", prov_dict)
    check.is_instance(prov_dict["qry_asi"], QueryProvider)
    check.equal(prov_dict["qry_asi"].environment, "AzureSentinel")
    check.is_in("qry_soc", prov_dict)
    check.is_instance(prov_dict["qry_soc"], QueryProvider)
    check.equal(prov_dict["qry_asi"].environment, "AzureSentinel")
    check.is_in("qry_splunk", prov_dict)
    check.is_instance(prov_dict["qry_splunk"], QueryProvider)
    check.equal(prov_dict["qry_splunk"].environment, "Splunk")
    check.is_in("qry_local", prov_dict)
    check.is_instance(prov_dict["qry_local"], QueryProvider)
    check.is_true(prov_dict["qry_local"].connected)
    check.equal(prov_dict["qry_local"].environment, "LocalData")

    check.is_in("ti_lookup", prov_dict)
    check.is_in("geoip", prov_dict)
    check.is_in("az_data", prov_dict)
    check.is_in("azs_api", prov_dict)

    check.is_true(hasattr(msticpy, "current_providers"))