def test_workflow_working_with_child(self):
    """Workflow detection succeeds when child candidates are available."""
    # Stub Inference.test so each call yields the same two scored children.
    # ("weigth" is the key spelling the library under test expects.)
    # A comprehension builds fresh dicts per call, mirroring the original
    # five hand-written copies.
    inference_mock = Mock()
    inference_mock.side_effect = [
        [{"log": 25, "weigth": 10}, {"log": 15, "weigth": 10}]
        for _ in range(5)
    ]
    # NOTE(review): class attributes are patched without restore — this
    # leaks into subsequent tests; confirm that is intentional.
    logflow.treebuilding.Inference.Inference.test = inference_mock

    # Three distinct pattern stubs, cycled for as many lookups as needed.
    patterns = []
    for pattern_id in (1, 2, 3):
        pattern = Pattern(0, [], [])
        pattern.id = pattern_id
        patterns.append(pattern)
    find_pattern_mock = Mock()
    find_pattern_mock.side_effect = patterns * 3000
    logflow.logsparser.Journal.Journal.find_pattern = find_pattern_mock

    workflow = Workflow(self.dataset)
    workflow.detect_workflow(25)
def test_workflow_wrong_first_log(self):
    """detect_workflow returns the sentinel "-1" when every log carries an
    unknown pattern (id -1)."""
    # Minimal pickled model payload, served through a patched open().
    payload = pickle.dumps({
        "word2vec": {
            "1": np.asarray([1] * 20),
            "2": np.asarray([2] * 20),
            "3": np.asarray([3] * 20),
            "4": [4] * 20,
            "5": [5] * 20,
            "6": [6] * 20,
            "7": [7] * 20,
        },
        # Insertion order (6 before 5) kept as in the original fixture.
        "counter_patterns": {1: 100, 2: 100, 3: 100, 4: 100, 6: 1000, 5: 1000},
        "LSTM": {3: self.model.state_dict()},
        "dict_patterns": {},
    })
    with patch("builtins.open", mock_open(read_data=payload)):
        dataset_local = Dataset(path_model="/", path_data="/", name_model="/")
        dataset_local.load_files()
        dataset_local.LSTM = self.list_model
        dataset_local.list_logs = []

        # Every lookup resolves to the "unknown" pattern (id -1).
        unknown_pattern = Pattern(0, [], [])
        unknown_pattern.id = -1
        find_pattern_mock = Mock()
        find_pattern_mock.side_effect = [unknown_pattern] * 30
        logflow.logsparser.Journal.Journal.find_pattern = find_pattern_mock

        for line_index in range(30):
            log = Log("-1", index_line=line_index)
            log.pattern = unknown_pattern
            dataset_local.list_logs.append(log)

        workflow = Workflow(dataset_local)
        tree = workflow.detect_workflow(10)
        self.assertEqual(tree, "-1")
path_model="model/").start() # Learn the correlations size = 100000000 list_cardinalities = Dataset_learning(path_model="model/", path_data="data/", name_dataset="Test", size=size).run() worker = Worker(cardinalities_choosen=[4, 5, 6, 7], list_cardinalities=list_cardinalities, path_model="model/", name_dataset="Test") worker.train() # Show the results results = Results(path_model="model/", name_model="Test") results.load_files() results.compute_results(condition="Test") results.print_results() # Get the tree dataset = Dataset_building(path_model="model/", name_model="Test", path_data="data/Windows/Windows.log", index_line_max=30000, parser_function=parser_function) dataset.load_files() dataset.load_logs() workflow = Workflow(dataset) workflow.get_tree(index_line=24712)
def test_workflow_working(self):
    """Both detect_workflow and get_tree complete on a valid line index."""
    workflow = Workflow(self.dataset)
    # Same index for detection and tree retrieval; no exception expected.
    workflow.detect_workflow(25)
    workflow.get_tree(25)
def test_create(self):
    """The Workflow constructor accepts a prepared dataset without raising."""
    Workflow(self.dataset)
def test_workflow_after_last_line(self):
    """Requesting a tree past the final log line must raise."""
    workflow = Workflow(self.dataset)
    # Index 40 is beyond the loaded logs; any Exception is acceptable here.
    with self.assertRaises(Exception):
        workflow.get_tree(40)