def setUp(self):
    """Build the shared fixtures for the pattern-dictionary tests.

    Creates dummy path/name settings, a single six-word Pattern anchored on
    the words "session"/"User" at indices 1 and 4, and a nested
    pattern dictionary keyed first by cardinality (7) then by id (2).
    """
    # Dummy data string and filesystem locations used by the tests.
    self.data_test = "2 2 2"
    self.path_data = "./"
    self.path_model = "./"
    self.name_dataset = "test"

    # One reference pattern with an explicit id, registered in the lookup dict.
    self.pattern = Pattern(
        cardinality=6,
        pattern_word=["session", "User"],
        pattern_index=[1, 4],
    )
    self.pattern.id = 2
    self.dict_patterns = {7: {2: [self.pattern]}}
def setUp(self):
    """Build a family of Log fixtures sharing the same sshd message shape.

    log1..log3 differ only in their trailing token and attached pattern;
    log4 has a malformed (non-numeric) timestamp on purpose; log5 ends up
    as a minimal "-1" log (see review note below).
    """
    # First log: valid sshd line, pattern id 0, cardinality 3.
    self.log = Log(
        "1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 123"
    )
    stub = Pattern(0, ["home"], [])
    stub.id = 0
    self.log.pattern = stub
    self.log.cardinality = 3

    # Second log: same shape, different trailing token and pattern id 1.
    self.log2 = Log(
        "1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 1234"
    )
    stub = Pattern(0, ["house"], [])
    stub.id = 1
    self.log2.pattern = stub
    self.log2.cardinality = 3

    # Third log: pattern id 2.
    self.log3 = Log(
        "1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 1235"
    )
    stub = Pattern(0, ["HouseCat"], [])
    stub.id = 2
    self.log3.pattern = stub
    self.log3.cardinality = 3

    # NOTE(review): log5 is fully built here (same line as log3) and then
    # overwritten by Log("-1") at the end of this method, making this whole
    # construction dead. Preserved as-is in case Log() has side effects —
    # confirm whether this was meant to be a log6.
    self.log5 = Log(
        "1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 1235"
    )
    stub = Pattern(0, ["HouseCat"], [])
    stub.id = 2
    self.log5.pattern = stub
    self.log5.cardinality = 3

    # Fourth log: deliberately malformed timestamp ("1530388399a") and a
    # truncated message, with an empty pattern.
    self.log4 = Log("1530388399a 2018 Jun 30 21:53:19 m21205 authpriv info")
    stub = Pattern(0, [], [])
    stub.id = 2
    self.log4.pattern = stub

    # Final value of log5: a minimal sentinel log.
    self.log5 = Log("-1")
def test_workflow_wrong_first_log(self):
    """Workflow detection on a dataset of sentinel ("-1") logs returns "-1".

    The on-disk model state is simulated by patching builtins.open with a
    pickled payload, and Journal.find_pattern is stubbed so every log maps
    to a dummy pattern with id -1.
    """
    # Pickled stand-in for the files Dataset.load_files() reads.
    pickled_state = pickle.dumps({
        'word2vec': {
            "1": np.asarray([1] * 20),
            "2": np.asarray([2] * 20),
            "3": np.asarray([3] * 20),
            "4": [4] * 20,
            "5": [5] * 20,
            "6": [6] * 20,
            "7": [7] * 20,
        },
        'counter_patterns': {1: 100, 2: 100, 3: 100, 4: 100, 6: 1000, 5: 1000},
        "LSTM": {3: self.model.state_dict()},
        "dict_patterns": {},
    })

    open_mock = mock_open(read_data=pickled_state)
    with patch('builtins.open', open_mock):
        dataset = Dataset(path_model="/", path_data="/", name_model="/")
        dataset.load_files()

    dataset.LSTM = self.list_model
    dataset.list_logs = []

    # Dummy pattern with sentinel id; every find_pattern call yields it.
    sentinel_pattern = Pattern(0, [], [])
    sentinel_pattern.id = -1
    finder_stub = Mock()
    finder_stub.side_effect = [sentinel_pattern] * 30
    logflow.logsparser.Journal.Journal.find_pattern = finder_stub

    # Populate the dataset with 30 sentinel logs, all sharing the pattern.
    for line_no in range(30):
        entry = Log("-1", index_line=line_no)
        entry.pattern = sentinel_pattern
        dataset.list_logs.append(entry)

    workflow = Workflow(dataset)
    # Starting from a "-1" log, detection cannot build a tree and must
    # report the sentinel instead of raising.
    tree = workflow.detect_workflow(10)
    self.assertEqual(tree, "-1")