def setUp(self):
    """Build a Dataset backed by mocked file I/O, patterns and 30 sample logs."""
    self.model = LSTMLayer(num_classes=5)
    self.default_pattern = Pattern(0, [], [])
    # One LSTM state entry per cardinality the workflow may request.
    self.list_model = {k: self.model.state_dict() for k in (1, 2, 3)}

    # Journal.find_pattern cycles through three dummy patterns (ids 1-3).
    patterns = []
    for pattern_id in (1, 2, 3):
        pattern = Pattern(0, [], [])
        pattern.id = pattern_id
        patterns.append(pattern)
    finder = Mock()
    finder.side_effect = patterns * 30
    logflow.logsparser.Journal.Journal.find_pattern = finder

    # Serialized model file the mocked open() hands back to load_files().
    read_data = pickle.dumps({
        'word2vec': {
            "1": np.asarray([1] * 20),
            "2": np.asarray([2] * 20),
            "3": np.asarray([3] * 20),
            "4": [4] * 20,
            "5": [5] * 20,
            "6": [6] * 20,
            "7": [7] * 20,
        },
        'counter_patterns': {1: 100, 2: 100, 3: 100, 4: 100, 6: 1000, 5: 1000},
        "LSTM": {3: self.model.state_dict()},
        "dict_patterns": {},
    })
    with patch('builtins.open', mock_open(read_data=read_data)):
        self.dataset = Dataset(path_model="/", path_data="/", name_model="/")
        self.dataset.load_files()
        self.dataset.LSTM = self.list_model
        self.dataset.list_logs = [
            Log("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 123", index_line=i)
            for i in range(30)
        ]
def test_workflow_working_with_child(self):
    """detect_workflow runs to completion when Inference.test yields child candidates."""
    # Each of the five calls to Inference.test returns a fresh pair of
    # candidate logs. NOTE: "weigth" is the key spelling the production
    # code expects — do not "fix" it here.
    inference_mock = Mock()
    inference_mock.side_effect = [
        [{"log": 25, "weigth": 10}, {"log": 15, "weigth": 10}]
        for _ in range(5)
    ]
    logflow.treebuilding.Inference.Inference.test = inference_mock

    # Journal.find_pattern cycles through three dummy patterns (ids 1-3).
    patterns = []
    for pattern_id in (1, 2, 3):
        pattern = Pattern(0, [], [])
        pattern.id = pattern_id
        patterns.append(pattern)
    finder = Mock()
    finder.side_effect = patterns * 3000
    logflow.logsparser.Journal.Journal.find_pattern = finder

    workflow = Workflow(self.dataset)
    workflow.detect_workflow(25)
def setUp(self):
    """Create Log fixtures, each tagged with a Pattern and a cardinality.

    Fixtures:
        log  -- valid log, pattern id 0, words ["home"]
        log2 -- valid log, pattern id 1, words ["house"]
        log3 -- valid log, pattern id 2, words ["HouseCat"]
        log4 -- truncated log, empty pattern, no cardinality set
        log5 -- sentinel log built from "-1", no pattern attached
    """
    prefix = ("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd "
              "pam_unix(sshd:session): session closed, for1 User Root/1 ")

    def tagged_log(suffix, words, pattern_id):
        # Build a Log whose pattern carries the given words/id, cardinality 3.
        log = Log(prefix + suffix)
        pattern = Pattern(0, words, [])
        pattern.id = pattern_id
        log.pattern = pattern
        log.cardinality = 3
        return log

    self.log = tagged_log("123", ["home"], 0)
    self.log2 = tagged_log("1234", ["house"], 1)
    self.log3 = tagged_log("1235", ["HouseCat"], 2)

    # log4: malformed timestamp and truncated body; empty pattern and no
    # cardinality, so it exercises the degenerate-log paths.
    self.log4 = Log("1530388399a 2018 Jun 30 21:53:19 m21205 authpriv info")
    pattern4 = Pattern(0, [], [])
    pattern4.id = 2
    self.log4.pattern = pattern4

    # BUG FIX: the original built a fully configured "HouseCat" log in
    # self.log5 and then immediately overwrote it with Log("-1"), so the
    # first assignment was dead code (apparent copy-paste error). It has
    # been removed; the value the tests actually see — Log("-1") — is
    # unchanged.
    self.log5 = Log("-1")
def test_workflow_wrong_first_log(self):
    """detect_workflow returns "-1" when every log carries the unknown (-1) pattern."""
    # Serialized model file handed back by the mocked open().
    payload = pickle.dumps({
        'word2vec': {
            "1": np.asarray([1] * 20),
            "2": np.asarray([2] * 20),
            "3": np.asarray([3] * 20),
            "4": [4] * 20,
            "5": [5] * 20,
            "6": [6] * 20,
            "7": [7] * 20,
        },
        'counter_patterns': {1: 100, 2: 100, 3: 100, 4: 100, 6: 1000, 5: 1000},
        "LSTM": {3: self.model.state_dict()},
        "dict_patterns": {},
    })
    with patch('builtins.open', mock_open(read_data=payload)):
        dataset_local = Dataset(path_model="/", path_data="/", name_model="/")
        dataset_local.load_files()
        dataset_local.LSTM = self.list_model

        # Every log is the sentinel "-1" tagged with the unknown pattern.
        unknown_pattern = Pattern(0, [], [])
        unknown_pattern.id = -1
        finder = Mock()
        finder.side_effect = [unknown_pattern] * 30
        logflow.logsparser.Journal.Journal.find_pattern = finder

        dataset_local.list_logs = []
        for index in range(30):
            sentinel = Log("-1", index_line=index)
            sentinel.pattern = unknown_pattern
            dataset_local.list_logs.append(sentinel)

        workflow = Workflow(dataset_local)
        tree = workflow.detect_workflow(10)
        self.assertEqual(tree, "-1")