import pickle
import unittest
import unittest.mock
from unittest.mock import Mock, mock_open, patch

import numpy as np

import logflow.logsparser.Journal
import logflow.treebuilding.Inference
import logflow.treebuilding.Log
# The from-imports below are inferred from how the bare names are used in
# these tests; the exact module paths are assumptions, so adjust them to
# match your logflow version.
from logflow.logsparser.Pattern import Pattern
from logflow.relationsdiscover.Model import LSTMLayer
from logflow.treebuilding.Dataset import Dataset
from logflow.treebuilding.Log import Log
from logflow.treebuilding.Workflow import Workflow


# The original snippet omits the class header for the tests below; the name
# DatasetTest is an assumed placeholder.
class DatasetTest(unittest.TestCase):
    def test_create(self):
        # An empty path or model name must be rejected.
        with self.assertRaises(Exception):
            Dataset(path_model="", path_data="/", name_model="/")
        with self.assertRaises(Exception):
            Dataset(path_model="/", path_data="/", name_model="")
        with self.assertRaises(Exception):
            Dataset(path_model="/", path_data="", name_model="/")
        # Fully specified arguments must build without raising.
        Dataset(path_model="/", path_data="/", name_model="/")
    def setUp(self):
        self.model = LSTMLayer(num_classes=5)
        self.default_pattern = Pattern(0, [], [])
        self.list_model = {
            1: self.model.state_dict(),
            2: self.model.state_dict(),
            3: self.model.state_dict()
        }
        default_pattern1 = Pattern(0, [], [])
        default_pattern1.id = 1
        default_pattern2 = Pattern(0, [], [])
        default_pattern2.id = 2
        default_pattern3 = Pattern(0, [], [])
        default_pattern3.id = 3
        # Stub pattern detection: find_pattern cycles through the three
        # prepared patterns for every parsed line. Note that this assigns
        # the mock directly on the Journal class, so it outlives this test.
        m = Mock()
        m.side_effect = [default_pattern1, default_pattern2,
                         default_pattern3] * 30
        logflow.logsparser.Journal.Journal.find_pattern = m
        read_data = pickle.dumps({
            'word2vec': {
                "1": np.asarray([1] * 20),
                "2": np.asarray([2] * 20),
                "3": np.asarray([3] * 20),
                "4": [4] * 20,
                "5": [5] * 20,
                "6": [6] * 20,
                "7": [7] * 20
            },
            'counter_patterns': {
                1: 100,
                2: 100,
                3: 100,
                4: 100,
                6: 1000,
                5: 1000
            },
            "LSTM": {
                3: self.model.state_dict()
            },
            "dict_patterns": {}
        })

        mockOpen = mock_open(read_data=read_data)
        with patch('builtins.open', mockOpen):
            self.dataset = Dataset(path_model="/",
                                   path_data="/",
                                   name_model="/")
            self.dataset.load_files()

        self.dataset.LSTM = self.list_model
        self.dataset.list_logs = []
        for i in range(30):
            self.dataset.list_logs.append(
                Log("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 123",
                    index_line=i))
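
    def _patch_find_pattern(self, mock_obj):
        # Hedged alternative to the bare class attribute assignment used in
        # setUp: patch.object() plus addCleanup undoes the monkeypatch after
        # each test, so the mock cannot leak into other test classes. This
        # helper is illustrative and not part of the original snippet.
        patcher = patch.object(
            logflow.logsparser.Journal.Journal, "find_pattern", mock_obj)
        patcher.start()
        self.addCleanup(patcher.stop)
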
    def test_slice(self):
        default_pattern1 = Pattern(0, [], [])
        default_pattern1.id = 1
        default_pattern2 = Pattern(0, [], [])
        default_pattern2.id = 2
        default_pattern3 = Pattern(0, [], [])
        default_pattern3.id = 3
        # Override the setUp mock: this test parses only three lines.
        m = Mock()
        m.side_effect = [default_pattern1, default_pattern2, default_pattern3]
        logflow.logsparser.Journal.Journal.find_pattern = m
        read_data = pickle.dumps({
            'word2vec': {
                "1": np.asarray([1] * 20),
                "2": np.asarray([2] * 20),
                "3": np.asarray([3] * 20),
                "4": [4] * 20,
                "5": [5] * 20,
                "6": [6] * 20,
                "7": [7] * 20
            },
            'counter_patterns': {
                1: 10,
                2: 100,
                3: 100,
                4: 100,
                5: 1000,
                6: 1000
            },
            "LSTM": {
                3: self.model.state_dict()
            },
            "dict_patterns": {}
        })

        mockOpen = mock_open(read_data=read_data)
        with patch('builtins.open', mockOpen):
            dataset = Dataset(path_model="/", path_data="/", name_model="/")
            dataset.load_files()

        log_line = ("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd "
                    "pam_unix(sshd:session): session closed, for1 User Root/1 123")
        read_data_str = "\n".join([log_line] * 3)
        mockOpen = unittest.mock.mock_open(read_data=read_data_str)
        # mock_open does not support `for line in f:` on its own; delegate
        # iteration to readline() so Dataset.load_logs can iterate the file.
        mockOpen.return_value.__iter__ = lambda self: self
        mockOpen.return_value.__next__ = lambda self: next(iter(self.readline, ''))
        with patch('builtins.open', mockOpen):
            dataset.load_logs()
        # Nominal case: get_slice around line 2 returns a Log instance.
        output = dataset.get_slice(2)
        self.assertIsInstance(output, logflow.treebuilding.Log.Log)
        dataset.show_selected_lines(2)
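
    # mock_open lacks native line-iteration support, which test_slice above
    # and test_load_logs below work around inline. A reusable sketch of the
    # same shim (the helper name is illustrative, not part of the original):
    #
    #     with patch('builtins.open', self._open_with_lines(text)):
    #         dataset.load_logs()
    @staticmethod
    def _open_with_lines(text):
        m = unittest.mock.mock_open(read_data=text)
        # Route `for line in f:` through readline(); iteration ends when
        # readline() returns the empty string.
        m.return_value.__iter__ = lambda self: self
        m.return_value.__next__ = lambda self: next(iter(self.readline, ''))
        return m
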
    def test_load_files(self):
        read_data = pickle.dumps({
            'word2vec': {
                "1": [1] * 20,
                "2": [2] * 20,
                "3": [3] * 20,
                "4": [4] * 20,
                "5": [5] * 20,
                "6": [6] * 20,
                "7": [7] * 20
            },
            'counter_patterns': {
                1: 10,
                2: 100,
                3: 100,
                4: 100,
                5: 1000,
                6: 1000
            },
            "LSTM": {
                3: self.model.state_dict()
            },
            "dict_patterns": {}
        })
        mockOpen = mock_open(read_data=read_data)
        with patch('builtins.open', mockOpen):
            dataset = Dataset(path_model="/", path_data="/", name_model="/")
            dataset.load_files()
        # The pickled model holds no patterns but six pattern counters.
        self.assertEqual(len(dataset.dict_patterns), 0)
        self.assertEqual(len(dataset.counter_patterns), 6)
    def test_load_logs(self):
        log_line = ("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd "
                    "pam_unix(sshd:session): session closed, for1 User Root/1 123")
        read_data_str = "\n".join([log_line] * 3)
        m = unittest.mock.mock_open(read_data=read_data_str)
        m.return_value.__iter__ = lambda self: self
        m.return_value.__next__ = lambda self: next(iter(self.readline, ''))
        # index_line_max=1 stops reading after line index 1, i.e. two lines.
        with patch('builtins.open', m):
            dataset = Dataset(path_model="/", path_data="/", name_model="/",
                              index_line_max=1)
            dataset.load_logs()
        self.assertEqual(len(dataset.list_logs), 2)

        # Without index_line_max, all three lines are loaded.
        with patch('builtins.open', m):
            dataset = Dataset(path_model="/", path_data="/", name_model="/")
            dataset.load_logs()
        self.assertEqual(len(dataset.list_logs), 3)


class UtilTest(unittest.TestCase):
    def setUp(self):
        self.model = LSTMLayer(num_classes=5)
        self.default_pattern = Pattern(0, [], [])
        self.list_model = {
            1: self.model.state_dict(),
            2: self.model.state_dict(),
            3: self.model.state_dict()
        }
        default_pattern1 = Pattern(0, [], [])
        default_pattern1.id = 1
        default_pattern2 = Pattern(0, [], [])
        default_pattern2.id = 2
        default_pattern3 = Pattern(0, [], [])
        default_pattern3.id = 3
        # Same class-level monkeypatch as in DatasetTest.setUp: find_pattern
        # cycles through the three prepared patterns.
        m = Mock()
        m.side_effect = [default_pattern1, default_pattern2,
                         default_pattern3] * 30
        logflow.logsparser.Journal.Journal.find_pattern = m
        read_data = pickle.dumps({
            'word2vec': {
                "1": np.asarray([1] * 20),
                "2": np.asarray([2] * 20),
                "3": np.asarray([3] * 20),
                "4": [4] * 20,
                "5": [5] * 20,
                "6": [6] * 20,
                "7": [7] * 20
            },
            'counter_patterns': {
                1: 100,
                2: 100,
                3: 100,
                4: 100,
                6: 1000,
                5: 1000
            },
            "LSTM": {
                3: self.model.state_dict()
            },
            "dict_patterns": {}
        })

        mockOpen = mock_open(read_data=read_data)
        with patch('builtins.open', mockOpen):
            self.dataset = Dataset(path_model="/",
                                   path_data="/",
                                   name_model="/")
            self.dataset.load_files()

        self.dataset.LSTM = self.list_model
        self.dataset.list_logs = []
        for i in range(30):
            self.dataset.list_logs.append(
                Log("1530388399 2018 Jun 30 21:53:19 m21205 authpriv info sshd pam_unix(sshd:session): session closed, for1 User Root/1 123",
                    index_line=i))

    def test_create(self):
        # Smoke test: building a Workflow from a loaded dataset must not raise.
        Workflow(self.dataset)

    def test_workflow_working(self):
        # detect_workflow and get_tree on a valid line index must succeed.
        workflow = Workflow(self.dataset)
        workflow.detect_workflow(25)
        workflow.get_tree(25)

    def test_workflow_working_with_child(self):
        # Stub Inference.test to always report two candidate parent logs.
        # The "weigth" spelling is kept verbatim: it must match the key the
        # library code reads.
        m = Mock()
        m.side_effect = [
            [{"log": 25, "weigth": 10}, {"log": 15, "weigth": 10}]
            for _ in range(5)
        ]
        logflow.treebuilding.Inference.Inference.test = m

        default_pattern1 = Pattern(0, [], [])
        default_pattern1.id = 1
        default_pattern2 = Pattern(0, [], [])
        default_pattern2.id = 2
        default_pattern3 = Pattern(0, [], [])
        default_pattern3.id = 3
        m_pattern = Mock()
        m_pattern.side_effect = [
            default_pattern1, default_pattern2, default_pattern3
        ] * 3000
        logflow.logsparser.Journal.Journal.find_pattern = m_pattern

        workflow = Workflow(self.dataset)
        workflow.detect_workflow(25)

    def test_workflow_after_last_line(self):
        # setUp loads only 30 logs, so index 40 is out of range.
        workflow = Workflow(self.dataset)
        with self.assertRaises(Exception):
            workflow.get_tree(40)

    def test_workflow_wrong_first_log(self):
        read_data = pickle.dumps({
            'word2vec': {
                "1": np.asarray([1] * 20),
                "2": np.asarray([2] * 20),
                "3": np.asarray([3] * 20),
                "4": [4] * 20,
                "5": [5] * 20,
                "6": [6] * 20,
                "7": [7] * 20
            },
            'counter_patterns': {
                1: 100,
                2: 100,
                3: 100,
                4: 100,
                6: 1000,
                5: 1000
            },
            "LSTM": {
                3: self.model.state_dict()
            },
            "dict_patterns": {}
        })
        mockOpen = mock_open(read_data=read_data)
        with patch('builtins.open', mockOpen):
            dataset_local = Dataset(path_model="/",
                                    path_data="/",
                                    name_model="/")
            dataset_local.load_files()

        dataset_local.LSTM = self.list_model
        dataset_local.list_logs = []
        # A pattern with id -1 marks every log as unrecognized.
        default_pattern_local = Pattern(0, [], [])
        default_pattern_local.id = -1
        m = Mock()
        m.side_effect = [default_pattern_local] * 30
        logflow.logsparser.Journal.Journal.find_pattern = m
        for i in range(30):
            log = Log("-1", index_line=i)
            log.pattern = default_pattern_local
            dataset_local.list_logs.append(log)
        workflow = Workflow(dataset_local)
        # An unrecognized first log does not raise: detect_workflow reports
        # the failure by returning "-1".
        tree = workflow.detect_workflow(10)
        self.assertEqual(tree, "-1")
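

# Hypothetical entry point, not shown in the original snippets, so the file
# can be run directly as a test module.
if __name__ == "__main__":
    unittest.main()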