예제 #1
0
    def setUpClass(cls):
        """Builds the shared fixtures: converts the pcap sample into
        nfcapd captures, turns those into a csv file and opens it."""

        # pcap sample -> nfcapd captures (60 second rotation window)
        gatherer.convert_pcap_nfcapd(pcap_path, pcap_file, nfcapd_path, 60)
        captured = util.directory_content(nfcapd_path)[1]

        # nfcapd captures -> csv file
        gatherer.convert_nfcapd_csv(nfcapd_path, captured, csv_path, 'test')
        first_csv = util.directory_content(csv_path)[1][0]

        # flows consumed by the test methods; 30 is presumably a row
        # limit -- confirm in gatherer.open_csv
        cls.header, cls.flows = gatherer.open_csv(csv_path, first_csv, 30)
예제 #2
0
    def test_file_creation(self):
        """Checks that the capture process creates its output file."""

        # give the spawned process a moment to create the file
        time.sleep(1)
        newest = util.directory_content(nfcapd_path)[1][0]
        self.assertIn('nfcapd.current', newest)
예제 #3
0
    def gathering(self, nfcapd_files):
        """Converts the given nfcapd files to csv and returns the flows."""

        tmp_dir = f'{util.paths["csv"]}tmp/'

        # nfcapd files -> temporary csv file
        gatherer.convert_nfcapd_csv(util.paths['nfcapd'], nfcapd_files,
                                    tmp_dir, 'realtime')

        csv_files = util.directory_content(tmp_dir)[1]
        logger.info(f'csv files: {csv_files[0]}')

        # header is discarded; only the flows are needed here
        _, flows = gatherer.open_csv(tmp_dir, csv_files[0])

        return flows
예제 #4
0
    def execution(self):
        """Captures flows in real time, detecting and mitigating intrusions.

        Runs until self.event is set: converts rotated nfcapd files to
        csv, formats and aggregates the flows, extracts features, feeds
        them to the detector and mitigates any flow flagged as intrusion.
        The capture process is always killed on exit.
        """

        # background nfcapd capture rotating every 60 seconds
        process = gatherer.capture_nfcapd(util.paths['nfcapd'], 60)
        dataset = Dataset.query.get(self.model.dataset_id)
        logger.info(f'process pid: {process.pid}')
        logger.info(f'dataset file: {dataset.file}')

        try:
            while not self.event.is_set():
                nfcapd_files = util.directory_content(util.paths['nfcapd'])[1]

                try:
                    # only processes rotated files; the file still being
                    # written by nfcapd keeps the 'current' suffix
                    if 'current' not in nfcapd_files[0]:
                        logger.info(f'nfcapd files: {nfcapd_files[:-1]}')

                        # gathering flows.
                        flows = self.gathering(nfcapd_files[:-1])

                        # cleaning remaining files
                        util.clean_directory(util.paths['nfcapd'],
                                             'nfcapd.20*')
                        util.clean_directory(f'{util.paths["csv"]}tmp/', '*')

                        # 18 is presumably the minimum column count of a
                        # matched flow row -- confirm against the csv layout
                        if len(flows[0]) < 18:
                            raise ValueError('No matched flows')
                        logger.info(f'flow: {flows[0]}')

                        # preprocessing flows.
                        formatter = Formatter()
                        flows = formatter.format_flows(flows)
                        logger.info(f'formatted flow: {flows[0]}')

                        modifier = Modifier(2, dataset.aggregation)
                        # +7 presumably maps feature ids onto csv columns
                        # -- confirm in Extractor
                        extractor = Extractor([
                            feature.id + 7 for feature in self.model.features
                        ])

                        while flows:
                            flow, flows = modifier.aggregate(flows)
                            features, _ = extractor.extract(flow)
                            # detecting intrusions.
                            pred, _, _ = self.detector.test([features])

                            if pred[0]:
                                # mitigating intrusions.
                                self.mitigating(flow)
                    time.sleep(2)
                except IndexError:
                    # directory momentarily empty; retry shortly
                    time.sleep(2)
                    continue
                except ValueError as error:
                    logger.error(error)
                    util.clean_directory(util.paths['nfcapd'], 'nfcapd.20*')
                    util.clean_directory(f'{util.paths["csv"]}tmp/', '*')
                    continue
        finally:
            logger.info('thread status: false')
            process.kill()
예제 #5
0
    def setUp(self):
        """Generates the nfcapd and csv fixtures required by each test."""

        # pcap sample -> nfcapd captures (60 second rotation window)
        gatherer.convert_pcap_nfcapd(pcap_path, pcap_file, nfcapd_path, 60)

        # nfcapd captures -> csv file
        captures = util.directory_content(nfcapd_path)[1]
        gatherer.convert_nfcapd_csv(nfcapd_path, captures, csv_path, 'test')
예제 #6
0
    def test_last_file_size(self):
        """Checks the remaining size of the last split pcap file."""

        split_dir = f'{pcap_path}split_normal0/'
        split_files = util.directory_content(split_dir)[1]
        size_bytes = os.path.getsize(f'{split_dir}{split_files[-1]}')

        # bytes -> megabytes, rounded up
        size_mb = ceil(size_bytes / (1000**2))
        self.assertEqual(size_mb, 173, 'different size of the last file')
예제 #7
0
    def setUpClass(cls):
        """Prepares the formatted header and flows shared by the tests."""

        # gathering raw flows from the first csv file
        source_csv = util.directory_content(formatter_path)[1][0]
        raw_header, raw_flows = gatherer.open_csv(formatter_path, source_csv)

        # preprocessing into the shape expected by the test methods
        formatter = Formatter()
        cls.header = formatter.format_header(raw_header)
        cls.flows = formatter.format_flows(raw_flows)
예제 #8
0
    def test_time_interval(self):
        """Checks that consecutive nfcapd files are one minute apart."""

        files = util.directory_content(nfcapd_path)[1]

        # the last two digits of each file name hold the minute
        stamps = [int(name.split('.')[-1][-2:]) for name in files]

        # every adjacent pair must differ by exactly one minute
        for earlier, later in zip(stamps, stamps[1:]):
            self.assertEqual(later - earlier, 1, 'wrong time interval')
예제 #9
0
    def test_extract_specific_features(self):
        """Tests if specific features were correctly extracted from the
        flows."""

        # expected features recorded in the last csv file
        expt_csv = util.directory_content(extractor_path)[1][-1]
        expt_features = gatherer.open_csv(extractor_path, expt_csv)[1]

        # +7 presumably maps feature ids onto csv columns -- confirm in
        # Extractor
        extractor = Extractor([feature + 7 for feature in [1, 3]])
        # labels are not asserted here, so discard them explicitly
        features, _ = extractor.extract_features_labels(self.flows)

        self.assertListEqual(features, expt_features,
                             'features extracted incorrectly')
예제 #10
0
    def test_extract_features_labels(self):
        """Checks extraction of every feature column and of the labels."""

        # expected features recorded in the first csv file
        expected_csv = util.directory_content(extractor_path)[1][0]
        expected_features = gatherer.open_csv(extractor_path, expected_csv)[1]

        selected_columns = [idx + 7 for idx in range(1, 10)]
        extractor = Extractor(selected_columns)
        features, labels = extractor.extract_features_labels(self.flows)

        self.assertListEqual(features, expected_features,
                             'features extracted incorrectly')
        self.assertEqual(labels[0], '0', 'labels extracted incorrectly')
예제 #11
0
    def test_aggregate_flows(self):
        """Checks that header and flows were aggregated as expected."""

        # expected flows recorded in the first csv file
        expected_csv = util.directory_content(modifier_path)[1][0]
        expt_header, expt_flows = gatherer.open_csv(modifier_path,
                                                    expected_csv)

        # format the expected flows the same way the fixture was built
        expt_flows = Formatter(gather=False, train=True).format_flows(
            expt_flows)

        self.assertListEqual(self.header, expt_header,
                             'aggregation performed incorrectly in header')
        self.assertListEqual(self.flows, expt_flows,
                             'aggregation performed incorrectly in flows')
예제 #12
0
    def setUpClass(cls):
        """Builds the aggregated header and flows used by the tests."""

        # gathering raw flows from the last csv file
        source_csv = util.directory_content(modifier_path)[1][-1]
        raw_header, raw_flows = gatherer.open_csv(modifier_path, source_csv)

        # preprocessing before aggregation
        formatter = Formatter()
        formatted_header = formatter.format_header(raw_header)
        formatted_flows = formatter.format_flows(raw_flows)

        # threshold defined according to the expected result in test dataset
        modifier = Modifier(label=0, threshold=5)
        cls.header = modifier.extend_header(formatted_header)
        cls.flows = modifier.aggregate_flows(formatted_flows)
예제 #13
0
    def test_file_interval(self):
        """Tests if the csv file interval generated by the nfcapd files is
        correct."""

        csv_file = util.directory_content(csv_path)[1][0]

        # reads every row at once instead of appending line by line
        with open(f'{csv_path}{csv_file}') as file:
            flows = list(csv.reader(file))

        # first and last flow minutes bound the interval; the -4 index
        # presumably skips trailing summary rows -- confirm against the data
        start_time = datetime.strptime(flows[1][0], '%Y-%m-%d %H:%M:%S').minute
        final_time = datetime.strptime(flows[-4][0],
                                       '%Y-%m-%d %H:%M:%S').minute

        self.assertEqual(final_time - start_time, 13, 'wrong file interval')
예제 #14
0
def content(function, directory):
    """Renders the content of *directory* and handles file selection.

    On a valid POST, stores the chosen files in the session and redirects
    to the creation view named by *function*; otherwise renders the
    directory listing together with its inner directories.
    """
    form = ContentForm(request.form)

    # clearing the previous directory.
    if directory in ['pcap', 'nfcapd', 'csv']:
        paths_hist = {'root': util.root}
    else:
        paths_hist = session['paths_hist']

    # checking if the folder was already opened.
    if directory not in paths_hist:
        paths_hist[directory] = f'{directory}/'

    # creating the full path.
    full_path = paths_hist['root'] + paths_hist[directory]
    # getting inner directory content.
    inner_dirs, files = util.directory_content(full_path)
    form.files_choices(files)

    if request.method == 'POST' and form.validate_on_submit():
        logger.info(f'file: {form.files.data}')
        session['files'] = form.files.data

        return redirect(url_for(f'creation.{function}', directory=directory))

    # creating the paths of the inner directories.
    for inner_dir in inner_dirs:
        paths_hist[inner_dir] = f'{paths_hist[directory]}{inner_dir}/'

    session['paths_hist'] = paths_hist

    return render_template('creation/content.html',
                           form=form,
                           inner_dirs=inner_dirs,
                           relative_path=session['paths_hist'][directory],
                           function=function)
예제 #15
0
def load():
    """Renders the page listing every stored model."""

    model_files = util.directory_content(f'{util.paths["models"]}')[1]
    # each file name is prefixed with the id of its model record
    models = [Model.query.get(name.split('_')[0]) for name in model_files]

    return render_template('setting/load.html', models=models)
예제 #16
0
 def datasets_choices(self):
     """Fills the dataset field choices from the datasets directory."""

     dataset_dir = f'{util.paths["csv"]}datasets/'
     for name in util.directory_content(dataset_dir)[1]:
         # label: file name without the .csv suffix, underscores as spaces
         label = ' '.join(name.split('.csv')[0].split('_'))
         self.dataset.choices.append([name, label])
예제 #17
0
    def test_num_files(self):
        """Tests the number of files created by splitting the pcap files."""

        split_files = util.directory_content(f'{pcap_path}split_normal0/')[1]
        self.assertEqual(len(split_files), 4,
                         'different number of pcap files')
예제 #18
0
    def test_num_files(self):
        """Checks that one nfcapd file exists per minute of the pcap."""

        nfcapd_files = util.directory_content(nfcapd_path)[1]
        self.assertEqual(len(nfcapd_files), 14,
                         'different number of nfcapd files')
예제 #19
0
from datetime import datetime
from math import ceil

# NOTE(review): os.path.dirname('intrusion_prevention_system') is '' so
# base_path resolves to the current working directory -- confirm intended
base_path = os.path.abspath(os.path.dirname('intrusion_prevention_system'))
# makes the application package importable from the tests
sys.path.append(base_path)

from app.core import gatherer
from app.core import util

# defines the main paths used during the tests
pcap_path = f'{base_path}/tests/app/core/data/gatherer/pcap/'
nfcapd_path = f'{base_path}/tests/app/core/data/gatherer/nfcapd/'
csv_path = f'{base_path}/tests/app/core/data/gatherer/csv/'

# file used in the tests; directory_content returns the file list at
# index 1, so despite the singular name this is a list of file names
pcap_file = util.directory_content(pcap_path)[1]


# unit tests
# unit tests
class TestSplitPcap(unittest.TestCase):
    """Tests the split_pcap function in gatherer module."""
    @classmethod
    def setUpClass(cls):
        """Initiates the parameters to feed the test functions."""

        # split size; presumably megabytes -- confirm against
        # gatherer.split_pcap
        cls.split_size = 300

        # function to be tested
        gatherer.split_pcap(pcap_path, pcap_file, cls.split_size)

    @classmethod
예제 #20
0
    def setUpClass(cls):
        """Loads the modified flows consumed by the test methods."""

        # flows come from the second csv file in the extractor data
        source_csv = util.directory_content(extractor_path)[1][1]
        _, cls.flows = gatherer.open_csv(extractor_path, source_csv)