Esempio n. 1
0
    def execution(self):
        """Capture nfcapd flow files in a loop, detect intrusions in the
        aggregated flows, and mitigate any flow the detector flags.

        Runs until ``self.event`` is set. The nfcapd capture process is
        always killed on exit (``finally``), even if the loop raises.
        """
        # Start the nfcapd capture with a 60-second rotation interval.
        process = gatherer.capture_nfcapd(util.paths['nfcapd'], 60)
        dataset = Dataset.query.get(self.model.dataset_id)
        logger.info(f'process pid: {process.pid}')
        logger.info(f'dataset file: {dataset.file}')

        try:
            while not self.event.is_set():
                nfcapd_files = util.directory_content(util.paths['nfcapd'])[1]

                try:
                    # nfcapd keeps a 'current' file open while it is still
                    # writing; only process once the first entry is a
                    # completed capture file.
                    if 'current' not in nfcapd_files[0]:
                        logger.info(f'nfcapd files: {nfcapd_files[:-1]}')

                        # gathering flows.
                        flows = self.gathering(nfcapd_files[:-1])

                        # cleaning remaining files
                        util.clean_directory(util.paths['nfcapd'],
                                             'nfcapd.20*')
                        util.clean_directory(f'{util.paths["csv"]}tmp/', '*')

                        # Fewer than 18 fields means no usable flow record.
                        if len(flows[0]) < 18:
                            raise ValueError('No matched flows')
                        logger.info(f'flow: {flows[0]}')

                        # preprocessing flows.
                        formatter = Formatter()
                        flows = formatter.format_flows(flows)
                        logger.info(f'formatted flow: {flows[0]}')

                        modifier = Modifier(2, dataset.aggregation)
                        extractor = Extractor([
                            feature.id + 7 for feature in self.model.features
                        ])

                        # Aggregate, extract features, and test one flow at
                        # a time until the batch is exhausted.
                        while flows:
                            flow, flows = modifier.aggregate(flows)
                            features, _ = extractor.extract(flow)
                            # detecting intrusions.
                            pred, _, _ = self.detector.test([features])

                            if pred[0]:
                                # mitigating intrusions.
                                self.mitigating(flow)
                    time.sleep(2)
                except IndexError:
                    # Directory may be momentarily empty; retry shortly.
                    time.sleep(2)
                    continue
                except ValueError as error:
                    # No matched flows: log, clear leftovers, keep looping.
                    logger.error(error)
                    util.clean_directory(util.paths['nfcapd'], 'nfcapd.20*')
                    util.clean_directory(f'{util.paths["csv"]}tmp/', '*')
                    continue
        finally:
            logger.info('thread status: false')
            process.kill()
Esempio n. 2
0
from app import app, db, socketio
from app.core import util
from app.models import (Classifier, Dataset, Feature,
                        Intrusion, Model, Preprocessing,
                        Result)


@app.shell_context_processor
def make_shell_context():
    """Expose the app, database, socket server, and ORM models inside
    ``flask shell`` without manual imports."""
    context = {'app': app, 'db': db, 'socketio': socketio}
    # Each model is registered under its own class name.
    for model_class in (Classifier, Dataset, Feature, Intrusion,
                        Model, Preprocessing, Result):
        context[model_class.__name__] = model_class
    return context


if __name__ == '__main__':
    try:
        socketio.run(app)
    finally:
        # On shutdown, drop every recorded intrusion and remove any
        # leftover nfcapd capture files.
        intrusions = Intrusion.query.all()
        for intrusion in intrusions:
            db.session.delete(intrusion)
        db.session.commit()

        util.clean_directory(util.paths['nfcapd'], '*')
Esempio n. 3
0
    def tearDownClass(cls):
        """Cleans all files used in tests."""

        # The f-string wrapper f'{nfcapd_path}' was a no-op; pass the
        # path directly.
        util.clean_directory(nfcapd_path, '*')
Esempio n. 4
0
    def tearDownClass(cls):
        """Cleans all files used in tests."""

        util.clean_directory(f'{pcap_path}split_normal0/', '*')
        # shutil.rmtree replaces the shell call (os.system('rm -rf ...')):
        # no subprocess, no shell-injection surface if pcap_path ever
        # contains metacharacters. ignore_errors mirrors rm -f semantics
        # (silently succeed if the directory is already gone).
        import shutil
        shutil.rmtree(f'{pcap_path}split_normal0/', ignore_errors=True)
Esempio n. 5
0
    def tearDown(self):
        """Cleans all files used in tests and kills the process started by the
        function."""

        # The f-string wrapper f'{nfcapd_path}' was a no-op; pass the
        # path directly.
        util.clean_directory(nfcapd_path, '*')
        self.process.kill()
Esempio n. 6
0
    def tearDown(self):
        """Cleans all files used in tests."""

        # The f-string wrappers around the bare path variables were
        # no-ops; pass the paths directly.
        util.clean_directory(nfcapd_path, '*')
        util.clean_directory(csv_path, '*')