Example #1
    def xgb_scale(self, name='scale_xgb'):
        """

        Args:
            save_path:
            log_path:
            X_train:
            Y_train:
            name:

        Returns:

        """
        save_log_file(self.log_path)

        ### Src and Dst features are included
        logging.info('Using CIDDS-001 data, classifying with xgboost,'
                     ' balancing the samples with the sample_weight parameter in xgboost')

        model = machine_learnings.do_xgboost_blance_scale(self.X_train, self.Y_train)
        # create HDF5 file
        model.save(self.model_path + name + 'model.h5')
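The helper machine_learnings.do_xgboost_blance_scale is not shown on this page. The following is only a minimal sketch of the balancing idea the log message describes, assuming an XGBClassifier and sklearn's compute_sample_weight; the function name and hyperparameters are illustrative, not the repository's implementation.

from xgboost import XGBClassifier
from sklearn.utils.class_weight import compute_sample_weight

def do_xgboost_balance_scale_sketch(X_train, Y_train):
    """Illustrative only: weight each sample inversely to its class frequency."""
    # 'balanced' gives every class the same total weight, so rare attack
    # classes are not drowned out by the majority class.
    weights = compute_sample_weight(class_weight='balanced', y=Y_train)
    model = XGBClassifier(n_estimators=100, random_state=1)
    model.fit(X_train, Y_train, sample_weight=weights)
    return model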
Example #2
    def Dnn_sample(self, name='sample_Dnn'):
        """

        Args:
            save_path:
            log_path:
            X_train:
            Y_train:
            name:

        Returns:

        """
        save_log_file(self.log_path)

        logging.info('Classifying with a DNN,'
                     ' balancing the samples with the sample_weight parameter')

        model = dnn.do_dnn_1d_sample(self.X_train, self.Y_train, self.result_path, Input_shape=self.shape)

        # create HDF5 file
        model.save(self.model_path + name + 'model.h5')
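dnn.do_dnn_1d_sample is likewise defined elsewhere in the repository. The sketch below only shows how per-sample weights are typically passed to Keras' fit, assuming a small dense network and the same 'balanced' weighting as above; layer sizes, epochs and names are illustrative.

from keras.models import Sequential
from keras.layers import Dense
from keras.utils import to_categorical
from sklearn.utils.class_weight import compute_sample_weight

def do_dnn_1d_sample_sketch(X_train, Y_train, input_shape, num_classes=5):
    """Illustrative only: dense classifier trained with per-sample weights."""
    model = Sequential([
        Dense(64, activation='relu', input_shape=(input_shape,)),
        Dense(32, activation='relu'),
        Dense(num_classes, activation='softmax'),
    ])
    model.compile(optimizer='adam', loss='categorical_crossentropy',
                  metrics=['accuracy'])
    # One weight per training sample, larger for under-represented classes.
    weights = compute_sample_weight(class_weight='balanced', y=Y_train)
    model.fit(X_train, to_categorical(Y_train, num_classes=num_classes),
              epochs=10, batch_size=128, sample_weight=weights)
    return model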
Example #3
def dnn_test(X_test, Y_test, model_path, log_path, name):
    """

    Args:
        X_test:
        Y_test:
        save_path:
        name:

    Returns:

    """
    save_log_file(log_path)
    model = load_model(model_path + name + 'model.h5')
    # The model outputs per-class probabilities; convert them back to integer
    # class labels so they match Y_test for do_metrics and confusion_matrix.
    Y_pred = np.argmax(model.predict(X_test), axis=1)
    do_metrics(Y_test, Y_pred)
    attack_types = ['normal', 'attacker', 'victim', 'suspicious', 'unknown']
    confusion_matrixs.plot_confusion_matrix(
        np.array(metrics.confusion_matrix(Y_test, Y_pred)),
        classes=attack_types,
        normalize=True,
        title='dnn Normalized confusion matrix')
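A hypothetical call site for dnn_test, reusing the feature helper and CSV path from Example #5; the model_path and log_path values are placeholders, not paths taken from the repository.

from sources.preprocess.features import get_features_FPPB

path = '/home/liyulian/code/CIDDS/sources/utils/data_features_001.csv'
X_train, X_test, Y_train, Y_test = get_features_FPPB(path)
dnn_test(X_test, Y_test,
         model_path='repositories/',   # placeholder directory
         log_path='repositories/',     # placeholder directory
         name='sample_Dnn')            # matches the default in Dnn_sample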
Example #4
    def xgb_none(self, name='none_xgb'):
        """

        Args:
            save_path:
            log_path:
            X_train:
            Y_train:
            name:

        Returns:

        """
        save_log_file(self.log_path)

        ### No sample balancing
        logging.info('Using CIDDS-001 data, classifying with xgboost,'
                     ' no sample balancing, random_state=1 for the train/test split, and the model is saved')

        model = machine_learnings.do_xgboost(self.X_train, self.Y_train)

        # create HDF5 file
        model.save(self.model_path + name + 'model.h5')
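do_xgboost itself is not shown either; as the log message says, it is the unbalanced counterpart of the sketch under Example #1. A minimal version, with illustrative names and hyperparameters, simply omits the sample_weight argument.

from xgboost import XGBClassifier

def do_xgboost_sketch(X_train, Y_train):
    """Illustrative only: plain xgboost fit, no class or sample balancing."""
    model = XGBClassifier(n_estimators=100, random_state=1)
    model.fit(X_train, Y_train)
    return model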
Example #5
from sources.utils.logfile import save_log_file
from sources.utils.calculation_metrics import do_metrics
from sources.preprocess.features import *
from sources.models import machine_learnings

import logging

if __name__=="__main__":

    log_path = '/home/liyulian/code/CIDDS/repositories'
    save_log_file(log_path)


    ### CIDDS-001
    path = '/home/liyulian/code/CIDDS/sources/utils/data_features_001.csv'

    ### No sample balancing
    logging.info('Using CIDDS-001 data, classifying with xgboost,'
                 ' no sample balancing, random_state=1 for the train/test split, and the model is saved')

    X_train, X_test, Y_train, Y_test = get_features_FPPB(path)
    Y_pred, model = machine_learnings.do_xgboost(X_train, X_test, Y_train, Y_test)
    do_metrics(Y_test, Y_pred)
    import pickle  # the pickle module

    # Save the model (note: the output folder must already exist, otherwise an error is raised)
    with open('repositories/model_001.pickle', 'wb') as f:
        pickle.dump(model, f)
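    # Reloading the pickled model mirrors the dump above. This is a hedged
    # sketch: it assumes do_xgboost returns a fitted estimator that exposes
    # a predict method (e.g. an XGBClassifier).
    with open('repositories/model_001.pickle', 'rb') as f:
        model_001 = pickle.load(f)
    Y_pred_reloaded = model_001.predict(X_test)
    do_metrics(Y_test, Y_pred_reloaded)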

    # ### No sample balancing
    # logging.info('Using CIDDS-001 data, classifying with xgboost,'