Example #1
def train():
    """ Training
    """
    dataset = 'cus_mnist'
    dataroot = 'E:/ProjectSet/Pycharm/WAIBAO/Code01/GAN/data/cus_mnist'
    opt = Options().parse(dataset)

    opt.load_weights = False
    dataloader = load_data(opt)
    print(opt)

    # LOAD MODEL
    model = Ganomaly(opt, dataloader)
    model.train()
Example #2
def train():
    """ Training
    """

    ##
    # ARGUMENTS
    opt = Options().parse()
    ##
    # LOAD DATA
    dataloader = load_data(opt)
    ##
    # LOAD MODEL
    model = Ganomaly(opt, dataloader)
    ##
    # TRAIN MODEL
    model.train()
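
Example #2 defines the bare training routine; a minimal entry point for running it as a script would look like the sketch below (Example #6 shows the same guard only in commented-out form):

if __name__ == '__main__':
    train()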
Example #3
    def run(self):  # train the model

        # 1. Model training
        self.option.signalInfo.emit(-1, "Starting data import...")
        dataloader = load_data(self.option)
        model = Ganomaly(self.option, dataloader)
        self.option.signalInfo.emit(10, "Data import finished!")
        self.modelINFO = model.train()

        # 2. Post-process the training results into a dictionary
        self.option.signalInfo.emit(100, "")
        self.modelINFO['opt'] = vars(self.option)
        self.modelINFO['opt'].pop('signalInfo')
        signal = self.modelINFO['opt'].pop('signal')

        self.modelINFO['modelName'] = self.modelINFO['opt'].pop('dataset')
        self.modelINFO['raw_path'] = self.modelINFO['opt'].pop('dataroot')
        self.modelINFO['desc'] = self.modelINFO['opt'].pop('desc')

        # 3. Save the training-result dictionary to a JSON file
        ## default save path: ./output/modelsData/models.json
        # filename = './output/modelsData/models.json'
        # data = {}
        # with open(filename,'r',encoding='utf-8') as f:
        #     try:
        #         data = json.load(f)
        #     except json.decoder.JSONDecodeError: # the file holds no data yet, i.e. no model has been trained
        #         data = {}
        # with open(filename, 'w', encoding="utf-8") as f:
        #     data[self.modelINFO['modelName']] = self.modelINFO
        #     json.dump(data,f,sort_keys=True,indent=2)


        # 4. Pass the training-result dictionary back to the main window via the signal
        signal.emit(copy.deepcopy(self.modelINFO))
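
Example #3 leaves the JSON persistence step commented out. A runnable sketch of that step, assuming the default ./output/modelsData/models.json path from the comments (the helper name save_model_info is hypothetical):

import json
import os

def save_model_info(model_info, filename='./output/modelsData/models.json'):
    """Append one training-result dictionary to models.json, keyed by model name."""
    os.makedirs(os.path.dirname(filename), exist_ok=True)
    data = {}
    if os.path.exists(filename):
        with open(filename, 'r', encoding='utf-8') as f:
            try:
                data = json.load(f)
            except json.decoder.JSONDecodeError:
                data = {}  # file exists but is empty: no model has been trained yet
    data[model_info['modelName']] = model_info
    with open(filename, 'w', encoding='utf-8') as f:
        json.dump(data, f, sort_keys=True, indent=2)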
Example #4
    def train(self):
        """ Training
        """
        self.textEdit.append('Start training...')
        dataset = 'cus_mnist_2'
        # dataroot = './data/cus_mnist'
        dataroot = r'E:\ProjectSet\Pycharm\WAIBAO\cus_mnist2'  # raw string so the backslashes are not treated as escapes
        opt = Options().parse(dataset, dataroot)
        opt.signal = self.Signal_TrainFinished
        opt.load_weights = False
        dataloader = load_data(opt)
        print(opt)

        # LOAD MODEL
        opt.showProcess = self.progressBar
        opt.showText = self.textEdit
        model = Ganomaly(opt, dataloader)
        model.train()
Example #5
def train():
    """ Training
    """

    ##
    # ARGUMENTS
    opt = Options().parse()
    ##
    # LOAD DATA
    dataloader = load_data(opt)
    ##
    # LOAD MODEL
    model = Ganomaly(opt, dataloader)
    ##
    if opt.phase == 'train':
        # TRAIN MODEL
        model.train()
    elif opt.phase == 'test':
        performance = model.test()
        print(performance)
Example #6
from lib.data_preprocess_KDD import load_data_kdd
from lib.data_preprocess_arr import load_data_arr
from lib.data_preprocess_ucr import load_data_ucr

##
# def main():
""" Training
"""

##
# ARGUMENTS
opt = Options().parse()
print(opt.anomaly_class)
##
# LOAD DATA
# dataloader = load_data(opt)
# dataloader = load_data_kdd(opt)
# dataloader = load_data_arr(opt)
dataloader = load_data_ucr(opt)

##
# LOAD MODEL
model = Ganomaly(opt, dataloader)

##
# TRAIN MODEL
model.train()

# if __name__ == '__main__':
#     main()
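
Example #6 selects a dataset-specific loader by commenting lines in and out. The same choice can be driven by opt.dataset instead; a sketch, where the dictionary keys are assumptions (the example does not show which dataset names correspond to which loader):

LOADERS = {
    'kdd': load_data_kdd,         # assumed key
    'arrhythmia': load_data_arr,  # assumed key
    'ucr': load_data_ucr,         # assumed key
}
# Fall back to the generic load_data used in the other examples
dataloader = LOADERS.get(opt.dataset, load_data)(opt)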
Example #7
def train():
    """ Training
    """

    ##
    # ARGUMENTS
    opt = Options().parse()

    ##
    # LOAD DATA
    dataloader = load_data(opt)
    ##
    # LOAD MODEL
    model = Ganomaly(opt, dataloader)
    ##
    # TRAIN MODEL
    model.train()

    train_1 = model.train_final()
    train_1 = train_1.cpu().numpy()

    test_1, y_true, y_true_original, auroc_value, auprc_value = model.test_final()
    test_1 = test_1.cpu().numpy()
    y_true = y_true.cpu().numpy()
    y_true_original = y_true_original.cpu().numpy()

    test_path = os.path.join(opt.outf, opt.dataset, 'test', 'OCSVM',
                             'abnormal' + str(opt.abnormal_class),
                             'seed' + str(opt.manualseed))
    if not os.path.isdir(test_path):
        os.makedirs(test_path)

    print("GANomaly AUROC: {}".format(auroc_value))
    np.save(test_path + '/ganomaly_aucroc.npy', auroc_value)

    # Invert the binary ground-truth labels (swap 0 and 1) before computing AUROC
    for i in range(len(y_true)):
        if y_true[i] == 1:
            y_true[i] = 0
        else:
            y_true[i] = 1

    ################################

    cf = svm.OneClassSVM(gamma='scale', nu=0.1)
    train_ind = np.random.choice(train_1.shape[0], 10000, replace=False)
    cf.fit(train_1[train_ind, :])
    y_scores = cf.score_samples(test_1)
    y_scores = (y_scores - min(y_scores)) / (max(y_scores) - min(y_scores))

    auroc = metrics.roc_auc_score(y_true, y_scores)
    print("HybridGAN AUROC: {}".format(auroc))
    np.save(test_path + '/svm_aucroc1.npy', auroc)
    np.save(test_path + '/svm_aucroc1_transduct_' + str(0) + '.npy', auroc)

    bandwidth = get_bandwidth(y_scores, test_1)

    for trans_iter in np.arange(0, 30, 1):

        optimal_threshold = find_optimal_threshold(y_scores=y_scores,
                                                   train_1=train_1,
                                                   test_1=test_1,
                                                   y_true=y_true,
                                                   train_ind=train_ind,
                                                   test_path=test_path,
                                                   bandwidth=bandwidth)
        abn_idx = np.where(
            y_scores < np.percentile(y_scores, optimal_threshold))
        abn_tst_latent = test_1[abn_idx]
        kmeans = KMeans(n_clusters=1, random_state=0).fit(abn_tst_latent)
        train_1 = np.concatenate((train_1, kmeans.transform(train_1)), axis=1)
        test_1 = np.concatenate((test_1, kmeans.transform(test_1)), axis=1)
        cf = svm.OneClassSVM(gamma='scale', nu=0.1)
        cf.fit(train_1[train_ind, :])
        y_scores = cf.score_samples(test_1)
        y_scores = (y_scores - min(y_scores)) / (max(y_scores) - min(y_scores))
        auroc = metrics.roc_auc_score(y_true, y_scores)
        print("TransdeepOCSVM AUROC after {} iterations: {}".format(
            trans_iter + 1, auroc))
        print("Optimal_threshold after {} iterations: {}".format(
            trans_iter + 1, optimal_threshold[0]))
        np.save(
            test_path + '/svm_aucroc1_transduct_' + str(trans_iter + 1) +
            '.npy', auroc)
        np.save(
            test_path + '/optimal_threshold_' + str(trans_iter + 1) + '.npy',
            optimal_threshold)
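
The loop in Example #7 writes one AUROC value per transduction iteration to disk. To load them back for comparison afterwards, a short sketch (it assumes the same opt object and directory layout used above; this loading step is not part of the original example):

import os
import numpy as np

# Same directory layout as Example #7
test_path = os.path.join(opt.outf, opt.dataset, 'test', 'OCSVM',
                         'abnormal' + str(opt.abnormal_class),
                         'seed' + str(opt.manualseed))
ganomaly_auroc = np.load(os.path.join(test_path, 'ganomaly_aucroc.npy'))
svm_aurocs = [np.load(os.path.join(test_path, 'svm_aucroc1_transduct_{}.npy'.format(i)))
              for i in range(0, 31)]  # index 0 is the pre-transduction baseline
print(ganomaly_auroc, svm_aurocs)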