Example No. 1
import numpy as np

# Project-level helpers and data (utils, DBN, CapsNet, eva, pre, k_fold,
# k_fold_X, k_fold_Y, trX, trY, vaX, vaY, teX, teY, isTraining, train_datadir,
# setFileNames) are defined elsewhere in the original module.
def train(do_k_fold, out_dir, log_f):
    if do_k_fold:
        utils.print_out("# do k_fold k=%d" % k_fold, log_f)
        k_fold_val = 0
        k_fold_tra = 0
        k_TP = k_TN = k_FP = k_FN = k_SE = k_SP = k_MCC = k_ACC = 0
        for i in range(k_fold):
            trX_=[]
            trY_=[]
            for j in range(k_fold):
                if j == i: continue
                trX_.append(k_fold_X[j])
                trY_.append(k_fold_Y[j])
            trX_ = np.concatenate(trX_)
            trY_ = np.concatenate(trY_)
            utils.print_out("#k_fold %d" % i, log_f)
            utils.print_out("#do DBN ...", log_f)
            dbn = DBN()
            dbn.train(trX_)
            utils.print_out("#end DBN", log_f)
            utils.print_out("#do caps ...", log_f)
            capsNet = CapsNet(is_training=True, dbn=dbn)

            i_k_fold_val, i_k_fold_tra = capsNet.train(trX_, trY_, k_fold_X[i], k_fold_Y[i], None, log_f)
            TP, TN, FP, FN, SE, SP, MCC, ACC = eva(capsNet, k_fold_X[i], k_fold_Y[i])
            print(i,", TP:", TP)
            print(i,", TN:", TN)
            print(i,", FP:", FP)
            print(i,", FN:", FN)
            print(i,", SE:", SE)
            print(i,", SP:", SP)
            print(i,", MCC:", MCC)
            print(i,", ACC: ", ACC)
            k_TP += TP
            k_TN += TN
            k_FP += FP
            k_FN += FN
            k_SE += SE
            k_SP += SP
            k_MCC += MCC
            k_ACC += ACC

        print("TP :", k_TP / 5)
        print("TN :", k_TN / 5)
        print("FP :", k_FP / 5)
        print("FN :", k_FN / 5)
        print("SE :", k_SE / 5)
        print("SP :", k_SP / 5)
        print("MCC: ", k_MCC / 5)
        print("ACC: ", k_ACC / 5)
    else:
        utils.print_out("#do DBN ...", log_f)
        dbn = DBN()
        dbn.train(trX)
        utils.print_out("#end DBN", log_f)
        utils.print_out("#do caps ...", log_f)
        utils.print_out("#test instead val set for test ...", log_f)
        capsNet = CapsNet(is_training=isTraining, dbn=dbn)
        if isTraining:
            i_k_fold_val, i_k_fold_tra = capsNet.train(trX, trY, teX, teY, "./board", log_f)
            utils.print_out("#end caps", log_f)

            tr_TP, tr_TN, tr_FP, tr_FN, tr_SE, tr_SP, tr_MCC, tr_ACC = eva(capsNet, trX, trY)
            val_TP, val_TN, val_FP, val_FN, val_SE, val_SP, val_MCC, val_ACC = eva(capsNet, vaX, vaY)
            te_TP, te_TN, te_FP, te_FN, te_SE, te_SP, te_MCC, te_ACC = eva(capsNet, teX, teY)
            utils.print_out('train: TP:%.3f;  TN:%.3f;  FP:%.3f;  FN:%.3f;  SE:%.3f;  SP:%.3f;  MCC:%.3f;  ACC:%.3f' \
                            % (tr_TP, tr_TN, tr_FP, tr_FN, tr_SE, tr_SP, tr_MCC, tr_ACC), log_f)
            utils.print_out('val:   TP:%.3f;  TN:%.3f;  FP:%.3f;  FN:%.3f;  SE:%.3f;  SP:%.3f;  MCC:%.3f;  ACC:%.3f' \
                            % (val_TP, val_TN, val_FP, val_FN, val_SE, val_SP, val_MCC, val_ACC), log_f)
            utils.print_out('test:  TP:%.3f;  TN:%.3f;  FP:%.3f;  FN:%.3f;  SE:%.3f;  SP:%.3f;  MCC:%.3f;  ACC:%.3f' \
                            % (te_TP, te_TN, te_FP, te_FN, te_SE, te_SP, te_MCC, te_ACC), log_f)

        else:
            import csv
            csvFile = open("./"+train_datadir+"/"+setFileNames[1], "r")
            reader = csv.reader(csvFile)  # reader is an iterator over the rows
            data = []
            for item in reader:
                data.append(item[0])
            csvFile.close()
            data = data[1:]

            utils.print_out("#end caps", log_f)
            pre_Y = pre(capsNet, vaX).tolist()[0]
            import pandas as pd

            dataFrame = pd.DataFrame({ "0_name": data,"1_class": pre_Y})
            dataFrame.to_csv('./data_set/test_dir/180831-result.csv', index=False, sep=",")
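A minimal driver sketch for the train() entry point above, assuming the module-level data globals are already loaded; the file and directory names here are illustrative, not taken from the original example.

# Hypothetical driver (not part of the original listing): run the k-fold
# branch and pass an open log file, which is what the log_f argument is
# assumed to be.
if __name__ == "__main__":
    with open("./train.log", "w") as log_f:
        train(do_k_fold=True, out_dir="./out", log_f=log_f)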
Example No. 2

import torch
from tqdm import tqdm

# NOTE: the beginning of this listing is truncated on the source page. The
# constructor name DataProcessor below is an assumption; the object only needs
# to expose the train_loader used in the loop further down. CapsNet,
# num_classes, epochs and dataset are defined in the truncated portion.
dataProcessor = DataProcessor(dataset=dataset,
                              data_path='data/char74k_preprocessed')
if torch.cuda.is_available():
    device = torch.device('cuda')
else:
    device = torch.device('cpu')

capsNet = CapsNet(conv_in=3, num_classes=num_classes).to(device)
cseloss = torch.nn.CrossEntropyLoss()
adamOptimizer = torch.optim.Adam(params=capsNet.parameters())
best_val_acc = -1
best_val_epoch = 0
for epoch in tqdm(range(epochs)):
    epoch_loss = 0
    for batch_id, (data, target) in enumerate(dataProcessor.train_loader):
        capsNet.train()
        # one-hot encode the targets; this is only needed by the reconstruction
        # loss commented out below, since CrossEntropyLoss takes class indices
        target = torch.eye(num_classes).index_select(dim=0, index=target)

        data = data.to(device)
        target = target.to(device)

        adamOptimizer.zero_grad()
        # output,reconstructions,masked = capsNet(data)
        output = capsNet(data)

        # print(output.shape)
        # loss = capsNet.loss(data,reconstructions,target,output)
        loss = cseloss(output, target.argmax(dim=-1))
        loss.backward()
        adamOptimizer.step()
        epoch_loss += loss.item()  # .item() detaches the value so the graph is not kept alive
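    # --- Sketch only, not in the original listing: a typical per-epoch
    # validation pass that would keep best_val_acc / best_val_epoch current.
    # It assumes dataProcessor also exposes a val_loader; adapt as needed.
    capsNet.eval()
    correct, total = 0, 0
    with torch.no_grad():
        for data, target in dataProcessor.val_loader:
            data, target = data.to(device), target.to(device)
            pred = capsNet(data).argmax(dim=-1)
            correct += (pred == target).sum().item()
            total += target.size(0)
    val_acc = correct / total
    if val_acc > best_val_acc:
        best_val_acc = val_acc
        best_val_epoch = epoch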