def batch_centerloss_confusionmatrix(imagepath="images", netpath="./dvns/mnist/"):
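    # Parallel lists: index i pairs a trained .caffemodel with its deploy
    # prototxt, input blob shape, feature layer, output .mat name and mean file.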
    modelist = [
        "mnistorig_iter_10000.caffemodel",
        "orimnistcaffe_alexnet_train_iter_400000.caffemodel",
        "lenet_dvn__iter_500000.caffemodel",
        "lenet_oridata__iter_500000.caffemodel",
        "orimnitbvlc_googlenet_iter_400000.caffemodel",
        "mnistgenerate_bvlc_googlenet_iter_480000.caffemodel",
    ]
    deploylist = [
        "alex_deploy.prototxt",
        "alex_deploy.prototxt",
        "lenet.prototxt",
        "lenet.prototxt",
        "google_deploy.prototxt",
        "google_deploy.prototxt",
    ]
    shapelists = [[10, 3, 227, 227], [10, 3, 227, 227], [64, 1, 28, 28],
                  [64, 1, 28, 28], [10, 3, 224, 224], [10, 3, 224, 224]]
    netlayerlist = [
        "fc8", "fc8", "ip2", "ip2", "loss3/classifier", "loss3/classifier"
    ]
    outnamelist = [
        "mnistlenetdvn.mat", "mnistlenetori.mat", "mnistalexdvn.mat",
        "mnistalexori.mat", "mnistgoogledvn.mat", "mnistgoogleori.mat"
    ]
    meanlist = [
        "patchmnistimages_256_mean.binaryproto",
        "patchmnistimages_256_mean.binaryproto",
        "patchmnistimages_256_mean.binaryproto",
        "patchmnistimages_256_mean.binaryproto",
        "patchmnistimages_256_mean.binaryproto",
        "patchmnistimages_256_mean.binaryproto"
    ]

    from sklearn.metrics import confusion_matrix
    import numpy as np
    y_true = []
    #networklist=["lenet","alext","google"]
    #net,transformer=load_models(modelpath = "./mninstmodel/lenet_iter_10000.caffemodel",deploypath = "./mnistmodel/lenet_deploy.prototxt",meanfacepath ="",shapelist=[64,1,28,28])
    #  net,transformer=load_models(modelpath = "/home/g206/work_sdf/caffe1225/caffe/analysisfeatures/dvns/mnist/orimnistcaffe_alexnet_train_iter_400000.caffemodel",deploypath = "/home/g206/work_sdf/caffe1225/caffe/analysisfeatures/dvns/mnist/alex_deploy.prototxt",meanfacepath ="",shapelist=shapelist)
    #orimnitbvlc_googlenet_iter_400000.caffemodel
    #  net,transformer=load_models(modelpath = "/home/g206/work_sdf/caffe1225/caffe/analysisfeatures/dvns/mnist/mnistgenerate_bvlc_googlenet_iter_480000.caffemodel",deploypath = "/home/g206/work_sdf/caffe1225/caffe/analysisfeatures/dvns/mnist/alex_deploy.prototxt",meanfacepath ="",shapelist=shapelist)
    imglist = GetFileList(imagepath, [])
    i = 0
    from tsne.tsne_1 import tsnepng
    from mnist_single_plot_roc import drawroc
    import traceback
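    # For each model: load the net, extract features and predictions for every
    # image, draw the ROC curve, and pickle predictions plus the confusion matrix.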
    for i in range(0, len(modelist)):
        # for i in range(0, 2):
        try:
            pred = []
            prediction = []
            y_true = []
            net, transformer = load_models(modelpath=netpath + modelist[i],
                                           deploypath=netpath + deploylist[i],
                                           meanfacepath=netpath + meanlist[i],
                                           shapelist=shapelists[i])
            y_true = mklabel(imglist, "mnist_test")
            print "generating feature for model {0}....".format(modelist[i])
            pred, prediction = generatefeature(imglist, net, transformer,
                                               y_true, shapelists[i],
                                               netlayerlist[i], outnamelist[i])
            print len(y_true), len(pred)
            print "roc" + outnamelist[i].split(".mat")[0] + ".png"
            drawroc(y_true, prediction,
                    "roc" + outnamelist[i].split(".mat")[0] + ".png")
            #tsnepng(prediction,y_true,"tsne_"+outnamelist[i].split(".mat")[0]+".png")
            import pickle
            with open(outnamelist[i].split(".")[0] + "pred.pkl", 'wb') as f:
                pickle.dump(pred, f)
            with open(outnamelist[i].split(".")[0] + "true.pkl", 'wb') as f:
                pickle.dump(y_true, f)
            y_true = np.asarray(y_true, dtype=int)
            pred = np.asarray(pred, dtype=int)
            print pred
            print y_true
            # sklearn convention: confusion_matrix(y_true, y_pred)
            cm = confusion_matrix(y_true, pred)
            print cm
            with open(outnamelist[i].split(".")[0] + "_cm.pkl", "wb") as f:
                pickle.dump(cm, f)
            np.savetxt(outnamelist[i].split(".")[0] + "_cm.csv",
                       cm,
                       delimiter=",")
        except Exception as e:
            print e
            traceback.print_exc()
            continue
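
# --- Not part of the original source: a hedged sketch of the load_models helper
# --- used above (GetFileList, mklabel and generatefeature also live elsewhere in
# --- this module). It assumes the standard pycaffe deploy workflow; the real
# --- helper may differ.
def load_models(modelpath, deploypath, meanfacepath, shapelist):
    import caffe
    from caffe.proto import caffe_pb2
    import numpy as np
    # Load the deploy prototxt together with the trained weights in TEST mode.
    net = caffe.Net(deploypath, modelpath, caffe.TEST)
    # Preprocessor: HxWxC float images -> CxHxW blobs matching the data layer.
    transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
    transformer.set_transpose('data', (2, 0, 1))
    if meanfacepath:
        # Read the .binaryproto mean image and reduce it to a per-channel mean.
        blob = caffe_pb2.BlobProto()
        with open(meanfacepath, 'rb') as f:
            blob.ParseFromString(f.read())
        mean = np.asarray(caffe.io.blobproto_to_array(blob))[0]  # (C, H, W)
        transformer.set_mean('data', mean.mean(axis=(1, 2)))
    transformer.set_raw_scale('data', 255)  # caffe.io.load_image returns [0, 1]
    if shapelist[1] == 3:
        transformer.set_channel_swap('data', (2, 1, 0))  # RGB -> BGR for color nets
    # Resize the input blob to the requested batch shape.
    net.blobs['data'].reshape(*shapelist)
    return net, transformer

# Hypothetical invocation (paths are placeholders):
# batch_centerloss_confusionmatrix(imagepath="mnist_test", netpath="./dvns/mnist/")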
Example No. 2
        mat.append(featline)
        label = image.split("/")[-2]
        # labels.append(str(lines[nn][1]))
        labels.append(int(label))
        #  print "===>>",out['prob'].argmax()
        #  print "=====>>",lines[nn][1]
        if (nn % 100 == 0):
            with open("./error/png_oversmapling_result/" + outdir, 'wb') as f:
                scipy.io.savemat(f, {'data': mat, 'labels': labels})
        nn += 1

    # print pred.shape
    # tsnepng(mat,labels,"gootsne_"+outdir)
    print "tsnepng=========================>>>>"

    drawroc(labels, predroc, "./error/png_oversmapling_result/" + "zoomroc_10" + outdir.split('.')[0] + ".png")
    print "roc=========================>>>>"
    print(classification_report(labels, pred))
    with open("./error/png_oversmapling_result/" + outdir.split('.')[0] + ".txt", "w") as text_file:
        text_file.write(outdir.split('.')[0] + " model\n")
        text_file.write(classification_report(labels, pred))
    import pickle
    with open("./error/png_oversmapling_result/" + outdir.split('.')[0] + "_pred.pkl", "wb") as f:
        pickle.dump(mat, f)
    with open("./error/png_oversmapling_result/" + outdir.split('.')[0] + "_true.pkl", "wb") as f:
        pickle.dump(labels, f)
    with open("./error/png_oversmapling_result/" + outdir, 'w') as f:
        scipy.io.savemat(f, {'data': mat, 'labels': labels})  # append
    # sklearn convention: confusion_matrix(y_true, y_pred)
    cm = confusion_matrix(labels, pred)
    with open("./error/png_oversmapling_result/" + outdir.split(".")[0] + ".pkl", "wb") as f:
        pickle.dump(cm, f)
Example No. 3
        label = image.split("/")[-2]
        # labels.append(str(lines[nn][1]))
        labels.append(int(label))
        #  print "===>>",out['prob'].argmax()
        #  print "=====>>",lines[nn][1]
        if (nn % 100 == 0):
            with open("./error/mnist_result/" + outdir, 'wb') as f:
                scipy.io.savemat(f, {'data': mat, 'labels': labels})
        nn += 1

    # print pred.shape
    # tsnepng(mat,labels,"gootsne_"+outdir)
    print "tsnepng=========================>>>>"

    drawroc(
        labels, predroc,
        "./error/mnist_result/" + "zoomroc_10" + outdir.split('.')[0] + ".png")
    print "roc=========================>>>>"
    print(classification_report(labels, pred))
    with open("./error/mnist_result/" + outdir.split('.')[0] + ".txt",
              "w") as text_file:
        text_file.write(outdir.split('.')[0] + " model\n")
        text_file.write(classification_report(labels, pred))
    import pickle
    with open("./error/mnist_result/" + outdir.split('.')[0] + "_pred.pkl",
              "wb") as f:
        pickle.dump(mat, f)
    with open("./error/mnist_result/" + outdir.split('.')[0] + "_true.pkl",
              "wb") as f:
        pickle.dump(labels, f)
    with open("./error/mnist_result/" + outdir, 'w') as f: