import matplotlib.pyplot as plt

# Scatter-plot the 2-D points in x, one color per class label in yl.
for j in range(4):
    plt.scatter(x[0, 0, 0, yl == j],
                x[0, 0, 1, yl == j],
                c=colors[j],
                edgecolors='k',
                lw=1,
                s=10)

# Hide the ticks and fix the axis limits.
plt.xticks([])
plt.yticks([])
plt.xlim(-3, 3)
plt.ylim(-3, 3)
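
# A minimal sketch of the inputs the fragment above expects (assumptions, not
# from the original): x with layout (task, batch, feature, sample) and two
# features, yl holding integer labels 0-3, colors a 4-entry palette.
import numpy as np

colors = ['C0', 'C1', 'C2', 'C3']        # assumed palette, one entry per class
x = np.random.randn(1, 1, 2, 200)        # assumed shape: (1, 1, 2, n_samples)
yl = np.random.randint(0, 4, size=200)   # synthetic labels for the 4 classes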


# Load several pretrained 2-feature, 4-class classifier generators.
net2_4_400_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_400_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-base.pth"))

net2_4_20_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_20_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-N20.pth"))

net2_4_100_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_100_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-N100.pth"))

net2_4_100_4 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_100_4.load_state_dict(
    torch.load("models/classifier-generator-2-4-diff4.pth"))

net2_4_gen = ClassifierGenerator(2, 4, 384).cuda()
net2_4_gen.load_state_dict(
Code Example #2

import glob

import numpy as np
import torch

data_names = []
data_x = []
data_y = []

# Collect every dataset under data/ whose label set has at most 16 classes.
for file in glob.glob("data/*.npz"):
    data = np.load(file)
    if np.unique(data['y']).shape[0] <= 16:
        data_names.append(file[5:-4])
        data_x.append(data['x'].copy())
        data_y.append(data['y'].copy().astype(np.int32))
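
# Sketch (assumption, not part of the original pipeline): each .npz archive is
# expected to hold an 'x' feature array and an integer 'y' label array; a
# compatible file could be written like this (hypothetical name and shapes).
X_demo = np.random.randn(150, 8)
y_demo = np.random.randint(0, 3, size=150)
np.savez("synthetic-example.npz", x=X_demo, y=y_demo)  # kept outside data/ on purpose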

# Leave-one-out fine-tuning: for each dataset, reload the pretrained
# 128-feature / 16-class generator and fine-tune it on all other datasets.
for didx in range(len(data_names)):
    net = ClassifierGenerator(FEATURES=128, CLASSES=16, NETSIZE=384).cuda()
    net.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))

    tdx = []
    tdy = []

    # Use every other dataset with at least 120 samples as fine-tuning data.
    for didx2 in range(len(data_names)):
        if didx2 != didx and data_x[didx2].shape[0] >= 120:
            tdx.append(data_x[didx2])
            tdy.append(data_y[didx2])

    # Run 20 fine-tuning steps, appending the per-step error to a log file.
    for i in range(20):
        err = trainingStep(net, 100, 20, tdx, tdy)
        with open("training_curves/finetuning-%s.txt" % data_names[didx], "a") as f:
            f.write("%d %.6g\n" % (i, err))
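    # Assumed addition (not shown in the original): persist the fine-tuned
    # weights for each held-out dataset under a hypothetical file name.
    torch.save(net.state_dict(), "models/finetuned-%s.pth" % data_names[didx])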
Code Example #3
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import roc_auc_score
import xgboost as xgb

import warnings

# Silence the DeprecationWarning raised inside the catch_warnings block
# (the standard pattern from the Python warnings documentation).
def fxn():
    warnings.warn("deprecated", DeprecationWarning)

with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    fxn()

# Pretrained classifier generators for 128-feature and 32-feature,
# 16-class problems.
net128_16 = ClassifierGenerator(128, 16, NETSIZE=384).cuda()
net128_16.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))
net32_16 = ClassifierGenerator(32, 16, NETSIZE=384).cuda()
net32_16.load_state_dict(torch.load("models/classifier-generator-32-16.pth"))
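
# Sketch (assumption, not from the original): the imports above suggest the
# generated classifiers are benchmarked against standard baselines via ROC AUC.
# A minimal binary example using only scikit-learn:
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

Xb, yb = make_classification(n_samples=200, n_features=32, random_state=0)
Xtr, Xte, ytr, yte = train_test_split(Xb, yb, test_size=0.5, random_state=0)
rf = RandomForestClassifier(n_estimators=100, random_state=0).fit(Xtr, ytr)
print("RF baseline AUC: %.3f" % roc_auc_score(yte, rf.predict_proba(Xte)[:, 1]))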

dataset_descriptions = {
    "data/immunotherapy.npz": "Immunotherapy\\cite{khozeimeh2017expert, khozeimeh2017intralesional}",
    "data/foresttype.npz": "Forest type\\cite{johnson2012using}",
    "data/winetype.npz": "Wine type\\cite{forina1990parvus}",
    "data/cryotherapy.npz": "Cryotherapy\\cite{khozeimeh2017expert, khozeimeh2017intralesional}",
    "data/chronic-kidney.npz": "Chronic kidney\\cite{chronickidney}",
    "data/echocardiogram.npz": "Echocardiogram\\cite{echocardiogram}",
    "data/haberman.npz": "Haberman\\cite{haberman1976generalized}",
    "data/iris.npz": "Iris\\cite{fisher1936use}",
    "data/hcc-survival.npz": "HCC Survival\\cite{santos2015new}",
    "data/horse-colic.npz": "Horse Colic\\cite{horsecolic}",
    "data/lung-cancer.npz": "Lung cancer\\cite{hong1991optimal}",