# One optimisation step: forward pass on a (memory, test) batch, a
# negative-log-likelihood-style loss against one-hot labels, then an Adam
# update; returns the scalar loss.
# NOTE(review): fragment -- the enclosing "def trainingStep(...)" header is
# above this visible chunk, and this first line has lost its leading indent.
batch_mem = tovar(np.array(batch_mem).transpose(0,2,1).reshape(BS,1,FEATURES+CLASSES,NTRAIN))
	# Reshape batches to (BS, 1, channels, n_examples) and move to the GPU.
	batch_test = tovar(np.array(batch_test).transpose(0,2,1).reshape(BS,1,FEATURES,100))
	batch_label = tovar(np.array(batch_label).transpose(0,2,1))
	class_count = torch.cuda.FloatTensor(np.array(class_count))
	
	net.zero_grad()
	p = net.forward(batch_mem, batch_test, class_count)
	# Sum p * one-hot labels over the class axis, negate, and average;
	# assumes p holds log-probabilities -- TODO confirm against the model.
	loss = -torch.sum(p*batch_label,1).mean()
	loss.backward()
	net.adam.step()
	# Pre-0.4 PyTorch idiom for extracting a scalar from a 0-d tensor.
	err = loss.cpu().data.numpy()[0]
	
	return err
		
# Train a fresh 2-feature / 4-class generator at a fixed difficulty of 1.0,
# averaging the reported loss over 50-step windows.
net = ClassifierGenerator(FEATURES=2, CLASSES=4, NETSIZE=384).cuda()

difficulty_level = 1.0
errs = []

# Running-sum accumulators for the windowed mean loss.
err = 0
err_count = 0

for i in range(40000):
	# One step on a batch of 400 at fixed difficulty, with feature/class
	# variation disabled.
	err += trainingStep(net, 400, min_difficulty = difficulty_level, max_difficulty = difficulty_level, feature_variation = False, class_variation = False)
	err_count += 1
			
	# Every 50 steps, record the mean loss over the window.
	# NOTE(review): fragment ends here -- the reset of err/err_count that
	# presumably follows is past the end of this chunk.
	if err_count >= 50:
		err = err/err_count
		errs.append(err)
    # NOTE(review): interior of a plotting routine whose def is not visible.
    # Draw the image `im` over [-3,3]^2; the flipped y extent (3, -3)
    # matches image row order. Presumably `im` is a rendered decision
    # surface -- confirm against the caller.
    plt.imshow(im, extent=[-3, 3, 3, -3])
    # Overlay the data points, one colour per class label (0..3);
    # x appears to be indexed (batch, channel, feature, sample).
    for j in range(4):
        plt.scatter(x[0, 0, 0, yl == j],
                    x[0, 0, 1, yl == j],
                    c=colors[j],
                    edgecolors='k',
                    lw=1,
                    s=10)

    # Strip tick labels and pin the axes to the image extent.
    plt.xticks([])
    plt.yticks([])
    plt.xlim(-3, 3)
    plt.ylim(-3, 3)

# Load four pretrained 2-feature / 4-class ClassifierGenerator variants:
# the base (N=400) model, N=20, N=100, and the difficulty-4 model.
net2_4_400_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_400_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-base.pth"))

net2_4_20_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_20_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-N20.pth"))

net2_4_100_1 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_100_1.load_state_dict(
    torch.load("models/classifier-generator-2-4-N100.pth"))

net2_4_100_4 = ClassifierGenerator(2, 4, 384).cuda()
net2_4_100_4.load_state_dict(
    torch.load("models/classifier-generator-2-4-diff4.pth"))
# BUGFIX: removed a stray module-level "return err" left over from a pasted
# function body -- "return" outside a function is a SyntaxError.


# Collect every dataset under data/ that has at most 16 distinct labels,
# keeping the name (path minus "data/" and ".npz"), features, and int labels.
data_names = []
data_x = []
data_y = []

for path in glob.glob("data/*.npz"):
    archive = np.load(path)
    labels = archive['y']
    if np.unique(labels).shape[0] <= 16:
        # Strip the "data/" prefix and ".npz" suffix.
        data_names.append(path[5:-4])
        data_x.append(archive['x'].copy())
        data_y.append(labels.copy().astype(np.int32))

# Leave-one-dataset-out fine-tuning: for each dataset, fine-tune a fresh
# pretrained 128-feature / 16-class generator on every OTHER dataset that
# has at least 120 samples, appending the per-step loss to a log file.
for didx in range(len(data_names)):
    net = ClassifierGenerator(FEATURES=128, CLASSES=16, NETSIZE=384).cuda()
    net.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))

    # Training pool: all datasets except the held-out one (didx), restricted
    # to datasets with >= 120 rows.
    tdx = [data_x[j] for j in range(len(data_names))
           if j != didx and data_x[j].shape[0] >= 120]
    tdy = [data_y[j] for j in range(len(data_names))
           if j != didx and data_x[j].shape[0] >= 120]

    for i in range(20):
        err = trainingStep(net, 100, 20, tdx, tdy)
        # BUGFIX: the log file was opened on every iteration and never
        # closed; use a context manager so each append is flushed and the
        # handle released.
        with open("training_curves/finetuning-%s.txt" % data_names[didx],
                  "a") as f:
            f.write("%d %.6g\n" % (i, err))
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import roc_auc_score
import xgboost as xgb

import warnings

def fxn():
    """Emit a DeprecationWarning; exists only to exercise warning filters."""
    warnings.warn("deprecated", DeprecationWarning)
        
# Call fxn() with all warnings suppressed, so its DeprecationWarning does
# not clutter the output (filters are restored when the block exits).
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    fxn()

# Pretrained generators for the 128-feature and 32-feature, 16-class settings.
net128_16 = ClassifierGenerator(128, 16, NETSIZE=384).cuda()
net128_16.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))
net32_16 = ClassifierGenerator(32, 16, NETSIZE=384).cuda()
net32_16.load_state_dict(torch.load("models/classifier-generator-32-16.pth"))

# Map dataset file path -> LaTeX table label (with \cite commands).
# NOTE(review): this dict literal is truncated in this chunk -- it is never
# closed before the next pasted fragment begins.
dataset_descriptions = {
	"data/immunotherapy.npz": "Immunotherapy\\cite{khozeimeh2017expert, khozeimeh2017intralesional}",
	"data/foresttype.npz": "Forest type\\cite{johnson2012using}",
	"data/winetype.npz" : "Wine type\\cite{forina1990parvus}",
	"data/cryotherapy.npz" : "Cryotherapy\\cite{khozeimeh2017expert, khozeimeh2017intralesional}",
	"data/chronic-kidney.npz" : "Chronic kidney\\cite{chronickidney}",
	"data/echocardiogram.npz" : "Echocardiogram\\cite{echocardiogram}",
	"data/haberman.npz" : "Haberman\\cite{haberman1976generalized}",
	"data/iris.npz" : "Iris\\cite{fisher1936use}",
	"data/hcc-survival.npz" : "HCC Survival\\cite{santos2015new}",
	"data/horse-colic.npz" : "Horse Colic\\cite{horsecolic}",
    # NOTE(review): orphaned tail of a trainingStep-style function, pasted
    # into the middle of the unclosed dict above; batch_mem, batch_test,
    # batch_label and class_count are not defined in this chunk.
    net.zero_grad()
    p = net.forward(batch_mem, batch_test, class_count)
    # Mean negative log-likelihood against one-hot labels (assumes p holds
    # log-probabilities -- TODO confirm).
    loss = -torch.sum(p * batch_label, 1).mean()
    loss.backward()
    net.adam.step()
    # Pre-0.4 PyTorch scalar-extraction idiom.
    err = loss.cpu().data.numpy()[0]

    return err


# Echocardiogram, blood transfusion, autism
# NOTE(review): these three datasets are loaded but not used anywhere in
# this visible fragment -- presumably consumed past the truncation point.
echocardio = np.load("data/echocardiogram.npz")
bloodtransfusion = np.load("data/bloodtransfusion.npz")
autism = np.load("data/autism.npz")

# Train a 128-feature / 16-class generator with difficulty sampled from
# [0.5, 1.5] x difficulty_level.
net = ClassifierGenerator(FEATURES=128, CLASSES=16, NETSIZE=384).cuda()

difficulty_level = 0.0125
errs = []

# Running-sum accumulators for the mean training loss.
err = 0
err_count = 0

for i in range(100000):
    err += trainingStep(net,
                        100,
                        min_difficulty=difficulty_level * 0.5,
                        max_difficulty=difficulty_level * 1.5)
    err_count += 1

    # NOTE(review): truncated -- the body of this conditional (which would
    # fire at steps 5000, 15000, ...) is missing from this chunk.
    if i % 10000 == 5000:
# Example #6
# 0
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import roc_auc_score
import xgboost as xgb

import warnings


def fxn():
    """Emit a DeprecationWarning; exists only to exercise warning filters."""
    warnings.warn("deprecated", DeprecationWarning)


# Call fxn() with all warnings suppressed, so its DeprecationWarning does
# not clutter the output (filters are restored when the block exits).
with warnings.catch_warnings():
    warnings.simplefilter("ignore")
    fxn()

# Pretrained generators for the 32-feature and 128-feature, 16-class settings.
net32_16 = ClassifierGenerator(32, 16, 384).cuda()
net32_16.load_state_dict(torch.load("models/classifier-generator-32-16.pth"))

net128_16 = ClassifierGenerator(128, 16, 384).cuda()
net128_16.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))

# Factory callables for each classifier under comparison; each entry is
# invoked with no arguments to construct a fresh estimator.
methods = [
    lambda: SVC(kernel='linear', C=1, probability=True),  # linear-kernel SVM
    lambda: SVC(kernel='rbf', C=1, probability=True),     # RBF-kernel SVM
    RandomForestClassifier,                               # default random forest
    lambda: xgb.XGBClassifier(n_jobs=64),                 # gradient boosting
    KNeighborsClassifier,                                 # default k-NN
    lambda: NetworkSKL(net32_16, ensemble=30),            # 32-feature generator
    lambda: NetworkSKL(net128_16, ensemble=30),           # 128-feature generator
]

# Sweep Feat
# NOTE(review): opened in binary mode ("wb") although the name suggests a
# text log, and the handle is never closed within this visible chunk -- the
# writes presumably happen past the truncation point; confirm the mode.
f = open("results/Feat-128-16-100.txt", "wb")
# Example #7
# 0
        # NOTE(review): fragment -- begins mid-loop inside a timing function
        # whose header is not visible; alltrain_x/alltrain_y/alltest_x/
        # alltest_y are initialised above this chunk.
        alltrain_x.append(train_x)
        alltrain_y.append(train_y)
        alltest_x.append(test_x)
        alltest_y.append(test_y)

    # Time a single evaluation of every method over the assembled splits.
    t0 = time.time()
    results = [
        evalClassifier(m, alltrain_x, alltrain_y, alltest_x, alltest_y)
        for m in methods
    ]
    t1 = time.time()

    # Elapsed wall-clock seconds (the per-method results are discarded here).
    return t1 - t0


# CPU-resident 128-feature / 16-class generator for the timing benchmark.
net = ClassifierGenerator(128, 16, 384).cpu()
net.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))
netskl = NetworkSKL(net)

# Classifier factories to time; the brackets continue the list implicitly,
# so the original backslash continuations are unnecessary.
methods = [
    lambda: SVC(kernel='linear', probability=True),
    lambda: SVC(kernel='rbf', probability=True),
    RandomForestClassifier,
    lambda: xgb.XGBClassifier(n_jobs=12),
    KNeighborsClassifier,
    lambda: NetworkSKL(net, cuda=False),
    lambda: NetworkSKL(net, cuda=True),
]

# [n_train, n_test] size pairs swept by the timing benchmark.
values = [
    [100, 400],
    [200, 400],
    [400, 400],
    [100, 800],
    [100, 1600],
    [100, 3200],
    [100, 6400],
]

# NOTE(review): a .tex file opened in binary mode ("wb"), and the handle is
# never closed within this visible chunk -- usage lies past the truncation
# point; confirm the mode and add a close/with in the full script.
f = open("results/timings.tex", "wb")
# Example #8
# 0
    # NOTE(review): fragment -- the opening of this trainingStep-style body
    # (including the batch_mem line this forward pass relies on) is missing
    # from this chunk.
    batch_test = tovar(
        np.array(batch_test).transpose(0, 2, 1).reshape(BS, 1, FEATURES, 100))
    batch_label = tovar(np.array(batch_label).transpose(0, 2, 1))
    class_count = torch.cuda.FloatTensor(np.array(class_count))

    net.zero_grad()
    p = net.forward(batch_mem, batch_test, class_count)
    # Mean negative log-likelihood against one-hot labels (assumes p holds
    # log-probabilities -- TODO confirm).
    loss = -torch.sum(p * batch_label, 1).mean()
    loss.backward()
    net.adam.step()
    # Pre-0.4 PyTorch scalar-extraction idiom.
    err = loss.cpu().data.numpy()[0]

    return err


# Train a fresh 2-feature / 4-class generator at a fixed difficulty of 1.0,
# with feature/class variation disabled.
net = ClassifierGenerator(FEATURES=2, CLASSES=4, NETSIZE=384).cuda()

difficulty_level = 1.0
errs = []

# Running-sum accumulators for the windowed mean loss.
err = 0
err_count = 0

# NOTE(review): the loop body continues past the end of this chunk (the
# averaging/reset logic is not visible here).
for i in range(40000):
    err += trainingStep(net,
                        100,
                        min_difficulty=difficulty_level,
                        max_difficulty=difficulty_level,
                        feature_variation=False,
                        class_variation=False)
    err_count += 1