	loss.backward()                    # backpropagate through the loss
	net.adam.step()                    # the network carries its own Adam optimizer as net.adam
	err = loss.cpu().data.numpy()[0]   # scalar loss value (old-style PyTorch; equivalent to loss.item())
	
	return err
		
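# Build a generator for 2-feature, 4-class problems and move it to the GPU.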
net = ClassifierGenerator(FEATURES=2, CLASSES=4, NETSIZE=384).cuda()

difficulty_level = 1.0
errs = []

err = 0
err_count = 0

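# Base training run: every step uses the same fixed difficulty; the loss is
# averaged over 50 steps before being logged and the model saved.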
for i in range(40000):	
	err += trainingStep(net, 400, min_difficulty = difficulty_level, max_difficulty = difficulty_level, feature_variation = False, class_variation = False)
	err_count += 1
			
	if err_count >= 50:
		err = err/err_count
		errs.append(err)
		
		f = open("training_curves/training2-base.txt","a")
		f.write("%d %.6g %.6g\n" % (i, err, difficulty_level))
		f.close()
		
		err = 0
		err_count = 0
		
		torch.save(net.state_dict(),open("models/classifier-generator-2-4-base.pth","wb"))
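
# Next stage (a separate run): train a larger generator, presumably the
# FEATURES=128, CLASSES=16 model judging by the checkpoint names below,
# over a band of difficulties around difficulty_level.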
difficulty_level = 0.0125
errs = []

err = 0
err_count = 0

for i in range(100000):
    err += trainingStep(net,
                        100,
                        min_difficulty=difficulty_level * 0.5,
                        max_difficulty=difficulty_level * 1.5)
    err_count += 1

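    # Save an intermediate checkpoint every 10000 steps (at i = 5000, 15000, ...).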
    if i % 10000 == 5000:
        torch.save(
            net.state_dict(),
            open("ckpt/classifier-generator-128-16-ckpt%d.pth" % i, "wb"))

    if err_count >= 50:
        err = err / err_count
        errs.append(err)

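        # Wrap the current network in an sklearn-style interface and measure
        # its AUC on real datasets (echocardio, bloodtransfusion).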
        methods = [lambda: NetworkSKL(net)]
        results1 = compareMethodsOnSet(methods,
                                       echocardio['x'],
                                       echocardio['y'].astype(np.int32),
                                       samples=200)
        auc1 = results1[0][1]
        results2 = compareMethodsOnSet(methods,
                                       bloodtransfusion['x'],
                                       bloodtransfusion['y'].astype(np.int32),
                                       samples=200)
        auc2 = results2[0][1]

        # Log the averaged error and AUCs (this curve filename is an assumed
        # placeholder), then save under the name the fine-tuning script below loads.
        f = open("training_curves/training-128-16.txt", "a")
        f.write("%d %.6g %.6g %.6g\n" % (i, err, auc1, auc2))
        f.close()

        err = 0
        err_count = 0

        torch.save(net.state_dict(),
                   open("models/classifier-generator-128-16.pth", "wb"))
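
# Fine-tuning on real data: collect every dataset under data/ that has at
# most 16 classes (to fit the CLASSES=16 generator).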
data_names = []
data_x = []
data_y = []

for file in glob.glob("data/*.npz"):
    data = np.load(file)
    if np.unique(data['y']).shape[0] <= 16:
        data_names.append(file[5:-4])
        data_x.append(data['x'].copy())
        data_y.append(data['y'].copy().astype(np.int32))

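# For each dataset, fine-tune a fresh copy of the pretrained generator using
# only the *other* datasets (leave-one-out), keeping the target set unseen.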
for didx in range(len(data_names)):
    net = ClassifierGenerator(FEATURES=128, CLASSES=16, NETSIZE=384).cuda()
    net.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))

    tdx = []
    tdy = []

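    # Gather all other datasets that have at least 120 samples as fine-tuning data.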
    for didx2 in range(len(data_names)):
        if didx2 != didx:
            if data_x[didx2].shape[0] >= 120:
                tdx.append(data_x[didx2])
                tdy.append(data_y[didx2])

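    # Run 20 fine-tuning steps on the real data, logging the error after each step.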
    for i in range(20):
        err = trainingStep(net, 100, 20, tdx, tdy)
        f = open("training_curves/finetuning-%s.txt" % data_names[didx], "a")
        f.write("%d %.6g\n" % (i, err))
        f.close()

    torch.save(
        net.state_dict(),
        open("models/classifier-generator-128-16-%s.pth" % data_names[didx],
             "wb"))

# Final stage (a separate run): broaden the 2-feature, 4-class generator by
# training across variable sample counts (20 to 400) and difficulties
# (0.25 to 4.0); the result is saved as the "general" model below.
errs = []

err = 0
err_count = 0

for i in range(40000):
	err += trainingStep(net, 20+np.random.randint(380), min_difficulty = 0.25, max_difficulty = 4.0, feature_variation = False, class_variation = False)
	err_count += 1
			
	if err_count >= 50:
		err = err/err_count
		errs.append(err)
		
		#methods = [lambda: NetworkSKL(net)]
		#results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=200)
		#auc1 = results1[0][1]
		#results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=200)
		#auc2 = results2[0][1]
		#results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=200)
		#auc3 = results3[0][1]
		
		f = open("training2-general.txt","a")
		f.write("%d %.6g\n" % (i, err))
		f.close()
		
		err = 0
		err_count = 0
		
		torch.save(net.state_dict(),open("classifier-generator-2-4-general.pth","wb"))