Example #1
import torch


def run():
    # config, net, optimizer, loss_function, lr_scheduler, metric, as well as
    # TemplateDataset, NN and DataHandler, are assumed to be defined at module
    # level elsewhere in the project.
    global model
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    # Create dataset
    template_dataset = TemplateDataset(config)
    training_loader, validation_loader, test_loader = template_dataset.get_loaders()

    # Create the neural network
    model = NN(net, optimizer, loss_function, lr_scheduler, metric, device,
               config).to(device)

    # Create the data handler
    data_handler = DataHandler(training_loader, validation_loader, test_loader)

    for epoch in range(config['epochs']):
        # Training
        model.train()
        for x, y in training_loader:
            x, y = x.to(device), y.to(device)
            y_hat = model(x)
            loss = model.backpropagate(y_hat, y)
            result = model.evaluate(y_hat, y)
            data_handler.train_loss.append(loss)
            data_handler.train_metric.append(result)

        with torch.no_grad():
            model.eval()
            # Validating
            if validation_loader is not None:
                for x, y in validation_loader:
                    x, y = x.to(device), y.to(device)
                    y_hat = model(x)
                    _, loss = model.calculate_loss(y_hat, y)
                    result = model.evaluate(y_hat, y)
                    data_handler.valid_loss.append(loss)
                    data_handler.valid_metric.append(result)

            # Testing
            if test_loader is not None:
                for x, y in test_loader:
                    x, y = x.to(device), y.to(device)
                    y_hat = model(x)
                    _, loss = model.calculate_loss(y_hat, y)
                    result = model.evaluate(y_hat, y)
                    data_handler.test_loss.append(loss)
                    data_handler.test_metric.append(result)

        model.lr_scheduler_step()
        data_handler.epoch_end(epoch, model.get_lr())
    data_handler.plot(loss=config['plot']['loss'],
                      metric=config['plot']['metric'])
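The loop above only reads a few keys from config: the epoch count and the nested plot switches forwarded to data_handler.plot. A minimal sketch of such a dictionary, assuming boolean plot flags; any further keys that TemplateDataset or NN expect are project-specific and not shown here:

# Hypothetical minimal config for the run() loop above; values are examples.
config = {
    'epochs': 50,          # number of passes over training_loader
    'plot': {
        'loss': True,      # forwarded to data_handler.plot(loss=...)
        'metric': True,    # forwarded to data_handler.plot(metric=...)
    },
}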
Example #2
        float(string[1]) / 10.0,
        float(string[2]) / 10.0,
        float(string[3]) / 10.0
    ]]
    if string[4].rstrip() == "Iris-setosa":
        x += [[1]]
    else:
        x += [[0]]
    testSet += [x]

trainingFile.close()
testFile.close()

#TRAINING
nn.backPropagation(trainingSet, n, 4, nhidden, 1)

#TESTING
p = 0
for x, t in testSet:

    O = nn.evaluate(x)

    # Count the test as correct when the output is near 1 for a setosa target,
    # or not near 1 for a non-setosa target.
    if (1 - O[0]) < 0.1:
        if t[0] == 1:
            p += 1
    else:
        if t[0] == 0:
            p += 1

print(str(p) + " out of " + str(len(testSet)) + " tests")
Example #3
			nn.saveWeights(sys.argv[5])

else:
	print("Usage:\n   python Area.py training_file learning_rate number_hidden_neurons test_file \n   python Area.py weight_file test_file")
	quit()


#TESTING
p = 0
xr = []
yr = []
xc = []
yc = []

for x,t in testSet:
	O = nn.evaluate(x)
	if O[0] > 0.5:  # threshold 0.5
		if t[0] == 1:
			p += 1
		xr += [x[0]]
		yr += [x[1]]
	elif O[0] <= 0.5:
		if t[0] == 0:
			p += 1
		xc += [x[0]]
		yc += [x[1]]

print(p)

#PLOT
# f1, p2 = plt.subplots()
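The xr/yr and xc/yc lists hold the two input coordinates of each test point, split by predicted class, and the commented-out line suggests they were meant to be drawn on a single figure. A minimal sketch of that plot, assuming matplotlib.pyplot has been imported as plt earlier in the script:

# Hypothetical plotting step for the points collected above.
fig, ax = plt.subplots()
ax.scatter(xr, yr, color='red', label='predicted 1 (O[0] > 0.5)')
ax.scatter(xc, yc, color='blue', label='predicted 0 (O[0] <= 0.5)')
ax.legend()
plt.show()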
Example #4
from nn import NN

nn = NN()

train = [[[0, 0], [0]], [[0, 1], [1]], [[1, 0], [1]], [[1, 1], [0]]]

for x in train:
    print(x)

nn.backPropagation(train, 0.5, 2, 3, 1)

print(nn.evaluate([0, 0]))
print(nn.evaluate([0, 1]))
print(nn.evaluate([1, 0]))
print(nn.evaluate([1, 1]))
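The training set here is the XOR truth table, so after backPropagation with 2 inputs, 3 hidden neurons, and 1 output, the four evaluate calls should print values close to 0, 1, 1, and 0. A minimal sketch of a thresholded sanity check under that assumption, mirroring the 0.5 cutoff used in the Area.py example:

# Hypothetical check: threshold each output at 0.5 and compare to the XOR target.
for x, t in train:
    out = nn.evaluate(x)[0]
    print(x, '->', round(out), 'expected', t[0])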