Example no. 1
def rodarAdaline(inputTreinamento, outputTreinamento):
    log.print(">> Adaline")
    adalines = [None]*5
    for e in range(5):
        log.print(f">> Treinamento {e + 1}")
        adaline = Adaline(log, len(inputTreinamento[0]))
        adaline.train(inputTreinamento, outputTreinamento, e + 1)
        adalines[e] = adaline
    testar(adalines, False)
Example no. 2
def main(train=False):
    digit_arrays = {
        str(digit): [
            load_image(f"{BASE_PATH}{digit}_{index}.png")
            for index in range(HOW_MANY)
        ]
        for digit in range(DIGITS)
    }

    flat_arrays = {
        digit: list(map(image_to_bipolar_array, images))
        for digit, images in digit_arrays.items()
    }

    learning_rate = 0.0005
    network = Adaline(labels=list(digit_arrays.keys()),
                      learning_rate=learning_rate)

    if train:
        for label, sources in flat_arrays.items():
            for source in sources:
                network.add_sources(source, label)

        max_cycles = 200
        random_starting_weights = False
        weight_adjustment_tolerance = None
        square_error_tolerance = None

        print(f"Max cycles: {max_cycles}\n--------------------------")
        network.train(random_starting_weights=random_starting_weights,
                      max_cycles=max_cycles,
                      weight_adjustment_tolerance=weight_adjustment_tolerance,
                      square_error_tolerance=square_error_tolerance,
                      verbose=True)

        network.save_neurons('neurons.pkl')

    else:
        network.load_neurons('neurons.pkl')

    while True:
        # test_image = draw.get_character()
        test_image = load_image(input("image name:\n>>"))

        if test_image is None:
            break

        flat = image_to_bipolar_array(test_image)
        out = network.output(flat)

        for key, value in out.items():
            print(f"{key}: {value:.3f}")
Example no. 3
def main():
    sources_list = [[1, 1], [-1, 1], [1, -1], [-1, -1]]
    targets = [1, 1, 1, -1]

    square_errors = {}

    learning_rates = [1e-4, 1e-3, 1e-2, 1e-1, 2e-1, 3e-1, 0.35, 0.4]
    weight_adjustment_tolerance = None
    square_error_tolerance = None
    max_cycles = 50

    network = Adaline(activation_function=activation_function)
    for sources, target in zip(sources_list, targets):
        network.add_sources(sources, target)

    for learning_rate in learning_rates:
        network.learning_rate = learning_rate

        network.train(random_starting_weights=False,
                      max_cycles=max_cycles,
                      weight_adjustment_tolerance=weight_adjustment_tolerance,
                      square_error_tolerance=square_error_tolerance)

        print(
            f">>Learning rate: {learning_rate}\n\n"
            f"Final weights:\n"
            f"{[float(f'{weigth:.5f}') for weigth in network.neuron.weights]}\n"
            f"Final bias:\n"
            f"{network.neuron.bias:.5f}\n\n"
            f"Cycles: {network.cycles}\n"
            f"Final square error: {network.total_square_error_by_cycle[-1]:.5f}\n\n\n"
        )

        square_errors[learning_rate] = network.total_square_error_by_cycle

    curves = []
    for learning_rate, square_error in square_errors.items():
        curves.append(
            plt.plot(range(len(square_error)),
                     square_error,
                     '--',
                     linewidth=2,
                     label=str(learning_rate))[0])
    plt.ylim([-0.1, 4])
    plt.legend(handles=curves)
    plt.show()
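# Note: the activation_function passed to Adaline above is not shown in this
# example; given the bipolar (±1) targets, a sign function is a plausible
# assumption:
def activation_function(u):
    return 1 if u >= 0 else -1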
Example no. 4
def adaline_implementation(targets_train, targets_test, patterns_train,
                           patterns_test, plot, d3):
    a = Adaline()
    max_epochs = int(input('Maximum number of epochs: '))
    learning_rate = float(input('Learning rate: '))
    min_mse = float(input('Minimum error: '))
    weights = a.train(max_epochs, patterns_train,
                      targets_train, learning_rate, min_mse, plot, d3)
    # if plot == False:
    guesses = a.test(weights, patterns_test, targets_test)
    a.plot_accuracy(targets_test, guesses)
Example no. 5
### blue ----> Versicolor (1)
cm_bright = ListedColormap(['#FF0000', '#0000FF'])
plt.scatter([sum(r) for index, r in df.iterrows()],
            x[:, 3],
            c=y,
            cmap=cm_bright)
plt.scatter(None, None, color='r', label='Versicolor')
plt.scatter(None, None, color='b', label='Setosa')
plt.legend()
plt.title('Dataset visualization (Iris flowers)')
plt.savefig('train.png')

# Adaline with 4 inputs
adaline = Adaline(4)
# Training
adaline.train(x, y)

## Test 1
A = [0.4329, -1.3719, 0.7022, -0.8535]  # Versicolor (1)
predict = adaline.predict(A)
print('## Test 1')
print('Input: ', A)
print('Expected class: Versicolor (1)')
if predict == 1:
    print('Prediction: Versicolor (1)')
else:
    print('Prediction: Setosa (-1)')
#=> 1

## Test 2
B = [0.3024, 0.2286, 0.8630, 2.7909]  # Setosa (-1)
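# The example breaks off here; a completion of Test 2, mirroring Test 1 above,
# would look like this:
predict = adaline.predict(B)
print('## Test 2')
print('Input: ', B)
print('Expected class: Setosa (-1)')
if predict == 1:
    print('Prediction: Versicolor (1)')
else:
    print('Prediction: Setosa (-1)')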
Example no. 6
secondGroupOutput = [-1] * nSamplesPerGroup

inputData = np.concatenate((firstGroupInput, secondGroupInput))
outputData = np.concatenate((firstGroupOutput, secondGroupOutput))

trainIndexes, testIndexes = separateIndexesByRatio(2 * nSamplesPerGroup,
                                                   trainSamplesRatio)
random.shuffle(trainIndexes)

# %% Initialize and Train Adaline

adaline = Adaline([0] * adalineDimension, 0.1, lambda x: 1 if x >= 0 else -1)

xTrain = inputData[trainIndexes]
yTrain = outputData[trainIndexes]
adaline.train(xTrain, yTrain, tol, maxIterations)

# %% Test
xTest = inputData[testIndexes]
yTest = outputData[testIndexes]
testResult = adaline.test(xTest, yTest)
print(f"Mean Squared Error: {testResult}")

# %% Plot
adalineApproxYArr = adaline.evaluate(inputData)

weights = adaline.getWeights()


def hyperPlan(x):
    # Decision boundary weights[0]*x + weights[1]*y + weights[2] = 0, solved
    # for y (assuming the third weight holds the bias term).
    return -(weights[0] * x + weights[2]) / weights[1]
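# The example appears truncated here; a minimal continuation that draws the
# samples and the boundary traced by hyperPlan might look like this (it
# assumes the first two columns of inputData are the plotted features):
import matplotlib.pyplot as plt

boundaryX = np.linspace(inputData[:, 0].min(), inputData[:, 0].max(), 100)
plt.scatter(inputData[:, 0], inputData[:, 1], c=outputData)
plt.plot(boundaryX, [hyperPlan(x) for x in boundaryX], 'k--',
         label='Adaline boundary')
plt.legend()
plt.show()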
Example no. 7
# Shuffle
x = np.arange(len(d))
np.random.shuffle(x)
X_new = X[x]
d_new = d[x]


#print(d)
#print(d_new)
X_base_de_treinamento = X_new[:155,:]
d_base_de_treinamento = d_new[:155,:]
X_base_de_testes = X_new[155:,:]
d_base_de_testes = d_new[155:,:]

p = Adaline(len(X_base_de_treinamento[0]), epochs=1000)
p.train(X_base_de_treinamento, d_base_de_treinamento)
p.printMatrizparaMatriz(X_base_de_testes,d_base_de_testes)
p.printValoresParaPlanilha()
#p.printWeights

#p.restartWeights
#plt.xlim(-1,3)
#plt.ylim(-1,3)
#for i in range(len(d)):
#    if d[i] == 1:
#        plt.plot(X[i, 0], X[i, 1], 'ro')
#    else:
#        plt.plot(X[i, 0], X[i, 1], 'bo')
#f = lambda x: (p.weights[0]/p.weights[2]) - (p.weights[1]/p.weights[2] * x)
#vxH = list(range(-1,3))
Example no. 8
    for learning_rate in [0.2, 0.02, 0.002, 0.0002, 0.00002, 0.000002]:
        print("Testing learning rate = %f" % learning_rate)
        data_indices = [idx for idx in range(data_instances.shape[0])]
        # 10-fold cross validation
        fold_size = data_instances.shape[0] // 10
        total_performance = 0.0
        for holdout_fold_idx in range(10):
            # training_indices = data_indices - holdout_fold indices
            training_indices = np.array(
                np.setdiff1d(
                    data_indices,
                    data_indices[fold_size * holdout_fold_idx:
                                 fold_size * holdout_fold_idx + fold_size]))
            # test_indices = holdout_fold indices
            test_indices = np.array([
                i for i in range(fold_size * holdout_fold_idx,
                                 fold_size * holdout_fold_idx + fold_size)
            ])

            model = Adaline(20.0, learning_rate)
            # Train the model
            model.train(data_instances[training_indices])
            # Test performance on the held-out fold
            predictions = model.predict(data_instances[test_indices, :-1])
            total_performance += (
                sum(predictions == data_instances[test_indices, -1]) /
                float(test_indices.shape[0]))
        print("Average overall classification rate: %f" %
              (total_performance / 10))
Example no. 9
for i in range(len(errors_d2)):
    ax2.plot(errors_d2[i], color=COLORS[i], label='eta = ' + str(etas[i]))
ax2.title.set_text('Learning Rate (Dataset 2)')

plt.legend()
plt.show()

# b. ADALINE
print('=== ADALINE =====')
# - DATASET 1
errors_d1, etas = [], [0.001, 0.003, 0.005, 0.007, 0.009]
for eta in etas:
    adal_d1 = Adaline(2)
    print('(D1) Training (eta =', eta, ')')
    errors_d1.append(adal_d1.train(x1, y1, eta))
print('(D1) FINISHED\n===')

# - DATASET 2
errors_d2, etas = [], [0.001, 0.003, 0.005, 0.007, 0.009]
for eta in etas:
    adal_d2 = Adaline(4)
    print('(D2) Training (eta =', eta, ')')
    errors_d2.append(adal_d2.train(x2, y2, eta))
print('(D2) FINISHED\n===')

# - plotting the learning curves for Adaline
fig = plt.figure()
ax1 = fig.add_subplot(121)
ax2 = fig.add_subplot(122)
plt.suptitle('Adaline')
Example no. 10
def main():
    xs = [0.0, 0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0]
    ys = [2.26, 3.8, 4.43, 5.91, 6.18, 7.26, 8.15, 9.14, 10.87, 11.58, 12.55]

    a_regression, b_regression, correlation_coefficient, _, regression_standard_error = linregress(
        xs, ys)
    regression_equation = f"y={a_regression:.5f}*x+{b_regression:.5f}"
    ys_regression = [a_regression * x + b_regression for x in xs]

    determination_coefficient = correlation_coefficient**2

    print(f"Regression equation: {regression_equation}")
    print(f"r = {correlation_coefficient:.5f}")
    print(f"r² = {determination_coefficient:.5f}")
    print(f"σ = {regression_standard_error:.5f}\n")

    learning_rate = 0.0015
    list_max_cycles = [100, 200, 500, 1000]
    random_starting_weights = False
    weight_adjustment_tolerance = None
    square_error_tolerance = None

    network = Adaline(learning_rate=learning_rate)
    for source, target in zip(xs, ys):
        network.add_sources([source], target)

    adaline_plots = []

    for max_cycles in list_max_cycles:
        print(f"Max cycles: {max_cycles}\n--------------------------")
        network.train(random_starting_weights=random_starting_weights,
                      max_cycles=max_cycles,
                      weight_adjustment_tolerance=weight_adjustment_tolerance,
                      square_error_tolerance=square_error_tolerance,
                      verbose=False)

        a_adaline, b_adaline = network.neuron.weights[0], network.neuron.bias
        adaline_equation = f"y={a_adaline:.5f}*x+{b_adaline:.5f}"
        ys_adaline = [a_adaline * x + b_adaline for x in xs]

        total_square_error = sum([(y - y_line)**2
                                  for y, y_line in zip(ys, ys_adaline)])

        adaline_standard_error = (total_square_error / len(ys))**0.5

        print(f"Adaline equation: {adaline_equation}\n")

        print(
            f"Difference for a coefficient: {abs(a_adaline - a_regression):.5f}"
        )
        print(
            f"Difference for b coefficient: {abs(b_adaline - b_regression):.5f}"
        )
        print(f"σ = {adaline_standard_error}\n-----------------------\n")

        adaline_plots.append(
            plt.plot(xs,
                     ys_adaline,
                     linestyle='--',
                     linewidth=3,
                     label=f"Cycles: {max_cycles}",
                     zorder=1)[0])

    regression_plot, = plt.plot(xs,
                                ys_regression,
                                color='blue',
                                linestyle='-',
                                linewidth=5,
                                label=f"Regression: {regression_equation}",
                                zorder=0)

    scatter_plot = plt.scatter(xs,
                               ys,
                               color='black',
                               marker='x',
                               s=80,
                               label='Source points',
                               zorder=2)
    plt.legend(handles=[scatter_plot, *adaline_plots, regression_plot])

    plt.show()
Example no. 11
import numpy as np
from adaline import Adaline

x_train = np.random.randn(1000, 2)
x_test = np.random.randn(100, 2)
w = np.array([2, 16])
b = 18
y_train = np.dot(x_train, w) + b
y_test = np.dot(x_test, w) + b

adaline = Adaline(x_train.shape[1], 1e-3)

epochs = 10
batch_size = 10
for epoch in range(epochs):
    for batch_idx in range(int(np.ceil(x_train.shape[0] / batch_size))):
        batch_start_idx = batch_idx * batch_size
        batch_end_idx = batch_start_idx + batch_size
        if batch_end_idx > x_train.shape[0]:
            batch_end_idx = x_train.shape[0]

        adaline.train(x_train[batch_start_idx:batch_end_idx],
                      y_train[batch_start_idx:batch_end_idx])

print("weights:", adaline.weights)
print("bias:", adaline.bias)
print("root mean squared error on training set:",
      adaline.root_mean_squared_error(x_train, y_train))
print("root mean squared error on testing set:",
      adaline.root_mean_squared_error(x_test, y_test))
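# A minimal sketch of the Adaline class imported above, matching the calls in
# this example (constructor(n_inputs, learning_rate), batch train, weights and
# bias attributes, root_mean_squared_error); the averaged LMS/delta-rule
# update is standard, everything else is an assumption.
class Adaline:
    def __init__(self, n_inputs, learning_rate):
        self.weights = np.zeros(n_inputs)
        self.bias = 0.0
        self.learning_rate = learning_rate

    def _net_input(self, x):
        return np.dot(x, self.weights) + self.bias

    def train(self, x_batch, y_batch):
        # Delta rule on the batch: w += lr * mean(error * x), b += lr * mean(error)
        errors = y_batch - self._net_input(x_batch)
        self.weights += self.learning_rate * np.dot(errors, x_batch) / len(x_batch)
        self.bias += self.learning_rate * errors.mean()

    def root_mean_squared_error(self, x, y):
        return float(np.sqrt(np.mean((y - self._net_input(x)) ** 2)))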
Example no. 12
# Splitting testDataset into input values and expected results
testInputs = testDataset[:, 0:(len(testDataset[0])-1)]
testOutputs = testDataset[:, (len(testDataset[0])-1):]

# Creating the Adaline
a = Adaline(len(trainInputs[0]), epochs=1000, learning_rate=0.0025, precision=0.000001)

# Saving previous weights
oldWeights = ';'.join(['%.8f' % num for num in a.weights])

# Training the Adaline
qntEpochs = a.train(trainInputs, trainOutputs)

# Saving new weights
newWeights = ';'.join(['%.8f' % num for num in a.weights])

# Printing old and new weights, and the number of epochs used
print(f'Adaline - Previous weights: {oldWeights}')
print(f'Adaline - Current weights: {newWeights}')
print(f'Adaline - Number of epochs used: {qntEpochs}')
print('')

# Predicting values
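# The example is truncated at this point; a plausible continuation, assuming a
# predict method on this Adaline class (hypothetical name):
for inputs, expected in zip(testInputs, testOutputs):
    predicted = a.predict(inputs)  # predict() is an assumed API
    print(f'Input: {inputs} -> Predicted: {predicted}, Expected: {expected}')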
Example no. 13
treino['d'] = treino['d'].astype('int')

teste = pd.read_csv('dados/dataset-teste.csv')

Y = treino['d'].values.copy()
X = treino[['x1', 'x2', 'x3', 'x4']].values.copy()

Resultado = teste.copy()
Treinamentos = pd.DataFrame()
hist_EQM = []
for i in range(5):

    activation_function = Adaline.sign_function
    adaline = Adaline(X, Y, 0.0025, activation_function)
    wi, wf, ep, EQM = adaline.train()
    hist_EQM.append(EQM)
    wi = [round(x, 4) for x in wi]
    wf = [round(x, 4) for x in wf]
    w = np.concatenate(([wi], [wf], [[ep]]), axis=1)

    Treino = pd.DataFrame(data=w,
                          columns=[
                              'Wi0', 'Wi1', 'Wi2', 'Wi3', 'Wi4', 'Wf0', 'Wf1',
                              'Wf2', 'Wf3', 'Wf4', 'N_Epocas'
                          ],
                          index=[f'T{i+1}'])
    Treinamentos = pd.concat([Treinamentos, Treino])

    resultado_parcial = pd.DataFrame(columns=[f'T{i+1}'])
    respostas = []
Example no. 14
lenTrain = int(0.8 * len(dataset_Iris))  # Use 80% of the data for training and 20% for testing

X_Iris = X_new[0:lenTrain, 0:4]
D_Iris = D_new[0:lenTrain, 4:]

X_IrisTest = X_new[lenTrain:, 0:4]
D_IrisTest = D_new[lenTrain:, 4:]

# Adaline
# Adaline Iris
print('Running Adaline on the Iris data')
a_Iris = Adaline(
    len(X_Iris[0]), epochs=1000,
    labelGraphic='Adaline - Iris')  # epochs: number of training iterations
a_Iris.train(X_Iris, D_Iris)
a_Iris.testNetwork(X_IrisTest, D_IrisTest)
a_Iris.Graphic()
a_Iris.showGraphic()
print('')
print('')

# Adaline Rochas (rocks dataset)
print('Running Adaline on the Rochas data')

a_Rocha = Adaline(
    len(X_Rochas[0]), epochs=1000,
    labelGraphic='Adaline - Rochas')  # epochs: number of training iterations
a_Rocha.train(X_Rochas, D_Rochas)
a_Rocha.testNetwork(X_Rocha_teste, D_Rocha_teste)
a_Rocha.Graphic()
Example no. 15
import pandas as pd
import matplotlib.pyplot as plt

from activation_functions import SignFunction
from adaline import Adaline

# Database dataset-treinamento (training set):
dataset = pd.read_csv('database/dataset-treinamento_ADALINE.csv')
X = dataset.iloc[:, 0:4].values
d = dataset.iloc[:, 4:].values

adaline = Adaline(
    X, d, 0.0025, 10**(-6), SignFunction
)  # inputs, targets, learning rate, precision, and activation function
values_eqm = adaline.train()

# Database dataset-teste (test set):
dataset = pd.read_csv('database/dataset-teste_ADALINE.csv')
X_teste = dataset.iloc[:, 0:4].values

for x in X_teste:
    y = adaline.evaluate(x)
    print(f'Input: {x}, Output: {y}')

# Plotting the training error (MSE) curve
plt.plot(values_eqm)
plt.show()
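# A minimal sketch of the SignFunction imported above; its exact interface in
# activation_functions is not shown, so this plain-callable form is an
# assumption:
def SignFunction(u):
    return 1 if u >= 0 else -1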