Example #1
 def __init__(self,
              module,
              etaminus=0.5,
              etaplus=1.2,
              deltamin=1.0e-6,
              deltamax=5.0,
              delta0=0.1,
              **kwargs):
     """ Set up training algorithm parameters, and objects associated with the trainer.
     
         :arg module: the module whose parameters should be trained. 
         :key etaminus: factor by which step width is decreased when overstepping (0.5)
         :key etaplus: factor by which step width is increased when following gradient (1.2)
         :key delta: step width for each weight 
         :key deltamin: minimum step width (1e-6)
         :key deltamax: maximum step width (5.0)
         :key delta0: initial step width (0.1)           
     """
     BackpropTrainer.__init__(self, module, **kwargs)
     self.epoch = 0
     # set descender to RPROP mode and update parameters
     self.descent.rprop = True
     self.descent.etaplus = etaplus
     self.descent.etaminus = etaminus
     self.descent.deltamin = deltamin
     self.descent.deltamax = deltamax
     self.descent.deltanull = delta0
     self.descent.init(module.params)  # reinitialize, since mode changed
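
# A minimal usage sketch for the trainer above (assumption: this __init__
# matches pybrain's RPropMinusTrainer, and the pybrain3 port keeps the same
# module layout; net and ds are placeholders, not from the source):
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import RPropMinusTrainer

net = buildNetwork(2, 4, 1, bias=True)
ds = SupervisedDataSet(2, 1)
ds.addSample((0, 0), (0,))
ds.addSample((0, 1), (1,))
ds.addSample((1, 0), (1,))
ds.addSample((1, 1), (0,))

trainer = RPropMinusTrainer(net, dataset=ds, etaminus=0.5, etaplus=1.2)
trainer.trainEpochs(50)

Example #2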
    def trainingNetwork(self, words_classes_vector_list, number_epoch=1000):
        """
        Метод преобразования слова в векторную форму.
        """
        self.words_classes_vector_list = words_classes_vector_list

        # Neural network parameters
        num_input_neuron = 25
        num_output_neuron = len(words_classes_vector_list[0])
        num_hidden_neuron = int((num_input_neuron + num_output_neuron) * 2 / 3)

        #print("\n Основные параметры нейронной сети:")
        #print(str(num_input_neuron) + " " + str(num_output_neuron) + " " + str(num_hidden_neuron))

        _bias = False
        _hiddenclass = SoftmaxLayer
        _outclass = LinearLayer
        _nb_classes = num_output_neuron
        num_epoch = number_epoch

        # Create the neural network and the datasets
        self.net = buildNetwork(num_input_neuron,
                                num_hidden_neuron,
                                num_output_neuron,
                                bias=_bias,
                                hiddenclass=_hiddenclass,
                                outclass=_outclass)
        norgate = ClassificationDataSet(num_input_neuron,
                                        num_output_neuron,
                                        nb_classes=_nb_classes)
        nortrain = ClassificationDataSet(num_input_neuron,
                                         num_output_neuron,
                                         nb_classes=_nb_classes)

        # Fill the datasets with training samples
        #print("\n Training data: ")
        for elem in words_classes_vector_list[1]:
            word = elem[0]
            _class = elem[1]

            #print(str(elem[2]) + "  " + str(elem[0]) + "  " + str(_class))
            norgate.addSample(tuple(word), tuple(_class))
            nortrain.addSample(tuple(word), tuple(_class))

        # Train the neural network
        trainer = BackpropTrainer(
            self.net, norgate
        )  # , verbose=True, batchlearning=False, momentum= 0.1, learningrate= 0.01, weightdecay= 0.01, lrdecay=1.0
        trnerr, valerr = trainer.trainUntilConvergence(dataset=nortrain,
                                                       maxEpochs=num_epoch,
                                                       verbose=False)

        return (trnerr, valerr)
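# A hypothetical call of the method above (sketch): per the code, element 0
# supplies the class-vector length and element 1 holds
# (word_vector, class_vector, label) entries with 25-value word vectors.
# The object name and data values below are assumptions, not from the source:
#
#   data = [(1.0, 0.0),
#           [([0.2] * 25, (1.0, 0.0), 'word-a'),
#            ([0.7] * 25, (0.0, 1.0), 'word-b')]]
#   trn_err, val_err = classifier.trainingNetwork(data, number_epoch=200)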
        """
Example #3
	def retrain_complete(self, cmp_history_hr, cmp_history_dy):
		res = False

		if len(cmp_history_hr) >= CategoryAdvisor.OBSERVE_LENGTH:
			self.dataset_hr.clear()

			data_length = len(cmp_history_hr)

			for n in range(data_length):
				if n + (CategoryAdvisor.OBSERVE_LENGTH - 1) + CategoryAdvisor.PREDICT_LENGTH < data_length:
					self.dataset_hr.addSample(cmp_history_hr[n:n + CategoryAdvisor.OBSERVE_LENGTH], cmp_history_hr[n + 1:n + 1 + CategoryAdvisor.PREDICT_LENGTH])

			trainer = BackpropTrainer(self.bprnetw_hr, self.dataset_hr)
			trainer.trainEpochs(100)

			res = True

		if len(cmp_history_dy) >= CategoryAdvisor.OBSERVE_LENGTH:
			self.dataset_dy.clear()

			data_length = len(cmp_history_dy)

			for n in range(data_length):
				if n + (CategoryAdvisor.OBSERVE_LENGTH - 1) + CategoryAdvisor.PREDICT_LENGTH < data_length:
					self.dataset_dy.addSample(cmp_history_dy[n:n + CategoryAdvisor.OBSERVE_LENGTH], cmp_history_dy[n + 1:n + 1 + CategoryAdvisor.PREDICT_LENGTH])

			trainer = BackpropTrainer(self.bprnetw_dy, self.dataset_dy)
			trainer.trainEpochs(100)

			res = True

		# report True if either network was retrained
		return res
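
# A standalone sketch of the sliding-window sampling used above, with
# hypothetical values OBSERVE_LENGTH = 4 and PREDICT_LENGTH = 1:
OBSERVE_LENGTH, PREDICT_LENGTH = 4, 1
history = list(range(10))
for n in range(len(history)):
    if n + (OBSERVE_LENGTH - 1) + PREDICT_LENGTH < len(history):
        # input window -> target window (note the target starts one step into
        # the input window, exactly as in the method above)
        print(history[n:n + OBSERVE_LENGTH], '->', history[n + 1:n + 1 + PREDICT_LENGTH])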
Example #4
	def retrain_single(self, value_hr, value_dy):
		# train the hour advisor network
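		# build the new input by dropping the oldest value of the previous input
		# window and appending the previous target as the newest observation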
		inp = numpy.append(self.dataset_hr['input'][-1][1:], self.dataset_hr['target'][-1])
		self.dataset_hr.addSample(inp, [value_hr])

		trainer_hr = BackpropTrainer(self.bprnetw_hr, self.dataset_hr)
		trainer_hr.trainEpochs(100)

		# train the day advisor network
		inp = numpy.append(self.dataset_dy['input'][-1][1:], self.dataset_dy['target'][-1])
		self.dataset_dy.addSample(inp, [value_dy])

		trainer_dy = BackpropTrainer(self.bprnetw_dy, self.dataset_dy)
		trainer_dy.trainEpochs(100)
Example #5
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)  # 2 inputs (hours slept, hours studied), 1 output (grade)

base = (
    # 8 hours slept, 2 hours studied, grade 7.1 on the exam
    ((8, 2), (7.1,)),
    ((10, 1), (2.3,)),
    ((7.5, 3), (8.0,)),
    ((3.5, 10), (2.5,)),
)

# add the training examples above to the dataset
for example in base:
    ds.addSample(example[0], example[1])


# create the neural network
nn = buildNetwork(2, 4, 1)  # 2 input neurons, 4 hidden neurons, 1 output neuron


# set up the trainer with the network and the training dataset
trainer = BackpropTrainer(nn, ds)

# train the network; more training passes lower the error on the training data
for i in range(10000):  # improving the training data is far more effective than raising this number
    print(trainer.train())

# ask the user for hours slept and hours studied, then predict the grade
while True:
    horas_dormidas = float(input('How many hours will the student sleep? '))
    horas_estudadas = float(input('How many hours will the student study? '))
    nota = nn.activate((horas_dormidas, horas_estudadas))
    print(f'The student will score approximately {nota[0]:.2f} points on the exam')

Example #6
net = buildNetwork(7, 40, 2, bias=True)
ds = SupervisedDataSet(7, 2)

# read each image's feature row
for j in range(0,len(datos)):
    data.clear()
    data2.clear()
    for i in range(0,7):
        data.append(datos.iloc[j,i])
        data2.append(datos2.iloc[j,i])
    #print('step: ', data)
    ds.addSample(data, (1,))  # assign features and label
    ds.addSample(data2, (0,))
    #print('step: ', j)
    #print(ds)
    trainer = BackpropTrainer(net, ds)  # train the network by comparing against the expected error
    er = round(trainer.train(), 3)
    while er > 0.112:  # expected error
        er = round(trainer.train(), 3)
        #print(er)

filename = "NNa{0}b{1}.pk1".format(2,3)
pickle.dump(net, open(filename,'wb'))
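
# Reloading the pickled network later (a sketch; the filename matches the save above):
with open("NNa2b3.pk1", "rb") as f:
    net_restored = pickle.load(f)
print(net_restored.activate([0.0] * 7))  # classify a new 7-feature sample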

def preprocesamiento_img(img):
    kernel = np.ones((3, 3), np.uint8)
    hsv_img = cv.cvtColor(img, cv.COLOR_BGR2HSV)
    gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
    hist = cv.equalizeHist(gray, dst=None)

    blue_low = np.array([49, 50, 50])
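Example #7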
from pybrain3.supervised.trainers import BackpropTrainer
from literki import daneWejsciowe
import literki
import siec

litery = [
    "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "U", "M", "L", "O",
    "P", "R", "T", "W", "S"
]
inp = daneWejsciowe['input']
trener = BackpropTrainer(siec.siec,
                         dataset=literki.daneWejsciowe,
                         learningrate=0.1)
trener.trainEpochs(1000)

for i in range(20):
    print(litery[i])
    temp = siec.siec.activate(inp[i])
    for j in range(20):
        print(temp[j])
print("\n")
Example #8
from pybrain3.supervised.trainers import BackpropTrainer
from inputLetters import inputDataSet

import inputLetters
import network

letters = [
    "A", "B", "C", "D", "I", "F", "G", "H", "K", "U", "M", "E", "L", "O", "P",
    "R", "T", "W", "X", "Y"
]

inp = inputDataSet['input']  # Making shortcut to the input section of DataSet

trainer = BackpropTrainer(
    network.network,
    dataset=inputLetters.inputDataSet,
    learningrate=0.05,
    verbose=True,
    momentum=0.1)  # verbose=True prints the error for each epoch

trainer.trainEpochs(5000)  # Training network for X epochs

print("\n\n")
for i in range(20):  # Final print
    print("Dla litery", letters[i], "output wynosi:")
    temp = network.network.activate(inp[i])
    for k in range(20):
        if temp[k] < 0:
            temp[k] *= -1  # take absolute values to make the results easier to read
    for j in range(20):
        print(temp[j])
    print("\n")
for i in range(0, 2):
    if i == 1:
        puntos = maligno
    for j in range(1, Ninput[i]):
        #print(str(i)+':'+str(j))
        #image = cv2.imread("all-mias/mdb076.pgm")
        image = cv2.imread('all-mias/' + str(i) + '/(' + str(j) + ').pgm', 0)
        if image is None:
            print("no se encontro")
        else:
            momentos = procesado(image, puntos[j - 1][0], puntos[j - 1][1],
                                 puntos[j - 1][2])
            #momentos = cal_momentos(img_procesada)
            ds.addSample(momentos, output[i])

trainer = BackpropTrainer(net, ds)

er = round(trainer.train(), 3)
#print(er)
while er > 0.113:  # train until the error falls below the target
    er = round(trainer.train(), 3)
    print(er)

puntos = benigno
for i in range(17, 27):
    img_test = cv2.imread(
        'C:/Users/David VM/Downloads/all-mias/' + str(0) + '/(' + str(i) +
        ').pgm', 0)
    carat = procesado(img_test, puntos[i - 1][0], puntos[i - 1][1],
                      puntos[i - 1][2])
    #carat = cal_momentos(img_p)
Example #10
print('Training set size: %d' % len(train_data))

# split the remaining data between test and validation
test_data, val_data = part_data.splitWithProportion(0.5)
print('Test set size: %d' % len(test_data))
print('Validation set size: %d' % len(val_data))


# In[19]:


from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.supervised.trainers import BackpropTrainer

net = buildNetwork(dataset.indim, 3, dataset.outdim)
trainer = BackpropTrainer(net, dataset=train_data, learningrate=0.01, momentum=0.1, verbose=True)

train_errors, val_errors = trainer.trainUntilConvergence(dataset=train_data, maxEpochs=100)


# In[20]:


import matplotlib.pyplot as plt
get_ipython().run_line_magic('matplotlib', 'inline')

plt.plot(train_errors, 'b', val_errors, 'r')
plt.show()

trainer.totalepochs
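
# A possible follow-up (a sketch, not in the original notebook; assumes the
# pybrain3 port exposes ModuleValidator as pybrain does):
from pybrain3.tools.validation import ModuleValidator

print('MSE on test data:', ModuleValidator.MSE(net, test_data))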
Example #11
#!/usr/local/bin/python3

from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import *
from pybrain3.supervised.trainers import BackpropTrainer
from math import *
import time

test_func = lambda x, k: 0.5 + k * sin(2 * 3.1415 * x / 64)
start_time = time.time()
if __name__ == "__main__":
    print(test_func(3, 5.54))
    net = buildNetwork(2, 3, 1, bias=True)  # 2 inputs, matching the (x, test_func(x, k)) samples below
    ds = SupervisedDataSet(2, 1)

    for x in range(10):
        for k in range(10):
            ds.addSample((x, test_func(x, k)), k)

    train = BackpropTrainer(net, learningrate=0.01, momentum=0.05)
    train.trainOnDataset(ds, 1000)
    train.testOnData()
    print(net.activate((3, 2)))
    # print(test_func(90, 1))
    print('Execution time: %s sec' % (time.time() - start_time))
Example #12
net = buildNetwork(ds_train.indim,
                   HIDDEN_NEURONS_NUM,
                   ds_train.outdim,
                   outclass=SoftmaxLayer)
# ds.indim  -- number of input-layer neurons, equal to the number of features
# ds.outdim -- number of output-layer neurons, equal to the number of class labels
# SoftmaxLayer -- an activation function suited to multi-class classification

# Initialize the network weights so the result is reproducible
init_params = np.random.random(len(net.params))
net._setParameters(init_params)

# pybrain's parameter-fitting module uses random; fix the seed for a reproducible result
np.random.seed(0)
trainer = BackpropTrainer(net, dataset=ds_train)  # initialize the optimizer
err_train, err_val = trainer.trainUntilConvergence(maxEpochs=MAX_EPOCHS)
#line_train = plt.plot(err_train, 'b', err_val, 'r')  # plot the learning curves
#xlab = plt.xlabel('Iterations')
#ylab = plt.ylabel('Error')
#plt.show()

res_train = net.activateOnDataset(ds_train).argmax(axis=1)  # predictions on the training set
print('Error on train: ',
      percentError(res_train, ds_train['target'].argmax(axis=1)),
      '%')  # training error
res_test = net.activateOnDataset(ds_test).argmax(axis=1)  # predictions on the test set
print('Error on test: ',
      percentError(res_test, ds_test['target'].argmax(axis=1)),
      '%')
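Example #13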
if __name__ == "__main__":
    sinuses = []
    cosinuses = []

    net = buildNetwork(3, 6, 1, bias=True)
    ds = SupervisedDataSet(3, 1)

    for k in range(-10, 10):
        ds.addSample(
            tuple(0.5 + 0.3 * cos(k * pi * x / 64) for x in range(-1, 2)),
            (1, ))
        ds.addSample(
            tuple(0.5 + 0.3 * sin(k * pi * x / 64) for x in range(-1, 2)),
            (0, ))
    trainer = BackpropTrainer(net, ds, learningrate=0.01, verbose=True)
    trainer.trainUntilConvergence(validationProportion=0.5)

    a = classificate_func[int(
        net.activate(
            tuple((0.5 + 0.3 * cos(2 * pi * x / 64) for x in range(3)))))]
    b = classificate_func[int(
        net.activate(
            tuple((0.5 + 0.3 * sin(2 * pi * x / 64) for x in range(3)))))]
    print('Detected function: %s\n Execution time: %s' %
          (a, time.time() - start_time))
    print('Detected function: %s\n Execution time: %s' %
          (b, time.time() - start_time))
    # b = ('Detected function: %s\n Execution time: %s' %
    # 	(classificate_func[int(net.activate(tuple((0.5 + 0.3 * sin(2 * pi * x / 64) for x in range(3)))))],
    # 	 time.time() - start_time))
Example #14
from pybrain3.supervised.trainers import BackpropTrainer
from inputLetters import inputDataSet
import inputLetters
import network

letters = [
    "A", "B", "C", "D", "I", "F", "G", "H", "K", "U", "M", "E", "L", "O", "P",
    "R", "T", "W", "X", "Y"
]
inp = inputDataSet['input']

trainer = BackpropTrainer(network.network,
                          dataset=inputLetters.inputDataSet,
                          learningrate=0.1,
                          verbose=True,
                          momentum=0.01)
trainer.trainEpochs(900)
print("\n\n")

for i in range(20):  # final printout
    print("For letter", letters[i], "the result is:")
    temp = network.network.activate(inp[i])
    for k in range(20):
        if temp[k] < 0:
            temp[k] *= -1  # take absolute values to make the results easier to read
    for j in range(20):
        print(temp[j])
print("\n")
Example #15
network.addModule(hiddenLayer)
network.addOutputModule(outLayer)

bias_to_hidden = FullConnection(bias, hiddenLayer)          #Creating connection between layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)

network.addConnection(bias_to_hidden)                       #Adding connection to network
network.addConnection(in_to_hidden)
network.addConnection(hidden_to_out)

network.sortModules()                                       #Sorting modules
inp = inputDataSet['input']                                 #Making shortcut to the input section of DataSet

print ("Number of training patterns: ", len(inputDataSet))      #Printing number of training patterns

trainer = BackpropTrainer(network, dataset=inputDataSet, learningrate=0.1, verbose=True, momentum=0.01)    #Initializing trainer with Backpropagation method

trainer.trainEpochs(5000)                                              #Training the network for 5000 epochs; verbose=True prints the error for each epoch


print("\n\n")
for i in range(20):                                                     #Final print
    print("Dla litery",letters[i],"output wynosi:")
    temp = network.activate(inp[i])
    for j in range(20):
        print(temp[j])
    print("\n\n")


print("\n\n Koniec testów")
Example #16
            (2))
# for i in range(height):
#     if i < 25:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (0))
#     elif i >= 25 and i < 50:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (1))
#     else:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (2))

# ds.addSample((0.8, 0.4), (0.7))
# ds.addSample((0.5, 0.7), (0.5))
# ds.addSample((1.0, 0.8), (0.95))
nn = buildNetwork(2, 4, 1, bias=True)

# trainer = BackpropTrainer(nn, ds)
trainer = BackpropTrainer(nn, dataset=ds)

for j in range(750):
    trainer.train()

for k in range(height):
    if k < 25:
        flag = 0
    elif k < 51:  # the first branch already took k < 25, so this covers 25..50
        flag = 1
    else:
        flag = 2
    prevision = nn.activate((testFeaturesArray[k, 0], testFeaturesArray[k, 1]))[0]
    # print(prevision)
    auxList = [(abs(prevision), 0), (abs(prevision - 1), 1)]
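Example #17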
#!/usr/bin/python

from pybrain3.supervised.trainers import BackpropTrainer
from pybrain3.tools.validation import Validator

import inputLetters
import network

trainer = BackpropTrainer(network.network,
                          inputLetters.inputLettersDataSet,
                          learningrate=0.1,
                          verbose=True)

trainer.trainEpochs(2000)

testInput = inputLetters.inputLettersDataSet['input']
testTarget = inputLetters.inputLettersDataSet['target']
errorComparator = 0.900

print("Number of training patterns:", len(inputLetters.inputLettersDataSet))

letters = [
    "A", "B", "C", "D", "I", "F", "G", "H", "K", "U", "a", "b", "c", "d", "f",
    "h", "m", "o", "w", "z"
]

MSE = 0

for i in range(20):
    temp = network.network.activate(testInput[i])
    print("For letter", letters[i], "precision is", temp[0])
from pybrain3.tools.shortcuts import buildNetwork  # To build the neural network
from pybrain3.datasets import SupervisedDataSet  # Datasets
from pybrain3.supervised.trainers import BackpropTrainer  # Training algorithm

datasets = SupervisedDataSet(2, 1)  # 2 inputs and 1 output

datasets.addSample(
    (0.8, 0.4), 0.7
)  # hours slept and hours studied, with the exam grade obtained
datasets.addSample((0.5, 0.7), 0.5)
datasets.addSample((0.1, 0.7), 0.95)

rede_neural = buildNetwork(
    2, 4, 1, bias=True
)  # Network architecture (2 neurons in the input layer +
# 4 neurons in the hidden layer + 1 neuron in the output layer)

trainer = BackpropTrainer(rede_neural, datasets)  # Trainer

for i in range(2000):  # Train the neural network 2000 times
    print(trainer.train())

while True:
    dormiu = float(input('How long did you sleep? '))
    estudou = float(input('How long did you study? '))
    z = rede_neural.activate((dormiu, estudou))[0] * 10
    print(f'Predicted grade: {z}')