Example #1
	def create(user, category_id, network_hr=None, dataset_hr=None, network_dy=None, dataset_dy=None):
		""" Factory: build a CategoryAdvisor with hourly/daily recurrent nets and datasets, creating defaults where none are given. """
		res = CategoryAdvisor()

		res.user = user
		res.category_id = category_id

		if network_hr is None:
			res.bprnetw_hr = buildNetwork(CategoryAdvisor.OBSERVE_LENGTH, 20, CategoryAdvisor.PREDICT_LENGTH, outclass=LinearLayer, bias=True, recurrent=True)
		else:
			res.bprnetw_hr = network_hr

		if dataset_hr is None:
			res.dataset_hr = SupervisedDataSet(CategoryAdvisor.OBSERVE_LENGTH, CategoryAdvisor.PREDICT_LENGTH)
		else:
			res.dataset_hr = dataset_hr

		if network_dy is None:
			res.bprnetw_dy = buildNetwork(CategoryAdvisor.OBSERVE_LENGTH, 20, CategoryAdvisor.PREDICT_LENGTH, outclass=LinearLayer, bias=True, recurrent=True)
		else:
			res.bprnetw_dy = network_dy

		if dataset_dy is None:
			res.dataset_dy = SupervisedDataSet(CategoryAdvisor.OBSERVE_LENGTH, CategoryAdvisor.PREDICT_LENGTH)
		else:
			res.dataset_dy = dataset_dy

		return res
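A minimal usage sketch (assumptions: create is callable as a factory on CategoryAdvisor, and OBSERVE_LENGTH / PREDICT_LENGTH are class constants, as the defaults above imply):

# hypothetical usage; user and category_id are whatever the caller supplies
advisor = CategoryAdvisor.create(user='alice', category_id=42)
advisor.dataset_hr.addSample([0.0] * CategoryAdvisor.OBSERVE_LENGTH,
                             [0.0] * CategoryAdvisor.PREDICT_LENGTH)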
Example #2
def buildDecomposableNetwork():
    """ Three hidden neurons, with 2 in- and 2 out-connections each. """
    n = buildNetwork(2, 3, 2, bias=False)
    ndc = NeuronDecomposableNetwork.convertNormalNetwork(n)
    # set all the weights to 1
    ndc._setParameters(ones(12))
    return ndc
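Since buildNetwork defaults to a sigmoid hidden layer and all twelve weights are set to one, the output is easy to verify by hand, which makes this a convenient sanity check (a sketch, not from the source):

# for input (1, 0) each hidden unit sees 1.0, so each of the two
# linear outputs should be 3 * sigmoid(1.0), roughly 2.19
ndc = buildDecomposableNetwork()
print(ndc.activate([1, 0]))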
Example #3
 def setupNN(self, trainer=RPropMinusTrainer, hidden=None, **trnargs):
     """ Constructs a 3-layer FNN for regression. Optional arguments are passed on to the Trainer class. """
     if hidden is not None:
         self.hidden = hidden
     logging.info("Constructing FNN with following config:")
     FNN = buildNetwork(self.DS.indim, self.hidden, self.DS.outdim)
     logging.info(str(FNN) + "\n  Hidden units:\n    " + str(self.hidden))
     logging.info("Training FNN with following special arguments:")
     logging.info(str(trnargs))
     self.Trainer = trainer(FNN, dataset=self.DS, **trnargs)
Example #4
    def trainingNetwork(self, words_classes_vector_list, number_epoch=1000):
        """
        Trains the network on the given list of word/class vectors.
        """
        self.words_classes_vector_list = words_classes_vector_list

        # Neural network parameters
        num_input_neuron = 25
        num_output_neuron = len(words_classes_vector_list[0])
        num_hidden_neuron = int((num_input_neuron + num_output_neuron) * 2 / 3)

        #print("\n Основные параметры нейронной сети:")
        #print(str(num_input_neuron) + " " + str(num_output_neuron) + " " + str(num_hidden_neuron))

        _bias = False
        _hiddenclass = SoftmaxLayer
        _outclass = LinearLayer
        _nb_classes = num_output_neuron
        num_epoch = number_epoch

        # Create the neural network and the datasets
        self.net = buildNetwork(num_input_neuron,
                                num_hidden_neuron,
                                num_output_neuron,
                                bias=_bias,
                                hiddenclass=_hiddenclass,
                                outclass=_outclass)
        norgate = ClassificationDataSet(num_input_neuron,
                                        num_output_neuron,
                                        nb_classes=_nb_classes)
        nortrain = ClassificationDataSet(num_input_neuron,
                                         num_output_neuron,
                                         nb_classes=_nb_classes)

        # Fill the datasets with training samples
        #print("\n Training data: ")
        for elem in words_classes_vector_list[1]:
            word = elem[0]
            _class = elem[1]

            #print(str(elem[2]) + "  " + str(elem[0]) + "  " + str(_class))
            norgate.addSample(tuple(word), tuple(_class))
            nortrain.addSample(tuple(word), tuple(_class))

        # Train the neural network
        trainer = BackpropTrainer(
            self.net, norgate
        )  # , verbose=True, batchlearning=False, momentum= 0.1, learningrate= 0.01, weightdecay= 0.01, lrdecay=1.0
        trnerr, valerr = trainer.trainUntilConvergence(dataset=nortrain,
                                                       maxEpochs=num_epoch,
                                                       verbose=False)

        return (trnerr, valerr)
        """
Example #5
def buildRecurrentNetwork():
    N = buildNetwork(1,
                     1,
                     1,
                     recurrent=True,
                     bias=False,
                     hiddenclass=LinearLayer,
                     outputbias=False)
    h = N['hidden0']
    N.addRecurrentConnection(FullConnection(h, h))
    N.sortModules()
    N.name = 'RecurrentNetwork'
    return N
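Because of the hidden-to-hidden FullConnection, repeated activations are stateful; a short sketch of the behaviour (reset() is pybrain's standard way to clear recurrent buffers):

N = buildRecurrentNetwork()
print(N.activate([0.5]))  # first time step
print(N.activate([0.5]))  # same input, different output: the hidden state feeds back
N.reset()                 # clear sequence state before starting a new sequence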
Example #6
def buildNestedNetwork():
    """ build a nested network. """
    N = FeedForwardNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.sortModules()
    return N
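One detail worth noting about the wiring above (an observation, not stated in the source): the output module b is fed only by a, while the nested net c consumes b's output and feeds nothing back, so c has no effect on the activation result:

N = buildNestedNetwork()
print(N.activate([0.7]))  # two values, produced by output layer b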
Example #7
 def setupNN(self, trainer=RPropMinusTrainer, hidden=None, **trnargs):
     """ Setup FNN and trainer for classification. """
     self._convertAllDataToOneOfMany()
     if hidden is not None:
         self.hidden = hidden
     FNN = buildNetwork(self.DS.indim,
                        self.hidden,
                        self.DS.outdim,
                        outclass=SoftmaxLayer)
     logging.info("Constructing classification FNN with following config:")
     logging.info(str(FNN) + "\n  Hidden units:\n    " + str(self.hidden))
     logging.info("Trainer received the following special arguments:")
     logging.info(str(trnargs))
     self.Trainer = trainer(FNN, dataset=self.DS, **trnargs)
Example #8
    def trainClasifier(self, training_set):
        dataSet = self.__createDataset(training_set)
        mpl_conf = self.configuration_map['mlp_classifier_config']

        self.__net = buildNetwork(
            dataSet.indim, mpl_conf['hidden_nodes'], dataSet.outdim,
            bias=mpl_conf['biasArg'], recurrent=mpl_conf['recurrentArg'],
            hiddenclass=mpl_conf['hiddenclassArg'], outclass=mpl_conf['outclassArg'])

        self.__trainer = BackpropTrainer(
            self.__net, dataSet, learningrate=mpl_conf['learningRateArg'],
            momentum=mpl_conf['momentumArg'], verbose=mpl_conf['verbose'])

        self.__trainer.trainUntilConvergence(continueEpochs=12, maxEpochs=mpl_conf['epochs'])
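The configuration map itself is not shown; a plausible shape, inferred purely from the keys read above (all values are illustrative guesses, and the layer classes come from pybrain3.structure):

configuration_map = {
    'mlp_classifier_config': {
        'hidden_nodes': 10,
        'biasArg': True,
        'recurrentArg': False,
        'hiddenclassArg': SigmoidLayer,
        'outclassArg': SoftmaxLayer,
        'learningRateArg': 0.01,
        'momentumArg': 0.1,
        'verbose': True,
        'epochs': 100,
    }
}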
Example #9
    def __init__(self, unique_words, total_comments, hidden=400):
        self._max_value = 0.9
        self._min_value = 0.1
        self.__unique_words = unique_words
        self.__total_comments = total_comments
        self.__conversion_rate = 0.5
        print("Total de Comentários: ", self.__total_comments)
        print("Total de Palavras Únicas: ", len(self.__unique_words))

        unique_words_length = len(self.__unique_words)
        # Construcao da rede com quantPalavrasUnicas na entradas, 1000 camadas ocultas e 1 sai­da
        self.__network = buildNetwork(unique_words_length, hidden, 1)
        # Base de dados com quantPalavrasUnicas atributos prevzisores e uma clase
        self.__base = SupervisedDataSet(unique_words_length, 1)
        '''
Example #10
def buildMixedNestedNetwork():
    """ build a nested network with the inner one being a ffn and the outer one being recurrent. """
    N = RecurrentNetwork('outer')
    a = LinearLayer(1, name='a')
    b = LinearLayer(2, name='b')
    c = buildNetwork(2, 3, 1)
    c.name = 'inner'
    N.addInputModule(a)
    N.addModule(c)
    N.addOutputModule(b)
    N.addConnection(FullConnection(a, b))
    N.addConnection(FullConnection(b, c))
    N.addRecurrentConnection(FullConnection(c, c))
    N.sortModules()
    return N
Example #11
    def setupRNN(self, trainer=BackpropTrainer, hidden=None, **trnargs):
        """ Setup an LSTM RNN and trainer for sequence classification. """
        if hidden is not None:
            self.hidden = hidden
        self._convertAllDataToOneOfMany()

        RNN = buildNetwork(self.DS.indim,
                           self.hidden,
                           self.DS.outdim,
                           hiddenclass=LSTMLayer,
                           outclass=SoftmaxLayer)
        logging.info("Constructing classification RNN with following config:")
        logging.info(str(RNN) + "\n  Hidden units:\n    " + str(self.hidden))
        logging.info("Trainer received the following special arguments:")
        logging.info(str(trnargs))
        self.Trainer = trainer(RNN, dataset=self.DS, **trnargs)
Example #12
import pickle

import pandas as pd  # needed for pd.read_csv below (missing from the original snippet)
from pybrain3.tools.shortcuts import buildNetwork  # needed for buildNetwork below
from pybrain3.datasets import SupervisedDataSet  # needed for SupervisedDataSet below
from pybrain3.supervised.trainers import BackpropTrainer
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt

# read the data with the features of each image
data = []
archivo = 'caracteristicas con lata.csv'
datos = pd.read_csv(archivo, header=0)

data2 = []
archivo2 = 'caracteristicas sin lata.csv'
datos2 = pd.read_csv(archivo2, header=0)

# neural network configuration (features, hidden units, outputs)
net = buildNetwork(7, 40, 2, bias=True)
ds = SupervisedDataSet(7, 2)

# read each image's feature row
for j in range(0, len(datos)):
    data.clear()
    data2.clear()
    for i in range(0, 7):
        data.append(datos.iloc[j, i])
        data2.append(datos2.iloc[j, i])
    #print('step: ', data)
    ds.addSample(data, (1,))  # assign features and label
    ds.addSample(data2, (0,))
    #print('step: ', j)
    #print(ds)
    trainer = BackpropTrainer(net, ds)  # train the network by comparing against the expected error
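The snippet cuts off right after the trainer is built; a plausible continuation (not in the original) that trains for a fixed number of epochs and persists the net, which would also explain the pickle import at the top:

# hypothetical continuation: train, then save the trained network
for epoch in range(100):
    error = trainer.train()
print('final training error:', error)
with open('red_lata.pkl', 'wb') as f:  # hypothetical filename
    pickle.dump(net, f)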
Example #13
    else:
        ds.addSample(
            (float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1])),
            (2,))
# for i in range(height):
#     if i < 25:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (0))
#     elif i >= 25 and i < 50:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (1))
#     else:
#         ds.addSample((float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1]), float(trainFeaturesArray[i, 2])), (2))

# ds.addSample((0.8, 0.4), (0.7))
# ds.addSample((0.5, 0.7), (0.5))
# ds.addSample((1.0, 0.8), (0.95))
nn = buildNetwork(2, 4, 1, bias=True)

# trainer = BackpropTrainer(nn, ds)
trainer = BackpropTrainer(nn, dataset=ds)

for j in range(750):
    trainer.train()

for k in range(height):
    if k < 25:
        flag = 0
    elif 25 <= k < 50:
        flag = 1
    else:
        flag = 2
    prevision = nn.activate((testFeaturesArray[k, 0], testFeaturesArray[k, 1]))
Example #14
 def __init__(self, dimState, numActions, name=None):
     Module.__init__(self, dimState, 1, name)
     self.network = buildNetwork(dimState + numActions,
                                 dimState + numActions, 1)
     self.numActions = numActions
Example #15
    # cv2.imshow("imagen libre", dilatado)
    # cv2.waitKey()
    # cv2.destroyAllWindows()
    momentos = cal_momentos(dila)
    momentos.append(area)
    momentos.append(perimetro)
    momentos.append(circularidad)
    momentos.append(media)
    momentos.append(des_stand)
    #print(momentos)

    return momentos


net = buildNetwork(29, 5, 1, bias=True)
ds = SupervisedDataSet(29, 1)
veredicto = ""

output = [[0], [1]]
Ninput = [21, 21]
puntos = []
benigno = [[535, 425, 197], [522, 280, 69], [477, 133, 30], [525, 425, 33],
           [471, 458, 40], [667, 365, 31], [595, 864, 68], [547, 573, 48],
           [653, 477, 49], [493, 125, 49], [674, 443, 79], [322, 676, 43],
           [388, 742, 66], [546, 463, 33], [462, 406, 44], [432, 149, 20],
           [492, 473, 131], [544, 194, 38], [680, 494, 20], [612, 297, 34],
           [714, 340, 23], [357, 365, 50], [600, 621, 111], [492, 434, 87],
           [191, 549, 23], [523, 551, 48], [252, 788, 52], [347, 636, 26],
           [669, 543, 49], [351, 661, 62]]
maligno = [[538, 681, 29], [338, 314, 56], [318, 359, 27], [266, 517, 28],
Example #16
import pandas as pd
from pybrain3.datasets.supervised import SupervisedDataSet
from pybrain3.structure.modules.tanhlayer import TanhLayer
from pybrain3.supervised.trainers.backprop import BackpropTrainer
from pybrain3.tools.shortcuts import buildNetwork
from sklearn.model_selection import train_test_split

p = pd.read_csv(
    r'D:\pycharm_project\fingerprint\Road_test_data_lte\my_test\single_cell_test_data\p3.csv'
)
p.fillna(value=0, inplace=True)
pX = p[['servingrsrp', 'rsrp0', 'rsrp1', 'rsrp2', 'rsrp3', 'rsrp4', 'rsrp5']]
py = p[['longitude', 'latitude']]
pX_train, pX_test, py_train, py_test = train_test_split(pX, py, test_size=0.1)
# build the training and test datasets
training = SupervisedDataSet(pX_train.shape[1], 2)
for i in range(pX_train.shape[0]):
    training.addSample(pX_train.iloc[i], py_train.iloc[i])
testing = SupervisedDataSet(pX_test.shape[1], 2)
for i in range(pX_test.shape[0]):
    testing.addSample(pX_test.iloc[i], py_test.iloc[i])
# build the artificial neural network
net = buildNetwork(pX_train.shape[1], 4, 2, hiddenclass=TanhLayer, bias=True)
trainer = BackpropTrainer(net, training, learningrate=0.01)
trainer.trainEpochs(epochs=100)
predictions = net.activateOnDataset(dataset=testing)
y_predict = pd.DataFrame(predictions, columns=['longitude', 'latitude'])
print(y_predict)
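A quick way to judge the fit on the held-out split (a sketch reusing the sklearn dependency already imported above; the error is in degrees of longitude/latitude):

from sklearn.metrics import mean_absolute_error

print('MAE (degrees):', mean_absolute_error(py_test, y_predict))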
Example #17
# Neural networks with PyBrain

from pybrain3.datasets import SupervisedDataSet
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.supervised import BackpropTrainer

# dimensions of the input and target vectors
dataset = SupervisedDataSet(2, 1)

dataset.addSample([1, 1], [0])
dataset.addSample([1, 0], [1])
dataset.addSample([0, 1], [1])
dataset.addSample([0, 0], [0])

network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)
trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.99)
'''
for epoch in range(1000):
    trainer.train()
'''

trainer.trainEpochs(1000)
'''
    to train until convergence: trainer.trainUntilConvergence
'''

test_data = SupervisedDataSet(2, 1)

test_data.addSample([1, 1], [0])
test_data.addSample([1, 0], [1])
test_data.addSample([0, 1], [1])
test_data.addSample([0, 0], [0])
Example #18
ds.addSample((100, 6), (18.76))
ds.addSample((100, 7), (16.35))
ds.addSample((100, 8), (14.54))
ds.addSample((100, 9), (13.14))
ds.addSample((100, 10), (12.02))
ds.addSample((100, 11), (11.10))
ds.addSample((100, 12), (10.34))
'''
ds.addSample((200, 12), (20.68))
ds.addSample((6518.78, 1), (6518.78))
ds.addSample((6518.78, 3), (2326.33))
ds.addSample((6518.78, 5), (1443.38))
ds.addSample((6518.78, 8), (947.93))
ds.addSample((6518.78, 9), (856.47))
ds.addSample((6518.78, 10), (783.43))
ds.addSample((6518.78, 11), (723.79))
ds.addSample((6518.78, 12), (674.19))'''

nn = buildNetwork(2, 4, 1)

trainer = BackpropTrainer(nn, ds)

for i in range(20000):
    print(trainer.train())

while True:
    valor_inicial = float(input('Initial amount: '))
    numero_parcela = int(input('Number of installments: '))
    x = nn.activate((valor_inicial, numero_parcela))
    print(f'Corresponds to the value {x[0]:.2f}')
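With raw inputs in the hundreds-to-thousands range, the default sigmoid hidden layer saturates and backpropagation converges slowly; a common remedy (not part of the original) is to scale features into a small range before calling addSample:

# illustrative min-max scaling helper (hypothetical; bounds would come from the data above)
def scale(v, lo, hi):
    return (v - lo) / (hi - lo)

# e.g. ds.addSample((scale(6518.78, 0.0, 7000.0), scale(12, 1, 12)), (674.19 / 7000.0,))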
Example #19
xlist2 = [0.2, 10]
xlist100 = list(range(12, 112))

xa1 = array(xlist1)
xa2 = array(xlist2)
xa100 = array(xlist100)

pc1 = ParameterContainer(1)
pc2 = ParameterContainer(2)
pc100 = ParameterContainer(100)
pc1._setParameters(xa1)
pc2._setParameters(xa2)
pc100._setParameters(xa100)

# for the task object, we need a module
nnet = buildNetwork(task.outdim, 2, task.indim)


# a minimalistic Evolvable subclass that is not (as usual) a ParameterContainer
class SimpleEvo(Evolvable):
    def __init__(self, x):
        self.x = x

    def mutate(self):
        self.x += random() - 0.3

    def copy(self):
        return SimpleEvo(self.x)

    def randomize(self):
        self.x = 10 * random() - 2
Example #20
     data = SupervisedDataSet(33 * 33 * 33, L)
     for text in authors[author]:
         arr = np.zeros(33 * 33 * 33, dtype='int8')
         for j in text[0]:
             arr[int(j)] = text[0][j]
         arr2 = np.zeros(L, dtype='int8')
         arr2[classes.index(author)] = 1
         data.addSample(arr, arr2)
         del arr, arr2
         print('!')
     data.saveToFile('cashes/data_3_' + str(author) + '.mod')
     del data
     print(author, 'constructed')
 print('data constructed')
 net = buildNetwork(33 * 33 * 33, 10, 10, L, bias=True)
 trainer = RPropMinusTrainer(net)
 print('training started')
 for i in range(20):
     for author in authors:
         data = SupervisedDataSet.loadFromFile('cashes/data_3_' +
                                               str(author) + '.mod')
         for j in range(5):
             e = trainer.trainOnDataset(data)
             print(i, author, j, 'time:', time.time() - time2, e)
         del data
 print(trainer.testOnData())
 NetworkWriter.writeToFile(net, 'filename_3.xml')
 s = 0
 time3 = time.time()
 print(time3 - time2)
Example #21
#!/usr/local/bin/python3

from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import *
from pybrain3.supervised.trainers import BackpropTrainer
from math import *
import time

test_func = lambda x, k: 0.5 + k * sin(2 * 3.1415 * x / 64)
start_time = time.time()
if __name__ == "__main__":
    print(test_func(3, 5.54))
    net = buildNetwork(2, 3, 1, bias=True)  # 2 inputs: (x, test_func(x, k)); the target is k
    ds = SupervisedDataSet(2, 1)

    for x in range(10):
        for k in range(10):
            ds.addSample((x, test_func(x, k)), k)

    train = BackpropTrainer(net, learningrate=0.01, momentum=0.05)
    train.trainOnDataset(ds, 1000)
    train.testOnData()
    print(net.activate((3, 2)))
    # print(test_func(90, 1))
    print('Execution time: %s s' % (time.time() - start_time))
Example #22
from pybrain3.tools.shortcuts import buildNetwork  # to build the neural network
from pybrain3.datasets import SupervisedDataSet  # datasets
from pybrain3.supervised.trainers import BackpropTrainer  # training algorithm

datasets = SupervisedDataSet(2, 1)  # inputs and outputs

datasets.addSample(
    (0.8, 0.4), 0.7
)  # hours slept and hours studied, with the grade obtained
datasets.addSample((0.5, 0.7), 0.5)
datasets.addSample((0.1, 0.7), 0.95)

rede_neural = buildNetwork(
    2, 4, 1, bias=True
)  # network architecture: 2 neurons in the input layer +
# 4 neurons in the hidden layer + 1 neuron in the output layer

trainer = BackpropTrainer(rede_neural, datasets)  # trainer

for i in range(2000):  # train the neural network 2000 times
    print(trainer.train())

while True:
    dormiu = float(input('How long did you sleep? '))
    estudou = float(input('How long did you study? '))
    z = rede_neural.activate((dormiu, estudou))[0] * 10
    print(f'Predicted grade: {z}')
Example #23
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)

ds.addSample((0.8, 0.4), (0.7))
ds.addSample((0.5, 0.7), (0.5))
ds.addSample((1.0, 0.8), (0.95))

# using "bias" makes the algorithm train faster
nn = buildNetwork(2, 16, 1, bias=True)

# training the algorithm
trainer = BackpropTrainer(nn, ds)

# watching the algorithm's error evolve
for i in range(2000):
    print(trainer.train())

# with 4 neurons, buildNetwork reaches an error margin of 0.013496550372475475
# with 16 neurons, 0.0009338957668739359
# test 1 with 512 neurons: 1.6434602192104412e-32
# test 2 with 512 neurons: 1.3702349577667055e-30
# this script will use just 16 neurons

# a few tests out of curiosity
while True:
    sleep = float(input("How many hours did you sleep? "))
    study = float(input("How many hours did you study? "))
    print(nn.activate((sleep, study))[0])
Example #24
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)  # assumed setup: the snippet starts mid-file and `ds` is used below

# here goes our learning base
base = (
    # 8 hours slept, 2 hours studied, 7.1 exam grade
    ((8, 2), (7.1,)),
    ((10, 1), (2.3,)),
    ((7.5, 3), (8.0,)),
    ((3.5, 10), (2.5,)),
)

# add the numbers above as learning samples
for example in base:
    ds.addSample(example[0], example[1])


# create the neural network for the learning base above
nn = buildNetwork(2, 4, 1)  # 2 input neurons, 4 hidden neurons, 1 output


# define the trainer: pass it the neural network and the learning base
trainer = BackpropTrainer(nn, ds)

# train the network; more training passes lower the chance of error
for i in range(10000):  # improving the learning base is far more effective than raising this number
    print(trainer.train())

# ask the user for sleep and study hours and predict the grade
while True:
    horas_dormidas = float(input('How many hours will the student sleep? '))
    horas_estudadas = float(input('How many hours will the student study? '))
    nota = nn.activate((horas_dormidas, horas_estudadas))
    print(f'The student will score approximately {nota[0]:.2f} points on the exam')
Example #25
train_data, part_data = dataset.splitWithProportion(0.6)
print('Training samples: %d' % len(train_data))

# split the remainder into test and validation data
test_data, val_data = part_data.splitWithProportion(0.5)
print('Test samples: %d' % len(test_data))
print('Validation samples: %d' % len(val_data))


# In[19]:


from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.supervised.trainers import BackpropTrainer

net = buildNetwork(dataset.indim, 3, dataset.outdim)
trainer = BackpropTrainer(net, dataset=train_data, learningrate=0.01, momentum=0.1, verbose=True)

train_errors, val_errors = trainer.trainUntilConvergence(dataset=train_data, maxEpochs=100)


# In[20]:


import matplotlib.pyplot as plt
get_ipython().run_line_magic('matplotlib', 'inline')

plt.plot(train_errors, 'b', val_errors, 'r')
plt.show()

trainer.totalepochs
Example #26
        suavidad = 1 - (1 / (1 + (desv_estandar**2)))
        print(suavidad)

        oblicuidad = 0
        for i in range(0, len(hist)):
            oblicuidad = oblicuidad + ((hist[i] - media)**3)
        oblicuidad = oblicuidad / (len(hist) * (desv_estandar**3))
        print(oblicuidad)
        # areaInteres = regionInteres(img_limpia)
        # momentos=moments(areaInteres)
        #print(momentos)

        #----------------------------- Training ------------------------------
        net = buildNetwork(
            4, 10, 1,
            bias=True)  # neural network: number of inputs, hidden units, and outputs
        ds = SupervisedDataSet(4, 1)
        if j == 1:
            print("normales")
            ds.addSample((media, desv_estandar, suavidad, oblicuidad), (0, ))
        if j == 2:
            print("tumor")
            ds.addSample((media, desv_estandar, suavidad, oblicuidad), (1, ))

        trainer = BackpropTrainer(net, ds)

        error = round(trainer.train(), 7)

        while error > 0.15:  # minimum tolerated error
            error = round(trainer.train(), 7)
Example #27
# First argument: the number of features, np.shape(X)[1]; second: the number of class labels, len(np.unique(y_train))
ds_train.setField('input', X_train)  # initialize the inputs
ds_train.setField('target', y_train[:, np.newaxis]
                  )  # initialize the targets; np.newaxis makes a column vector
ds_train._convertToOneOfMany()  # binarize the target vector (one-of-many encoding)
# Held-out (test) part
ds_test = ClassificationDataSet(np.shape(X)[1],
                                nb_classes=len(np.unique(y_train)))
ds_test.setField('input', X_test)
ds_test.setField('target', y_test[:, np.newaxis])
ds_test._convertToOneOfMany()

np.random.seed(0)
# Build a feedforward network
net = buildNetwork(ds_train.indim,
                   HIDDEN_NEURONS_NUM,
                   ds_train.outdim,
                   outclass=SoftmaxLayer)
# ds.indim  -- number of input-layer neurons, equal to the number of features
# ds.outdim -- number of output-layer neurons, equal to the number of class labels
# SoftmaxLayer -- an activation function suited to multi-class classification

init_params = np.random.random(
    len(net.params))  # initialize the network weights to get a reproducible result
net._setParameters(init_params)

# pybrain's parameter-fitting code uses the random module; fix the seed for a reproducible result
np.random.seed(0)
trainer = BackpropTrainer(
    net, dataset=ds_train)  # initialize the optimization module
err_train, err_val = trainer.trainUntilConvergence(maxEpochs=MAX_EPOCHS)
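A natural follow-up once training converges (a sketch, not in the original: it assumes ds_test and y_test from the truncated context above, plus pybrain3.utilities.percentError, pybrain's usual helper for classification error):

from pybrain3.utilities import percentError

# predicted label = index of the strongest softmax output for each test row
y_pred = net.activateOnDataset(ds_test).argmax(axis=1)
print('test error (%):', percentError(y_pred, y_test))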