Example no. 1
	def create(user, category_id, network_hr=None, dataset_hr=None, network_dy=None, dataset_dy=None):
		res = CategoryAdvisor()

		res.user = user
		res.category_id = category_id

		if network_hr is None:
			res.bprnetw_hr = buildNetwork(CategoryAdvisor.OBSERVE_LENGTH, 20, CategoryAdvisor.PREDICT_LENGTH, outclass=LinearLayer, bias=True, recurrent=True)
		else:
			res.bprnetw_hr = network_hr

		if dataset_hr is None:
			res.dataset_hr = SupervisedDataSet(CategoryAdvisor.OBSERVE_LENGTH, CategoryAdvisor.PREDICT_LENGTH)
		else:
			res.dataset_hr = dataset_hr

		if network_dy is None:
			res.bprnetw_dy = buildNetwork(CategoryAdvisor.OBSERVE_LENGTH, 20, CategoryAdvisor.PREDICT_LENGTH, outclass=LinearLayer, bias=True, recurrent=True)
		else:
			res.bprnetw_dy = network_dy

		if dataset_dy is None:
			res.dataset_dy = SupervisedDataSet(CategoryAdvisor.OBSERVE_LENGTH, CategoryAdvisor.PREDICT_LENGTH)
		else:
			res.dataset_dy = dataset_dy

		return res
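A brief usage sketch for the factory above; the call below is hypothetical, assuming create() is exposed as a staticmethod of CategoryAdvisor:
# Hypothetical call; user and category_id come from the caller.
advisor = CategoryAdvisor.create(user="alice", category_id=42)
# With no network/dataset arguments, fresh recurrent networks and empty
# SupervisedDataSets are built for both the hourly and daily horizons.
print(len(advisor.dataset_hr), len(advisor.dataset_dy))  # 0 0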
Example no. 2
def cycle():
    lerns = SupervisedDataSet(1024, 2)
    do_R = input_image.analyse()
    # the original left range() empty and used an undefined name `n`;
    # image_count and the path layout below are assumptions
    for i in range(1, image_count):
        adress = "learning_data/folder/" + str(i) + ".jpg"
        data = do_R.transform_in_data(adress)
        lerns.addSample(data, (1, 0))
    return lerns
Example no. 3
    def __init__(self, filename=None):
        SupervisedDataSet.__init__(self, 0, 0)

        self.nCls = 0
        self.nSamples = 0
        self.classHist = {}
        self.filename = ''
        if filename is not None:
            self.loadData(filename)
Example no. 4
    def _setDataFields(self, x, y):
        if not len(x): raise Exception("no input data found")
        SupervisedDataSet.__init__(self, len(x[0]), 1)
        self.setField('input', x)
        self.setField('target', y)

        flat_labels = list(self.getField('target').flatten())
        classes = list(set(flat_labels))
        self._classes = classes
        self.nClasses = len(classes)
        for class_ in classes:
            self.classHist[class_] = flat_labels.count(class_)
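The final loop above is a plain frequency count over the flattened targets; a self-contained sketch of the same step (the sample labels here are illustrative, not from the example):
from collections import Counter

flat_labels = [0, 1, 1, 2, 0, 1]          # as getField('target').flatten() would yield
classes = list(set(flat_labels))          # unique class values
class_hist = dict(Counter(flat_labels))   # {0: 2, 1: 3, 2: 1}
print(len(classes), class_hist)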
Example no. 5
    def __init__(self, unique_words, total_comments, hidden=400):
        self._max_value = 0.9
        self._min_value = 0.1
        self.__unique_words = unique_words
        self.__total_comments = total_comments
        self.__conversion_rate = 0.5
        print("Total de Comentários: ", self.__total_comments)
        print("Total de Palavras Únicas: ", len(self.__unique_words))

        unique_words_length = len(self.__unique_words)
        # Construcao da rede com quantPalavrasUnicas na entradas, 1000 camadas ocultas e 1 sai­da
        self.__network = buildNetwork(unique_words_length, hidden, 1)
        # Base de dados com quantPalavrasUnicas atributos prevzisores e uma clase
        self.__base = SupervisedDataSet(unique_words_length, 1)
Example no. 6
    def __init__(self, inp, target=1, nb_classes=0, class_labels=None):
        """Initialize an empty dataset.

        `inp` is used to specify the dimensionality of the input. While the
        number of targets is given implicitly by the training samples, it can
        also be set explicitly by `nb_classes`. To give the classes names,
        supply an iterable of strings as `class_labels`."""
        # FIXME: hard to keep nClasses synchronized if appendLinked() etc. is used.
        SupervisedDataSet.__init__(self, inp, target)
        self.addField('class', 1)
        self.nClasses = nb_classes
        if len(self) > 0:
            # calculate class histogram, if we already have data
            self.calculateStatistics()
        self.convertField('target', int)
        if class_labels is None:
            self.class_labels = list(set(self.getField('target').flatten()))
        else:
            self.class_labels = class_labels
        # copy classes (may be changed into other representation)
        self.setField('class', self.getField('target'))
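For reference, a minimal sketch of how this constructor is typically called; the import path assumes the pybrain3 packaging used by the other examples here, and the feature values and label names are illustrative:
from pybrain3.datasets import ClassificationDataSet

# two input features, three named classes
ds = ClassificationDataSet(2, nb_classes=3,
                           class_labels=['low', 'medium', 'high'])
ds.addSample((0.1, 0.2), (0,))
ds.addSample((0.5, 0.6), (1,))
ds.addSample((0.9, 1.0), (2,))
print(ds.class_labels, len(ds))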
Example no. 7
def buildAppropriateDataset(module):
    """ build a sequential dataset with 2 sequences of 3 samples, with arndom input and target values,
    but the appropriate dimensions to be used on the provided module. """
    if module.sequential:
        d = SequentialDataSet(module.indim, module.outdim)
        for dummy in range(2):
            d.newSequence()
            for dummy in range(3):
                d.addSample(randn(module.indim), randn(module.outdim))
    else:
        d = SupervisedDataSet(module.indim, module.outdim)
        for dummy in range(3):
            d.addSample(randn(module.indim), randn(module.outdim))
    return d
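A quick usage sketch, assuming the same pybrain3 shortcuts module; a default buildNetwork module is feed-forward (not sequential), so the SupervisedDataSet branch is taken:
from pybrain3.tools.shortcuts import buildNetwork

net = buildNetwork(3, 5, 2)           # indim=3, outdim=2, not sequential
d = buildAppropriateDataset(net)      # SupervisedDataSet with 3 random samples
print(len(d), d.indim, d.outdim)      # 3 3 2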
Example no. 8
def cycle():
    lerns = SupervisedDataSet(1024, 2)
    do_R = lb.analyse()
    for i in range(1, 104):
        adress = "folder 1/" + str(i) + ".jpg"
        data = do_R.transform_in_data(adress)
        lerns.addSample((data), (1, 0))

    for i in range(1, 106):
        adress = "folder 2/" + str(i) + ".jpg"
        data = do_R.transform_in_data(adress)
        lerns.addSample((data), (0, 0))

    for i in range(1, 75):
        adress = "folder 3/" + str(i) + ".jpg"
        data = do_R.transform_in_data(adress)
        lerns.addSample((data), (0, 1))

    for i in range(1, 66):
        adress = "folder 4/" + str(i) + ".jpg"
        data = do_R.transform_in_data(adress)
        lerns.addSample((data), (1, 1))
    return lerns
Example no. 9
class download_data_to_learn():
	
	def __init__(self):
		self.learnig_data_placement = os.path.dirname(os.path.realpath(__file__))+'/learnig_data/Folder'
		self.all_files = []
		self.imgs_arrays = [[],[],[],[]]
		self.weights = SupervisedDataSet(1024, 2)
		
	def Found_Learnig_Filles(self):
		for i in range(4):
			self.all_files.append(os.listdir(self.learnig_data_placement+str(i)))

	def add_learning_information(self,resultat_nember=0):
		img_adresse = self.all_files[resultat_nember]
		for image_name in img_adresse:
			img = self.binnarizing_img(self.learnig_data_placement+str(resultat_nember)+'/'+image_name)
			self.imgs_arrays[resultat_nember].append(np.asarray(img))

		for img_nember in range(len(self.all_files[resultat_nember])):  # one sample per image file
			end_response = self.flatting_arrays(self.imgs_arrays[resultat_nember][img_nember].tolist())
			self.weights.addSample((end_response), self.tranlater(resultat_nember))

	def binnarizing_img(self, img_adresse):
		file = Image.open(img_adresse)
		img_convert = file.convert("L")
		data = np.asarray(img_convert)
		resultat = (data < 200) * 1
		return resultat
	
	def flatting_arrays(self, array):
		flatten = lambda l: [item for sublist in l for item in sublist]
		return flatten(array)

	def tranlater(self, x):
		binar_number = '{0:02b}'.format(x)
		return [int(i) for i in list(binar_number)]
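The tranlater method turns a folder index into a 2-bit target vector; the mapping it produces can be checked in isolation:
# same encoding as tranlater(): folder index -> two-bit target
for x in range(4):
    print(x, [int(b) for b in '{0:02b}'.format(x)])
# 0 [0, 0]   1 [0, 1]   2 [1, 0]   3 [1, 1]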
Example no. 10
    def castToRegression(self, values):
        """Converts data set into a SupervisedDataSet for regression. Classes
        are used as indices into the value array given."""
        regDs = SupervisedDataSet(self.indim, 1)
        fields = self.getFieldNames()
        fields.remove('target')
        for f in fields:
            regDs.setField(f, self[f])
        regDs.setField('target', values[self['class'].astype(int)])
        return regDs
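A hedged sketch of calling castToRegression, assuming ds is a classification dataset with three classes; the values array supplies one regression target per class index:
from numpy import array

values = array([0.0, 0.5, 1.0])      # illustrative class -> value mapping
reg_ds = ds.castToRegression(values) # SupervisedDataSet with scalar targets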
Example no. 11
def makeMnistDataSets(path):
    """Return a pair consisting of two datasets, the first being the training
    and the second being the test dataset."""
    test = SupervisedDataSet(28 * 28, 10)
    test_image_file = os.path.join(path, 't10k-images-idx3-ubyte')
    test_label_file = os.path.join(path, 't10k-labels-idx1-ubyte')
    test_images = images(test_image_file)
    test_labels = (flaggedArrayByIndex(l, 10) for l in labels(test_label_file))

    for image, label in zip(test_images, test_labels):
        test.addSample(image, label)

    train = SupervisedDataSet(28 * 28, 10)
    train_image_file = os.path.join(path, 'train-images-idx3-ubyte')
    train_label_file = os.path.join(path, 'train-labels-idx1-ubyte')
    train_images = images(train_image_file)
    train_labels = (flaggedArrayByIndex(l, 10)
                    for l in labels(train_label_file))
    for image, label in zip(train_images, train_labels):
        train.addSample(image, label)

    return train, test
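The helpers images(), labels() and flaggedArrayByIndex() are not shown in the example; a minimal sketch under the standard MNIST IDX-format assumptions (big-endian headers, images file: magic, count, rows, cols; labels file: magic, count):
import struct
import numpy as np

def images(path):
    """Yield each image as a flat float array scaled to [0, 1]."""
    with open(path, 'rb') as f:
        _magic, n, rows, cols = struct.unpack('>IIII', f.read(16))
        for _ in range(n):
            raw = f.read(rows * cols)
            yield np.frombuffer(raw, dtype=np.uint8).astype(float) / 255.0

def labels(path):
    """Yield each label as a plain int."""
    with open(path, 'rb') as f:
        _magic, n = struct.unpack('>II', f.read(8))
        for _ in range(n):
            yield f.read(1)[0]

def flaggedArrayByIndex(idx, length):
    """One-hot array with a 1 at position idx."""
    arr = np.zeros(length)
    arr[idx] = 1.0
    return arr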
Example no. 12
from pybrain3.tools.shortcuts import buildNetwork  # to create the neural network
from pybrain3.datasets import SupervisedDataSet  # datasets
from pybrain3.supervised.trainers import BackpropTrainer  # training algorithm

datasets = SupervisedDataSet(2, 1)  # inputs and targets

datasets.addSample(
    (0.8, 0.4), 0.7
)  # hours slept and hours studied, with the resulting grade
datasets.addSample((0.5, 0.7), 0.5)
datasets.addSample((0.1, 0.7), 0.95)

rede_neural = buildNetwork(
    2, 4, 1, bias=True
)  # network architecture (2 neurons in the input layer +
# 4 neurons in the hidden layer + 1 neuron in the output layer)

trainer = BackpropTrainer(rede_neural, datasets)  # trainer

for i in range(2000):  # training the neural network 2000 times
    print(trainer.train())

while True:
    dormiu = float(input('How long did you sleep? '))
    estudou = float(input('How long did you study? '))
    z = rede_neural.activate((dormiu, estudou))[0] * 10
    print(f'Predicted grade: {z}')
Example no. 13
class NeuralNetwork:
    def __init__(self, unique_words, total_comments, hidden=400):
        self._max_value = 0.9
        self._min_value = 0.1
        self.__unique_words = unique_words
        self.__total_comments = total_comments
        self.__conversion_rate = 0.5
        print("Total de Comentários: ", self.__total_comments)
        print("Total de Palavras Únicas: ", len(self.__unique_words))

        unique_words_length = len(self.__unique_words)
        # Construcao da rede com quantPalavrasUnicas na entradas, 1000 camadas ocultas e 1 sai­da
        self.__network = buildNetwork(unique_words_length, hidden, 1)
        # Base de dados com quantPalavrasUnicas atributos prevzisores e uma clase
        self.__base = SupervisedDataSet(unique_words_length, 1)
        '''
        self.__network = buildNetwork(2, 3, 1, outclass = SoftmaxLayer,
                            hiddenclass = SigmoidLayer, bias = False)
        print(self.__network['in'])
        print(self.__network['hidden0'])
        print(self.__network['out'])
        print(self.__network['bias'])
        '''

    def float_round(self, number, close_to):
        """import math
        return math.isclose(float(number), close_to, abs_tol=0.45)"""
        if float(number) >= 0.5:
            return self._max_value == close_to
        else:
            return self._min_value == close_to

    def __add_training_set(self, training_base):

        # add the (input), (class) data pairs for training
        for index in range(0, self.__total_comments):
            training_set_length = 2500  # int(self.__total_comments * 0.8)
            if index < training_set_length:
                array = []
                # ********************** REPLACE WITH THE INPUT ARRAY *******************
                entry_array = training_base[index][0]

                if training_base[index][1] >= 3.5:
                    comment_class = self._max_value
                else:
                    comment_class = self._min_value

                    # print(entry_array, comment_class)
                for key in entry_array:
                    # print (entry_array[key])
                    array.append(entry_array[key])

                self.__base.addSample(array, comment_class)

        # print the input and the supervised class
        # print(base['input'])
        # print(base['target'])

    def training_network(self, training_base, number_of_trainings=20):

        print("Start Training")
        self.__add_training_set(training_base)
        training = BackpropTrainer(self.__network,
                                   dataset=self.__base,
                                   learningrate=0.01,
                                   momentum=0.06)

        # run the training number_of_trainings times and show the error
        '''for count in range(0, number_of_trainings):
                print("Training Number %d" % (count + 1))
                print("Error %s" % training.train())'''
        # instead of the above, train with validation
        training.trainUntilConvergence(maxEpochs=number_of_trainings,
                                       verbose=True,
                                       validationProportion=0.25)

    def test_network(self, test_base):
        # self.__network = NetworkReader.readFrom('filename.xml')
        test_base_length = len(test_base)
        corrects = 0
        errors = 0
        # ******************* PASS THE COMMENTS IN FOR TESTING *********************
        for index in range(0, test_base_length):
            array = []
            # ********************** REPLACE WITH THE INPUT ARRAY *******************
            entry_array = test_base[index][0]

            if test_base[index][1] >= 3.5:
                comment_class = self._max_value
            else:
                comment_class = self._min_value

                # print(entry_array, comment_class)
            for key in entry_array:
                array.append(entry_array[key])
            try:
                found = self.__network.activate(array)
                if self.float_round(found, comment_class):
                    corrects += 1
                else:
                    errors += 1
                print(found, comment_class,
                      self.float_round(found, comment_class))
            except (AssertionError, IndexError) as error:
                print("Have an error message: %s" % error)
        print("%f%%" % ((corrects * 100) / (errors + corrects)))

    def save_network(self, location):
        NetworkWriter.writeToFile(self.__network, location)

    def load_network(self, location):
        self.__network = NetworkReader.readFrom(location)
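A hypothetical round trip with the persistence methods above (the variable names are placeholders, not from the example):
# train, save, reload, then evaluate with the restored weights
nn = NeuralNetwork(unique_words, total_comments)
nn.training_network(training_base, number_of_trainings=20)
nn.save_network('network.xml')

nn2 = NeuralNetwork(unique_words, total_comments)
nn2.load_network('network.xml')
nn2.test_network(test_base)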
Example no. 14
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer


ds = SupervisedDataSet(2, 1)

#here goes our training base
base = (
    #8 hours slept, 2 hours studied, 7.1 exam grade
    ((8, 2), (7.1,)),
    ((10, 1), (2.3,)),
    ((7.5, 3), (8.0,)),
    ((3.5, 10), (2.5,)),
)

#here we add the numbers used as the training base above
for example in base:
    ds.addSample(example[0], example[1])


#here we create the neural network for the training base above
nn = buildNetwork(2, 4, 1) # 2 input neurons, 4 hidden neurons, 1 output


#here we define the trainer, passing in the neural network and the training base
trainer = BackpropTrainer(nn, ds)

#now we train the neural network; the more training passes, the lower the error
for i in range(10000):#improving the training base is far more effective than cranking this number up
    print(trainer.train())
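Once trained, the network can be queried with activate(); a small follow-up sketch:
# ask the trained network for a grade: 8 hours slept, 2 hours studied
predicted = nn.activate((8, 2))[0]
print(predicted)  # approaches 7.1 as the training error falls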
Example no. 15
import pickle
import pandas as pd
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
# imports assumed for the pybrain3 names used below
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

#read the data with each image's features
data = []
archivo = 'caracteristicas con lata.csv'
datos =  pd.read_csv(archivo, header=0)

data2 = []
archivo2 = 'caracteristicas sin lata.csv'
datos2 =  pd.read_csv(archivo2, header=0)

#neural network configuration (features, hidden layer, outputs)
net = buildNetwork(7, 40, 2, bias=True)
ds = SupervisedDataSet(7, 2)

#read each of the image's data values
for j in range(0,len(datos)):
    data.clear()
    data2.clear()
    for i in range(0,7):
        data.append(datos.iloc[j,i])
        data2.append(datos2.iloc[j,i])
    #print('step: ', data)
    ds.addSample(data, (1,))  #assign features and label
    ds.addSample(data2, (0,))
    #print('step: ', j)
    #print(ds)
    trainer = BackpropTrainer(net, ds) #train the network by checking against the expected error
    er = round(trainer.train(), 3)
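Note that the loop above re-creates the BackpropTrainer on every iteration; the more usual pattern is to fill the dataset first and then train once at the end, e.g.:
# sketch: with the dataset fully built above, train a single trainer afterwards
trainer = BackpropTrainer(net, ds)
for epoch in range(100):       # epoch count is illustrative
    er = round(trainer.train(), 3)
print('final error:', er)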
Example no. 16
from pybrain3.datasets import SupervisedDataSet

inputDataSet = SupervisedDataSet(35, 20)  # Creating new DataSet

# A
inputDataSet.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1),
    (1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

# B
inputDataSet.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1),
    (0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

# C
inputDataSet.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1,
     1, -1, -1, -1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1),
    (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

# D
inputDataSet.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1,
     -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1),
    (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

# F
inputDataSet.addSample(
    (1, 1, 1, 1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1,
Example no. 17
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)

ds.addSample((0.8, 0.4), (0.7,))
ds.addSample((0.5, 0.7), (0.5,))
ds.addSample((1.0, 0.8), (0.95,))

#using "bias" so the algorithm trains faster
nn = buildNetwork(2, 16, 1, bias=True)

#training the algorithm
trainer = BackpropTrainer(nn, ds)

#watching the algorithm's progress
for i in range(2000):
    print(trainer.train())

#with buildNetwork using 4 neurons, the error margin is 0.013496550372475475
#with buildNetwork using 16 neurons, the error margin is 0.0009338957668739359
#test 1 with 512 neurons: error margin of 1.6434602192104412e-32
#test 2 with 512 neurons: error margin of 1.3702349577667055e-30
#I will use just 16 neurons in this algorithm

#a few tests out of curiosity
while True:
    sleep = float(input("How many hours did you sleep? "))
    study = float(input("How many hours did you study? "))
Example no. 18
    [4.32506896e+02, 7.99685701e-01, 2.17952104e-04, 0.00000000e+00],
    [5.63802865e+02, 8.61732713e-01, 1.33318967e-04, 0.00000000e+00],
    [9.33106887e+02, 8.38514710e-01, 7.26942290e-05, 0.00000000e+00],
    [3.62702896e+01, 9.63936272e-01, 8.62667642e-04, 0.00000000e+00],
    [3.21061321e+02, 8.96041364e-01, 2.04813023e-04, 0.00000000e+00]
]
testFeaturesArray = np.array(testFeaturesArray, dtype=np.float64)
confusionMatrix = np.array(([[0, 0, 0], [0, 0, 0], [0, 0, 0]]), dtype=np.uint8)

height, width = trainFeaturesArray.shape

print(trainFeaturesArray[0, 0])
print((trainFeaturesArray[0, 0], trainFeaturesArray[0, 1],
       trainFeaturesArray[0, 2]), (0))
print((0.8, 0.4), (0.7))
ds = SupervisedDataSet(2, 1)
for i in range(height):
    if i < 25:
        ds.addSample(
            (float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1])),
            (0))
    elif 25 <= i < 50:
        ds.addSample(
            (float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1])),
            (1))
    else:
        ds.addSample(
            (float(trainFeaturesArray[i, 0]), float(trainFeaturesArray[i, 1])),
            (2))
# for i in range(height):
#     if i < 25:
Example no. 19
from pybrain3.datasets import SupervisedDataSet

inputDataSet = SupervisedDataSet(35, 1)         #Creating new DataSet

#A
inputDataSet.addSample((                        #Adding first sample to dataset
    -1, 1, 1, 1, -1,
    1, -1, -1, -1, 1,
    1, -1, -1, -1, 1,
    1, 1, 1, 1, 1,
    1, -1, -1, -1, 1,
    1, -1, -1, -1, 1,
    1, -1, -1, -1, 1
    ),
    1)

#B
inputDataSet.addSample((
    1, 1, 1, 1, -1,
    1, -1, -1, -1, 1,
    1, -1, -1, -1, 1,
    1, 1, 1, 1, -1,
    1, -1, -1, -1, 1,
    1, -1, -1, -1, 1,
    1, 1, 1, 1, -1
    ),
    1)

#C
inputDataSet.addSample((
    -1, 1, 1, 1, -1,
Example no. 20
    # cv2.waitKey()
    # cv2.destroyAllWindows()
    momentos = cal_momentos(dila)
    momentos.append(area)
    momentos.append(perimetro)
    momentos.append(circularidad)
    momentos.append(media)
    momentos.append(des_stand)
    #print(momentos)

    return momentos


net = buildNetwork(29, 5, 1, bias=True)
ds = SupervisedDataSet(29, 1)
veredicto = ""

output = [[0], [1]]
Ninput = [21, 21]
puntos = []
benigno = [[535, 425, 197], [522, 280, 69], [477, 133, 30], [525, 425, 33],
           [471, 458, 40], [667, 365, 31], [595, 864, 68], [547, 573, 48],
           [653, 477, 49], [493, 125, 49], [674, 443, 79], [322, 676, 43],
           [388, 742, 66], [546, 463, 33], [462, 406, 44], [432, 149, 20],
           [492, 473, 131], [544, 194, 38], [680, 494, 20], [612, 297, 34],
           [714, 340, 23], [357, 365, 50], [600, 621, 111], [492, 434, 87],
           [191, 549, 23], [523, 551, 48], [252, 788, 52], [347, 636, 26],
           [669, 543, 49], [351, 661, 62]]
maligno = [[538, 681, 29], [338, 314, 56], [318, 359, 27], [266, 517, 28],
           [468, 717, 23], [510, 547, 49], [423, 662, 43], [415, 460, 38],
Example no. 21
            # plot mean on pcolor map
            gray()
            # (x, y, z) = map(lambda m: m.reshape(sqrt(len(m)), sqrt(len(m))), (self.testx[:,0], self.testx[:,1], self.pred_mean))
            m = floor(sqrt(len(self.pred_mean)))
            pcolor(self.pred_mean.reshape(m, m)[::-1, :])

        else:
            print("plotting only supported for indim=1 or indim=2.")


if __name__ == '__main__':

    from pylab import figure, show

    # --- example on how to use the GP in 1 dimension
    ds = SupervisedDataSet(1, 1)
    gp = GaussianProcess(indim=1, start=-3, stop=3, step=0.05)
    figure()

    x = mgrid[-3:3:0.2]
    y = 0.1 * x**2 + x + 1
    z = sin(x) + 0.5 * cos(y)

    ds.addSample(-2.5, -1)
    ds.addSample(-1.0, 3)
    gp.mean = 0

    # new feature "autonoise" adds uncertainty to data depending on
    # its distance to other points in the dataset. Not tested much yet.
    # gp.autonoise = True
Example no. 22
# Neural networks using Pybrain

from pybrain3.datasets import SupervisedDataSet
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.supervised import BackpropTrainer

# dimensions of the input and target vectors
dataset = SupervisedDataSet(2, 1)

dataset.addSample([1, 1], [0])
dataset.addSample([1, 0], [1])
dataset.addSample([0, 1], [1])
dataset.addSample([0, 0], [0])

network = buildNetwork(dataset.indim, 4, dataset.outdim, bias=True)
trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.99)
'''
for epoch in range(1000):
    trainer.train()
'''

trainer.trainEpochs(1000)
'''
    train until convergence: trainer.trainUntilConvergence
'''

test_data = SupervisedDataSet(2, 1)

test_data.addSample([1, 1], [0])
test_data.addSample([1, 0], [1])
test_data.addSample([0, 1], [1])
Example no. 23
    return rounded, res, list(zip(classes, all_))


if __name__ == "__main__":
    time1 = time.time()
    authors = read_all(3)
    time2 = time.time()
    print(time2 - time1)
    max_ = max(map(lambda t: len(authors[t]), authors))
    max_len = max(
        map(lambda t: max(map(lambda t1: t1[1], authors[t])), authors))
    classes = [_ for _ in authors]
    L = len(classes)

    for author in authors:
        data = SupervisedDataSet(33 * 33 * 33, L)
        for text in authors[author]:
            arr = np.zeros(33 * 33 * 33, dtype='int8')
            for j in text[0]:
                arr[int(j)] = text[0][j]
            arr2 = np.zeros(L, dtype='int8')
            arr2[classes.index(author)] = 1
            data.addSample(arr, arr2)
            del arr, arr2
            print('!')
        data.saveToFile('cashes/data_3_' + str(author) + '.mod')
        del data
        print(author, 'constructed')
    print('data constructed')
    del authors
    net = buildNetwork(33 * 33 * 33, 10, 10, L, bias=True)
Example no. 24
            ind = i
    rounded = classes[ind]
    return rounded, res, list(zip(classes, all_))


if __name__ == "__main__":
    time1 = time.time()
    authors = read_all()
    time2 = time.time()
    print(time2 - time1)
    max_ = max(map(lambda t: len(authors[t]), authors))
    max_len = max(
        map(lambda t: max(map(lambda t1: t1[1], authors[t])), authors))
    classes = [_ for _ in authors]
    L = len(classes)
    ds = SupervisedDataSet(33 * 33, L)
    for author in authors:
        for text in authors[author]:
            arr = np.zeros(33 * 33)
            for j in text[0]:
                arr[int(j)] = text[0][j]
            for _ in range(
                    math.ceil(max_ / len(authors[author]) * 10 *
                              (text[1] / max_len))):
                arr2 = np.zeros(L)
                arr2[classes.index(author)] = 1
                ds.addSample(arr, arr2)

    net = buildNetwork(33 * 33, 10, 10, L, bias=True)
    trainer = RPropMinusTrainer(net, dataset=ds)
    trainer.trainEpochs(100)
Example no. 25
from pybrain3.datasets import SupervisedDataSet

daneWejsciowe = SupervisedDataSet(35, 20)

daneWejsciowe.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1),
    (1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

daneWejsciowe.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1),
    (0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

daneWejsciowe.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1,
     1, -1, -1, -1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1),
    (0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

daneWejsciowe.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1,
     -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1),
    (0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

daneWejsciowe.addSample(
    (1, 1, 1, 1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, 1, 1, -1,
     -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, 1),
    (0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))

daneWejsciowe.addSample(
    (1, 1, 1, 1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, -1, -1, 1,
Example no. 26
	def __init__(self):
		self.learnig_data_placement = os.path.dirname(os.path.realpath(__file__))+'/learnig_data/Folder'
		self.all_files = []
		self.imgs_arrays = [[],[],[],[]]
		self.weights = SupervisedDataSet(1024, 2)
Example no. 27
#!/usr/bin/python

from pybrain3.datasets import SupervisedDataSet

inputLettersDataSet = SupervisedDataSet(35, 1)

#A
inputLettersDataSet.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1), 1)

#B
inputLettersDataSet.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1, 1, -1,
     -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1), 1)

#C
inputLettersDataSet.addSample(
    (-1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1,
     1, -1, -1, -1, -1, 1, -1, -1, -1, 1, -1, 1, 1, 1, -1), 1)

#D
inputLettersDataSet.addSample(
    (1, 1, 1, 1, -1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, -1, -1, -1, 1, 1,
     -1, -1, -1, 1, 1, -1, -1, -1, 1, 1, 1, 1, 1, -1), 1)

#F
inputLettersDataSet.addSample(
    (1, 1, 1, 1, 1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, 1, 1, 1, -1, 1,
     -1, -1, -1, -1, 1, -1, -1, -1, -1, 1, -1, -1, -1, -1), 1)
Example no. 28
        print(suavidad)

        oblicuidad = 0
        for i in range(0, len(hist)):
            oblicuidad = oblicuidad + ((hist[i] - media)**3)
        oblicuidad = oblicuidad / (len(hist) * (desv_estandar**3))
        print(oblicuidad)
        # areaInteres = regionInteres(img_limpia)
        # momentos=moments(areaInteres)
        #print(momentos)

        #----------------------------- Training ------------------------------
        net = buildNetwork(
            4, 10, 1,
            bias=True)  #neural network: number of inputs, hidden units, and outputs
        ds = SupervisedDataSet(4, 1)
        if j == 1:
            print("normales")
            ds.addSample((media, desv_estandar, suavidad, oblicuidad), (0, ))
        if j == 2:
            print("tumor")
            ds.addSample((media, desv_estandar, suavidad, oblicuidad), (1, ))

        trainer = BackpropTrainer(net, ds)

        error = round(trainer.train(), 7)

        while error > 0.15:  #minimum tolerated error
            error = round(trainer.train(), 7)
            #print(error)
Example no. 29
from pybrain3.tools.shortcuts import buildNetwork
from pybrain3.datasets import SupervisedDataSet
from pybrain3.supervised.trainers import BackpropTrainer

ds = SupervisedDataSet(2, 1)
#initial value, installment number, final value
ds.addSample((100, 1), (100))
ds.addSample((100, 2), (52.63))
ds.addSample((100, 3), (35.69))
ds.addSample((100, 4), (27.22))
ds.addSample((100, 5), (22.14))
ds.addSample((100, 6), (18.76))
ds.addSample((100, 7), (16.35))
ds.addSample((100, 8), (14.54))
ds.addSample((100, 9), (13.14))
ds.addSample((100, 10), (12.02))
ds.addSample((100, 11), (11.10))
ds.addSample((100, 12), (10.34))
'''
ds.addSample((200, 12), (20.68))
ds.addSample((6518.78, 1), (6518.78))
ds.addSample((6518.78, 3), (2326.33))
ds.addSample((6518.78, 5), (1443.38))
ds.addSample((6518.78, 8), (947.93))
ds.addSample((6518.78, 9), (856.47))
ds.addSample((6518.78, 10), (783.43))
ds.addSample((6518.78, 11), (723.79))
ds.addSample((6518.78, 12), (674.19))'''

nn = buildNetwork(2, 4, 1)