Example #1
    def __init__(self,
                 input_size,
                 hidden_sizes,
                 output_size,
                 memory_size,
                 previous_memory_size,
                 previous_hidden_size,
                 device,
                 batch_size,
                 feed_mem=False,
                 type_A=True,
                 out_activation=None,
                 orthogonal=False):
        '''
        To use an LMN with a single-layer functional component, pass a list with one element as hidden_sizes, e.g. [10].
        With a multi-layer functional component, only the last layer is used to compute the memory state.

        :param hidden_sizes: a list containing hidden sizes for the layers of the functional component
        :param type_A: True to use LMN-A, False to use LMN-B. Default True.
        :param feed_mem: True if the previous memory has to be fed to the current memory, False otherwise. Default False.
        '''

        super(LMNModule, self).__init__()

        self.memory_size = memory_size
        self.previous_hidden_size = previous_hidden_size
        self.previous_memory_size = previous_memory_size
        self.input_size_f = input_size + self.memory_size + self.previous_hidden_size + self.previous_memory_size
        self.hidden_sizes = hidden_sizes
        self.output_size = output_size
        self.type_A = type_A
        self.device = device
        self.batch_size = batch_size
        self.current_h_f = None
        self.out_activation = out_activation

        self.mem_connection = self.previous_memory_size if feed_mem else 0
        if self.type_A:
            self.functional = MLP(self.input_size_f,
                                  self.hidden_sizes,
                                  self.output_size,
                                  out_activation=self.out_activation).to(
                                      self.device)
            self.memory = MemoryModule(self.memory_size,
                                       self.hidden_sizes[-1],
                                       prev_mem_size=self.mem_connection).to(
                                           self.device)
        else:
            self.functional = MLP(self.input_size_f,
                                  self.hidden_sizes).to(self.device)
            self.memory = MemoryModule(self.memory_size,
                                       self.hidden_sizes[-1],
                                       prev_mem_size=self.mem_connection,
                                       output_size=self.output_size,
                                       out_activation=self.out_activation).to(
                                           self.device)

        if orthogonal:
            nn.init.orthogonal_(self.memory.linear_memory.weight)
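A minimal instantiation sketch, not part of the original example, may help when reading the signature above. The sizes are placeholders, and it assumes MLP and MemoryModule are importable from the surrounding project:

import torch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Hypothetical configuration: 16 input features, a two-layer functional
# component, 10 output classes and a 64-unit memory. No previous module is
# attached, so the previous_* sizes are zero and feed_mem keeps its default.
lmn = LMNModule(input_size=16,
                hidden_sizes=[32, 32],
                output_size=10,
                memory_size=64,
                previous_memory_size=0,
                previous_hidden_size=0,
                device=device,
                batch_size=8,
                feed_mem=False,
                type_A=True)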
Example #2
    def __init__(self,
                 input_size,
                 hidden_sizes,
                 output_size,
                 memory_size,
                 device,
                 batch_size,
                 type_A=True,
                 out_activation=None,
                 orthogonal=True):
        '''
        To use an LMN with a single-layer functional component, pass a list with one element as hidden_sizes, e.g. [10].
        With a multi-layer functional component, only the last layer is used to compute the memory state.

        :param hidden_sizes: a list containing hidden sizes for the layers of the functional component
        :param type_A: True to use LMN-A, False to use LMN-B. Default True.
        '''

        super(LMN, self).__init__()

        self.memory_size = memory_size
        self.input_size_f = input_size + self.memory_size
        self.hidden_sizes = hidden_sizes
        self.output_size = output_size
        self.type_A = type_A
        self.device = device
        self.batch_size = batch_size

        self.current_h_f = None

        if self.type_A:
            self.functional = MLP(self.input_size_f,
                                  self.hidden_sizes,
                                  self.output_size,
                                  out_activation=out_activation).to(
                                      self.device)
            self.memory = Memory(self.memory_size,
                                 self.hidden_sizes[-1]).to(self.device)
        else:
            self.functional = MLP(self.input_size_f,
                                  self.hidden_sizes).to(self.device)
            self.memory = Memory(self.memory_size,
                                 self.hidden_sizes[-1],
                                 self.output_size,
                                 out_activation=out_activation).to(self.device)

        if orthogonal:
            nn.init.orthogonal_(self.memory.linear_memory.weight)
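A similar sketch, again with assumed sizes, shows the LMN-B branch, where the Memory module rather than the functional MLP produces the output (type_A=False); orthogonal defaults to True here, so the memory weight matrix is orthogonally initialised:

import torch

device = torch.device('cpu')

# Hypothetical configuration for the LMN-B variant.
lmn_b = LMN(input_size=16,
            hidden_sizes=[32],
            output_size=10,
            memory_size=64,
            device=device,
            batch_size=8,
            type_A=False)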
Example #3
from core.MLP import MLP
from random import shuffle

EPOCHS = 2000

# Assumed signature (based on usage): MLP(n_inputs, n_hidden, n_outputs, learning_rate)
mlp = MLP(2, 4, 1, 0.3)

# XOR truth table: two binary inputs, one binary target
dataset = [([0, 0], [0]), ([0, 1], [1]), ([1, 0], [1]), ([1, 1], [0])]

for i in range(EPOCHS):
    erroAproxEpoca = 0
    erroClassEpoca = 0
    # Shuffle a copy so the original dataset ordering is preserved
    data = list(dataset)
    shuffle(data)
    for sample in data:
        erro_aprox, erro_class = mlp.treinar(sample[0], sample[1])
        erroAproxEpoca += erro_aprox
        erroClassEpoca += erro_class
    print(
        f"Epoch {i + 1} \t| Approx. error: {erroAproxEpoca} \t\t| Class. error: {erroClassEpoca}"
    )
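A small variant of the same loop, sketched here under the assumption that erro_class counts the misclassified samples in an epoch, stops training as soon as one epoch finishes without classification errors:

for i in range(EPOCHS):
    erroClassEpoca = 0
    data = list(dataset)
    shuffle(data)
    for entrada, alvo in data:
        _, erro_class = mlp.treinar(entrada, alvo)
        erroClassEpoca += erro_class
    # Assumption: zero accumulated classification error means every XOR
    # pattern was classified correctly in this epoch.
    if erroClassEpoca == 0:
        print(f"Converged after {i + 1} epochs")
        break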
Example #4
from core.MLP import MLP
from random import shuffle

EPOCHS = 2000

mlp = MLP(3, 5, 2, 0.3)

# Three binary inputs mapped to two binary targets
dataset = [
    ([0, 0, 0], [1, 1]),
    ([0, 0, 1], [0, 1]),
    ([0, 1, 0], [0, 1]),
    ([0, 1, 1], [0, 1]),
    ([1, 0, 0], [1, 0]),
    ([1, 0, 1], [0, 1]),
    ([1, 1, 0], [1, 0]),
    ([1, 1, 1], [0, 1])
]

for i in range(EPOCHS):
    erroAproxEpoca = 0
    erroClassEpoca = 0
    # Shuffle a copy so the original dataset ordering is preserved
    data = list(dataset)
    shuffle(data)
    for sample in data:
        erro_aprox, erro_class = mlp.treinar(sample[0], sample[1])
        erroAproxEpoca += erro_aprox
        erroClassEpoca += erro_class
    print(f"Epoch {i + 1} \t\t| Approx. error: {erroAproxEpoca} \t\t| Class. error: {erroClassEpoca}")
Example #5
#!/usr/bin/env python3

from core.MLP import MLP
from random import shuffle
from core.data_prep.prepare_data import split_proportionally, filter_dataset

file_path = './data/breast-cancer-wisconsin/wdbc-norm.data'

data = filter_dataset(file_path, format={'input_size': 30}, normalize=False)
shuffle(data)
train_data, test_data = split_proportionally(data, 0.8)

mlp = MLP(30, 15, 1, 0.8)

epochs = 200
PLOT = False
decreases = 10
variable_ni = True

erros_class_graf = []
erros_aprox_graf = []
erros_class_graf_teste = []
erros_aprox_graf_teste = []

if not PLOT:
    print("Train dataset size: " + str(len(train_data)))
    print("Test dataset size.: " + str(len(test_data)))

plot_content = 'erroClassGraf,erroAproxGraf,erroClassGrafTeste,erroAproxGrafTeste\n'

if not PLOT: