def new_neuralnet(train_set):
    neural_net = NeuralNetwork()

    # Add the input layer at index 0
    neural_net.camadas.append(
        Layer(False, train_set.shape[1], train_set.shape[1]))
    neural_net.functions.append(identidade)
    neural_net.derivatives.append(identidade)

    # Add the hidden layer with 342 neurons at index 1
    neural_net.camadas.append(Layer(True, train_set.shape[1], 342))
    neural_net.functions.append(relu)
    neural_net.derivatives.append(reluDerivative)

    # Add the hidden layer with 180 neurons at index 2
    neural_net.camadas.append(Layer(True, 342, 180))
    neural_net.functions.append(relu)
    neural_net.derivatives.append(reluDerivative)

    # Add the output layer with 10 neurons at index 3
    neural_net.camadas.append(Layer(True, 180, 10))
    neural_net.functions.append(softmax)
    neural_net.derivatives.append(softmax_derivative)

    return neural_net
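# The activation functions referenced above (identidade, relu, reluDerivative,
# softmax, softmax_derivative) are defined elsewhere in the repo. As a hedged
# reference, typical NumPy implementations look like the sketches below; the
# "_sketch" names are deliberate so they do not shadow the real functions, and
# the repo's actual definitions may differ.
import numpy as np

def identidade_sketch(x):
    # identity: passes values (or gradients) through unchanged
    return x

def relu_sketch(x):
    # ReLU: element-wise max(0, x)
    return np.maximum(0.0, x)

def relu_derivative_sketch(x):
    # derivative of ReLU: 1 where x > 0, else 0
    return (x > 0).astype(float)

def softmax_sketch(x):
    # row-wise softmax with max-subtraction for numerical stability
    shifted = x - np.max(x, axis=-1, keepdims=True)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=-1, keepdims=True)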
def new_neuralnet(train_set):
    neural_softmax = NeuralNetwork()

    # Add the input layer at index 0
    neural_softmax.camadas.append(
        Layer(False, train_set.shape[1], train_set.shape[1]))
    neural_softmax.functions.append(identidade)
    neural_softmax.derivatives.append(identidade)

    # Add the output layer with 10 neurons at index 1
    neural_softmax.camadas.append(Layer(True, train_set.shape[1], 10))
    neural_softmax.functions.append(softmax)
    neural_softmax.derivatives.append(softmax_derivative)

    return neural_softmax
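# With no hidden layers, the variant above is effectively a softmax (multinomial
# logistic regression) baseline: the identity input layer feeds straight into a
# 10-neuron softmax output. A hedged usage sketch; the 784-feature input shape is
# an illustrative assumption, not taken from the original code:
#
#   train_set = np.random.rand(1000, 784)
#   baseline = new_neuralnet(train_set)        # input layer + 10-way softmax output
#   assert len(baseline.camadas) == 2          # index 0: input, index 1: output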
def generate_layers_linear_regression():
    input_neuron = Neuron('input_neuron', sigmoid)
    output_neuron = Neuron('output_neuron', sigmoid)
    independ_neuron = Neuron('independ_neuron', sigmoid, initial=1)

    input_neurons = [input_neuron]
    input_synapses = [
        Synapse(input_neuron, output_neuron, 1.0),
    ]
    input_layer = Layer('input_layer', input_neurons, input_synapses)
    input_layer.set_linked_synapses()

    layer_1_neurons = [independ_neuron]
    layer_1_synapses = [
        Synapse(independ_neuron, output_neuron, 1.0),
    ]
    layer_1 = Layer('layer_1', layer_1_neurons, layer_1_synapses)
    layer_1.set_linked_synapses()

    output_neurons = [output_neuron]
    output_synapses = [
        Synapse(output_neuron, output_neuron, 1.0),
    ]
    output_layer = Layer('output_layer', output_neurons, output_synapses)
    output_layer.set_linked_synapses()

    all_layers = [input_layer, layer_1, output_layer]
    return all_layers
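# The graph above wires one input neuron plus a constant bias-like neuron
# (independ_neuron, initialised to 1) into a single sigmoid output neuron, so the
# initial model computes sigmoid(w_in * x + w_bias * 1). A hedged NumPy sketch of
# that equivalent computation, using the initial Synapse weights of 1.0 declared
# above; the exact sigmoid form is an assumption about the repo's activation:
import numpy as np

def sigmoid_sketch(z):
    return 1.0 / (1.0 + np.exp(-z))

w_in, w_bias = 1.0, 1.0                    # initial Synapse weights from above
x = 0.5                                    # illustrative input value
y = sigmoid_sketch(w_in * x + w_bias * 1.0)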
TODO: use GUI to generate input file
"""
import numpy as np
import matplotlib.pyplot as plt

from LayerClass import Layer
from ExperimentClass import Experiment
from Functions import SFCalc, MakeSequence, Post

_OutputFile = "../Projects/test1"
_iter = 10

# SAMPLE DEFINITION
# Sample = [vacuum, Top Layer, ..., Substrate]
Sample = [Layer(), Layer(), Layer(), Layer()]

# Sample[i].TakeValues([["Element"], nmultilayer,
#                       Sample.density,
#                       Sample.thick,
#                       Sample.roughness,
#                       Sample.MMC,
#                       Sample.phi,
#                       Sample.gamma])
Sample[1].TakeValues([
    "W", 10,
    [0.05, 0.0653, .12],
    [5., 10., 20.],
    [0, 0, 2],
    [0, 0, 1],
    [0, 0, 90],
    [0, 0, 90]
])
Sample[2].TakeValues([
    "Si", 10,
    [0.02, 0.05, .12],
    [0, 40, 60],
    [0, 0, 2],
    [0, 0, 1],
    [0, 0, 90],
def generate_layers_xor():
    input_neuron_1 = Neuron('input_neuron_1', linear)
    input_neuron_2 = Neuron('input_neuron_2', linear)
    neuron_1_1 = Neuron('neuron_1_1', sigmoid)
    neuron_1_2 = Neuron('neuron_1_2', sigmoid)
    output_neuron = Neuron('output_neuron', sigmoid)

    input_neurons = [input_neuron_1, input_neuron_2]
    input_synapses = [
        Synapse(input_neuron_1, neuron_1_1, 0.45),
        Synapse(input_neuron_1, neuron_1_2, 0.78),
        Synapse(input_neuron_2, neuron_1_1, -0.12),
        Synapse(input_neuron_2, neuron_1_2, 0.13),
    ]
    input_layer = Layer('input_layer', input_neurons, input_synapses)
    input_layer.set_linked_synapses()

    layer_1_neurons = [
        neuron_1_1,
        neuron_1_2,
    ]
    layer_1_synapses = [
        Synapse(neuron_1_1, output_neuron, 1.5),
        Synapse(neuron_1_2, output_neuron, -2.3),
    ]
    layer_1 = Layer('layer_1', layer_1_neurons, layer_1_synapses)
    layer_1.set_linked_synapses()

    output_neurons = [output_neuron]
    output_synapses = [
        Synapse(output_neuron, output_neuron, 1.0),
    ]
    output_layer = Layer('output_layer', output_neurons, output_synapses)
    output_layer.set_linked_synapses()

    all_layers = [input_layer, layer_1, output_layer]
    return all_layers
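# Hedged usage sketch for the XOR network built above. The forward-pass helper is
# hypothetical (the propagation API lives elsewhere in the repo), so the calls are
# left as comments rather than presented as the actual interface:
#
#   layers = generate_layers_xor()
#   for x1, x2 in [(0, 0), (0, 1), (1, 0), (1, 1)]:
#       output = forward(layers, [x1, x2])     # hypothetical forward-pass helper
#       print(f"XOR({x1}, {x2}) -> {output}")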