def init_layers(self, input_nodes, total_hidden_nodes_list, output_nodes, *recurrent_mods):
    """
    Build the network's layer structure.

    Arguments:
        input_nodes: number of nodes in the input layer.
        total_hidden_nodes_list: node counts per hidden layer, e.g. [5, 3].
        output_nodes: number of nodes in the output layer.
        *recurrent_mods: recurrent-scheme configurations applied, in order,
            after the base feed-forward structure exists.  For example,
            passing ElmanSimpleRecurrent() adds the copy/context nodes that
            transfer values from the lowest hidden layer into the input
            layer.  More than one scheme may be given; each one augments
            the network built so far.
    """
    self.layers = []

    # Input layer: the requested nodes plus a bias node.
    in_layer = Layer(len(self.layers), NODE_INPUT)
    in_layer.add_nodes(input_nodes, NODE_INPUT)
    in_layer.add_node(BiasNode())
    self.layers.append(in_layer)
    self.input_layer = in_layer

    # One hidden layer per requested size, each with its own bias node.
    for node_count in total_hidden_nodes_list:
        hidden = Layer(len(self.layers), NODE_HIDDEN)
        hidden.add_nodes(node_count, NODE_HIDDEN)
        hidden.add_node(BiasNode())
        self.layers.append(hidden)

    # Output layer: no bias node.
    out_layer = Layer(len(self.layers), NODE_OUTPUT)
    out_layer.add_nodes(output_nodes, NODE_OUTPUT)
    self.layers.append(out_layer)
    self.output_layer = out_layer

    self._init_connections()

    # Apply each recurrent modification on top of the base network.
    for mod in recurrent_mods:
        mod.apply_config(self)
def test_add_node(self):
    """add_node numbers nodes sequentially and stamps each new node with
    the layer's current default activation, unless the node already has
    one of its own."""
    layer = Layer(0, 'input')

    # A new node picks up the layer's default activation at insertion time.
    layer.default_activation_type = 'linear'
    layer.add_node(Node())
    self.assertEqual(1, layer.total_nodes())
    self.assertEqual(0, layer.nodes[0].node_no)
    self.assertEqual('linear', layer.nodes[0].get_activation_type())

    # Changing the default affects only nodes added afterwards.
    layer.default_activation_type = 'sigmoid'
    layer.add_node(Node())
    self.assertEqual(2, layer.total_nodes())
    self.assertEqual(1, layer.nodes[1].node_no)
    self.assertEqual('sigmoid', layer.nodes[1].get_activation_type())

    # Bias nodes are numbered like any other node.
    layer.add_node(BiasNode())
    self.assertEqual(3, layer.total_nodes())
    self.assertEqual(2, layer.nodes[2].node_no)

    # An explicitly-set activation type survives insertion.
    tanh_node = Node()
    tanh_node.set_activation_type('tanh')
    layer.add_node(tanh_node)
    self.assertEqual('tanh', layer.nodes[3].get_activation_type())
def test_total_nodes(self):
    """total_nodes() counts every node; total_nodes(type) counts by type."""
    layer = Layer(0, 'input')
    layer.add_nodes(2, 'input')
    layer.add_nodes(2, 'copy')
    layer.add_node(BiasNode())

    # The overall count includes the bias node: 2 + 2 + 1.
    self.assertEqual(5, layer.total_nodes())

    # Per-type counts ignore nodes of every other type.
    for node_type, expected in (('input', 2), ('copy', 2), ('hidden', 0)):
        self.assertEqual(expected, layer.total_nodes(node_type))
class BiasNodeTest(unittest.TestCase):
    """Unit tests for BiasNode: its constant value, activation, and error."""

    def setUp(self):
        # Fresh bias node for every test.
        self.node = BiasNode()

    def test__init__(self):
        # A bias node is fixed at 1.0 in both raw and activated form.
        self.assertEqual(self.node.node_type, NODE_BIAS)
        self.assertEqual(self.node._value, 1.0)
        self.assertEqual(self.node._activated, 1.0)

    def test_activate(self):
        # Activation never changes the constant output.
        self.assertEqual(self.node.activate(), 1.0)

    def test_error_func(self):
        # The error function ignores its argument and always yields 1.0.
        self.assertEqual(self.node.error_func(.3), 1.0)
def setUp(self):
    # Give each test a freshly constructed bias node.
    self.node = BiasNode()
def _parse_inputfile_node(config, node_id):
    """
    Construct a node object from its input-file section.

    Reads the 'node_type' and 'activation_type' entries under *node_id*
    and returns a BiasNode, CopyNode, or plain Node configured
    accordingly.  The surrounding network structure is assumed to be in
    place already; the returned node is meant to slot into it.
    """
    activation_type = config.get(node_id, 'activation_type')
    node_type = config.get(node_id, 'node_type')

    if node_type == NODE_BIAS:
        # Bias nodes carry no configurable activation or weights.
        return BiasNode()

    if node_type == NODE_COPY:
        # Copy nodes additionally carry source and weight settings.
        node = CopyNode()
        node._source_type = config.get(node_id, 'source_type')
        node._incoming_weight = float(config.get(node_id, 'incoming_weight'))
        node._existing_weight = float(config.get(node_id, 'existing_weight'))
        node.set_activation_type(activation_type)
        return node

    # Any other type becomes a regular node tagged with that type.
    node = Node()
    node.set_activation_type(activation_type)
    node.node_type = node_type
    return node
def _parse_inputfile_node(config, node_id):
    """
    Build the node described by the config section *node_id*.

    The 'node_type' entry selects the node class (bias, copy, or regular);
    'activation_type' and, for copy nodes, the source/weight entries
    configure the instance.  The network structure must already exist for
    the returned node to be useful.
    """
    def get(option):
        # All lookups are scoped to this node's config section.
        return config.get(node_id, option)

    activation_type = get('activation_type')
    node_type = get('node_type')

    if node_type == NODE_BIAS:
        node = BiasNode()
    elif node_type == NODE_COPY:
        node = CopyNode()
        node._source_type = get('source_type')
        node._incoming_weight = float(get('incoming_weight'))
        node._existing_weight = float(get('existing_weight'))
        node.set_activation_type(activation_type)
    else:
        node = Node()
        node.set_activation_type(activation_type)
        node.node_type = node_type

    return node