def init_layers(self, input_nodes, total_hidden_nodes_list, output_nodes, *recurrent_mods):
    """
    Build the network's layer structure.

    Arguments:
    * input_nodes: number of nodes in the input layer
    * total_hidden_nodes_list: node counts for each hidden layer,
      e.g. [5, 3] creates two hidden layers
    * output_nodes: number of nodes in the output layer
    * recurrent_mods: optional configuration objects applied after the
      base network exists.  Each one mutates the network in place — for
      example ElmanSimpleRecurrent() adds copy/context nodes to the
      input layer that mirror the lowest hidden layer.  Multiple mods
      may be given; each builds on the result of the previous one.
    """
    self.layers = []

    # Input layer: requested nodes plus a bias node.
    in_layer = Layer(len(self.layers), NODE_INPUT)
    in_layer.add_nodes(input_nodes, NODE_INPUT)
    in_layer.add_node(BiasNode())
    self.layers.append(in_layer)
    self.input_layer = in_layer

    # One hidden layer per entry; each also carries a bias node.
    for node_count in total_hidden_nodes_list:
        hidden = Layer(len(self.layers), NODE_HIDDEN)
        hidden.add_nodes(node_count, NODE_HIDDEN)
        hidden.add_node(BiasNode())
        self.layers.append(hidden)

    # Output layer: no bias node here.
    out_layer = Layer(len(self.layers), NODE_OUTPUT)
    out_layer.add_nodes(output_nodes, NODE_OUTPUT)
    self.layers.append(out_layer)
    self.output_layer = out_layer

    self._init_connections()

    # Apply recurrent schemes last, so they see the finished base net.
    for mod in recurrent_mods:
        mod.apply_config(self)
def test_total_nodes(self):
    """Counting nodes overall and filtered by node type."""
    lyr = Layer(0, 'input')
    lyr.add_nodes(2, 'input')
    lyr.add_nodes(2, 'copy')
    lyr.add_node(BiasNode())

    # 2 input + 2 copy + 1 bias = 5 total.
    self.assertEqual(5, lyr.total_nodes())
    # Per-type counts only see nodes of the requested type.
    self.assertEqual(2, lyr.total_nodes('input'))
    self.assertEqual(2, lyr.total_nodes('copy'))
    self.assertEqual(0, lyr.total_nodes('hidden'))
def test_add_node(self):
    """Adding nodes assigns sequential numbers and default activations."""
    lyr = Layer(0, 'input')

    # A plain Node picks up the layer's current default activation.
    lyr.default_activation_type = 'linear'
    lyr.add_node(Node())
    self.assertEqual(1, lyr.total_nodes())
    self.assertEqual(0, lyr.nodes[0].node_no)
    self.assertEqual('linear', lyr.nodes[0].get_activation_type())

    # Changing the default affects nodes added afterwards.
    lyr.default_activation_type = 'sigmoid'
    lyr.add_node(Node())
    self.assertEqual(2, lyr.total_nodes())
    self.assertEqual(1, lyr.nodes[1].node_no)
    self.assertEqual('sigmoid', lyr.nodes[1].get_activation_type())

    # Bias nodes are numbered like any other node.
    lyr.add_node(BiasNode())
    self.assertEqual(3, lyr.total_nodes())
    self.assertEqual(2, lyr.nodes[2].node_no)

    # An explicitly-set activation type is preserved on add.
    tanh_node = Node()
    tanh_node.set_activation_type('tanh')
    lyr.add_node(tanh_node)
    self.assertEqual('tanh', lyr.nodes[3].get_activation_type())
def _parse_inputfile_layer(self, config, layer_no):
    """
    Load one layer and its nodes from the parsed input file.

    Connections are deliberately NOT loaded here: a connection object
    holds node references, so every node must exist before any
    connection can be instantiated.
    """
    section = 'layer %s' % (layer_no)
    node_ids = config.get(section, 'nodes').split(" ")
    new_layer = Layer(layer_no, config.get(section, 'layer_type'))
    for node_id in node_ids:
        new_layer.add_node(self._parse_inputfile_node(config, node_id))
    self.layers.append(new_layer)