def test_hidden():
    """One input -> one hidden -> one output with unit weights.

    Activates the net for two time steps and checks the hidden
    activation, the output, and that the returned tensor mirrors
    ``net.outputs``.
    """
    net = RecurrentNet(
        n_inputs=1,
        n_hidden=1,
        n_outputs=1,
        input_to_hidden=([(0, 0)], [1.0]),
        hidden_to_hidden=([], []),
        output_to_hidden=([], []),
        input_to_output=([], []),
        hidden_to_output=([(0, 0)], [1.0]),
        output_to_output=([], []),
        hidden_responses=[1.0],
        output_responses=[1.0],
        hidden_biases=[0],
        output_biases=[0],
        use_current_activs=True,
    )

    # (input value, expected hidden activation, expected output) per step.
    steps = [
        (0.2, 0.731, 0.975),
        (0.4, 0.881, 0.988),
    ]
    for value, want_activ, want_output in steps:
        out = net.activate([[value]])
        assert out.shape == (1, 1)
        assert_almost_equal(net.activs[0, 0], want_activ, 0.001)
        assert_almost_equal(net.outputs[0, 0], want_output, 0.001)
        assert out[0, 0] == net.outputs[0, 0]
def test_unconnected():
    """A net with no connections at all.

    With zero bias and no incoming links, the output node should sit at
    0.5 (sigmoid of 0) regardless of the input fed in.
    """
    net = RecurrentNet(
        n_inputs=1,
        n_hidden=0,
        n_outputs=1,
        input_to_hidden=([], []),
        hidden_to_hidden=([], []),
        output_to_hidden=([], []),
        input_to_output=([], []),
        hidden_to_output=([], []),
        output_to_output=([], []),
        hidden_responses=[],
        output_responses=[1.0],
        hidden_biases=[],
        output_biases=[0],
    )

    # The input is disconnected, so every step must give the same output.
    for value in (0.2, 0.4):
        out = net.activate([[value]])
        assert out.shape == (1, 1)
        assert_almost_equal(net.outputs[0, 0], 0.5, 0.001)
        assert out[0, 0] == net.outputs[0, 0]
    def create_phenotype_network_nd(self, filename=None):
        """Build a RecurrentNet phenotype from the ND ES-HyperNEAT tensors.

        The network parameters come straight out of
        ``self.es_hyperneat_nd_tensors()``; ``filename`` is accepted for
        interface compatibility and is not used here.
        """
        params = self.es_hyperneat_nd_tensors()
        # Forward exactly the keys RecurrentNet's constructor expects.
        tensor_keys = (
            "n_inputs", "n_outputs", "n_hidden",
            "output_to_hidden", "output_to_output",
            "hidden_to_hidden", "input_to_hidden",
            "input_to_output", "hidden_to_output",
            "hidden_responses", "output_responses",
            "hidden_biases", "output_biases",
        )
        return RecurrentNet(**{key: params[key] for key in tensor_keys})
def test_match_neat():
    """RecurrentNet.activate must agree with neat-python's RecurrentNetwork.

    Repeatedly mutates a pickled genome, builds both implementations, and
    compares their outputs on random inputs to within 1e-8.
    """
    with open("tests/test-genome.pkl", "rb") as f:
        genome = pickle.load(f)

    # use tanh since neat sets output nodes with no inputs to 0
    # (sigmoid would output 0.5 for us)
    def reference_tanh(z):
        return float(torch.tanh(2.5 * torch.tensor(z, dtype=torch.float64)))

    # Fix every node response so both implementations start identical.
    for node in genome.nodes.values():
        node.response = 0.5

    config = neat.Config(
        neat.DefaultGenome,
        neat.DefaultReproduction,
        neat.DefaultSpeciesSet,
        neat.DefaultStagnation,
        "tests/test-config.cfg",
    )

    for _ in range(500):
        genome.mutate(config.genome_config)

        neat_net = neat.nn.RecurrentNetwork.create(genome, config)
        # Swap every node's activation for the tanh reference above,
        # keeping all other tuple fields untouched.
        patched = [
            (node, reference_tanh, aggregation, bias, response, links)
            for node, _activation, aggregation, bias, response, links
            in neat_net.node_evals
        ]
        neat_net.node_evals[:] = patched

        torch_net = RecurrentNet.create(
            genome, config, activation=tanh_activation, prune_empty=True
        )

        for _ in range(5):
            sample = np.random.randn(12)
            expected = neat_net.activate(sample)
            actual = torch_net.activate([sample])[0].numpy()
            assert np.allclose(expected, actual, atol=1e-8)
# Beispiel #5 ("Example #5") — snippet separator left over from the
# aggregator page this code was copied from (vote count: 0).
    def create_phenotype_network_nd(self, filename=None):
        """Assemble a RecurrentNet phenotype from the ND ES-HyperNEAT search.

        Inputs and outputs receive the first node IDs; hidden nodes found
        by ``es_hyperneat_nd()`` are numbered after them. ``filename`` is
        only relevant to the (commented-out) visualization step.
        """
        in_coords = self.substrate.input_coordinates
        out_coords = self.substrate.output_coordinates

        input_nodes = range(len(in_coords))
        output_nodes = range(len(input_nodes),
                             len(input_nodes) + len(out_coords))
        next_id = len(in_coords) + len(out_coords)

        draw_connections = []
        incoming = {}

        # Map input and output coordinates to their IDs.
        coords_to_id = dict(
            zip(list(in_coords) + list(out_coords),
                list(input_nodes) + list(output_nodes)))

        # Where the magic happens.
        hidden_coords, connections = self.es_hyperneat_nd()

        # Hidden nodes get fresh IDs after the inputs and outputs.
        for cs in hidden_coords:
            coords_to_id[cs] = next_id
            next_id += 1

        # Group incoming (source-id, weight) links per target node ID.
        for cs, idx in coords_to_id.items():
            for conn in connections:
                if conn.coord2 == cs:
                    draw_connections.append(conn)
                    incoming.setdefault(idx, []).append(
                        (coords_to_id[conn.coord1], conn.weight))

        node_evals = [(idx, self.activation, sum, 0.0, 1.0, links)
                      for idx, links in incoming.items()]

        # Visualize the network?
        # if filename is not None:
        #     draw_es_nd(coords_to_id, draw_connections, filename)

        return RecurrentNet.create_from_es(input_nodes, output_nodes,
                                           node_evals)
 def create_phenotype_network_nd(self):
     """Build a RecurrentNet phenotype from the ND ES-HyperNEAT tensors.

     Identical to the tensor-based construction, but the output
     activation function is resolved from ``self.activation_string``.
     """
     params = self.es_hyperneat_nd_tensors()
     # Forward exactly the keys RecurrentNet's constructor expects.
     tensor_keys = (
         "n_inputs", "n_outputs", "n_hidden",
         "output_to_hidden", "output_to_output",
         "hidden_to_hidden", "input_to_hidden",
         "input_to_output", "hidden_to_output",
         "hidden_responses", "output_responses",
         "hidden_biases", "output_biases",
     )
     kwargs = {key: params[key] for key in tensor_keys}
     kwargs["activation"] = str_to_activation[self.activation_string]
     return RecurrentNet(**kwargs)
# Beispiel #7 ("Example #7") — snippet separator left over from the
# aggregator page this code was copied from (vote count: 0).
def make_net(genome, config, bs, agents_per_env=2):
    """Create a RecurrentNet phenotype for `genome`.

    The batch dimension passed to ``RecurrentNet.create`` is
    ``bs * agents_per_env``. The default of 2 preserves the previous
    hard-coded magic number, but callers that know their agent count
    can now pass it explicitly.
    """
    # TODO: get number of agents directly from the environment instead
    # of relying on the default here.
    return RecurrentNet.create(genome, config, bs * agents_per_env)
# Beispiel #8 ("Example #8") — snippet separator left over from the
# aggregator page this code was copied from (vote count: 0).
def make_net(genome, config, bs):
    """Create a RecurrentNet phenotype for `genome` with batch size `bs`."""
    net = RecurrentNet.create(genome, config, bs)
    return net
 def __init__(self, genome, config, batch_size=64):
     """Wrap a RecurrentNet phenotype built from `genome` and `config`.

     batch_size: batch dimension the underlying net is created with
     (default 64).
     """
     self._batch_size = batch_size  # retained so callers can query the batch size later
     self._net = RecurrentNet.create(genome, config, batch_size)