Example #1
def test_constant_non_uniform_layers(non_uniform_layers):
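    """Constant spike pattern on a non-uniform layer hierarchy, fed for two timesteps."""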
    metric = spike_count.SpikeCount()

    for sublayer in non_uniform_layers.layers[0].layers:
        sublayer.output.values = np.array([1, 0, 0, 0])
    for sublayer in non_uniform_layers.layers[1:]:
        sublayer.output.values = np.array([1, 0, 0, 0])

    metric.next(non_uniform_layers)
    metric.next(non_uniform_layers)

    result = metric.compute()
    assert result.metric_name == 'spike_count'
    assert result.global_result == {0: 15, 2: 5}
    assert result.per_layer_result == [
        [{0: 3, 2: 1}, {0: 3, 2: 1}, {0: 3, 2: 1}, {0: 3, 2: 1}],
        {0: 3, 2: 1},
    ]
Example #2
def test_constant_density_layers(layers):
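    """Constant spike pattern across uniform layers, fed for two timesteps."""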
    metric = spike_count.SpikeCount()

    for sublayer in layers.layers:
        sublayer.output.values = np.array([1, 0, 0, 0])

    metric.next(layers)
    metric.next(layers)

    result = metric.compute()
    assert result.metric_name == 'spike_count'
    assert result.global_result == {0: 12, 2: 4}
    assert result.per_layer_result == [
        {0: 3, 2: 1},
        {0: 3, 2: 1},
        {0: 3, 2: 1},
        {0: 3, 2: 1},
    ]
Example #3
def test_constant_density_neurons(neurons):
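    """Constant spike pattern on a single group of neurons, fed for two timesteps."""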
    metric = spike_count.SpikeCount()

    neurons.output.values = np.array([1, 0, 0, 0])
    metric.next(neurons)
    metric.next(neurons)

    result = metric.compute()
    assert result.metric_name == 'spike_count'
    assert result.global_result == {0: 3, 2: 1}
    assert result.per_layer_result == {0: 3, 2: 1}
Example #4
def test_changing_density_non_uniform_layers(non_uniform_layers):
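    """Spike pattern that changes between timesteps on a non-uniform layer hierarchy."""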
    metric = spike_count.SpikeCount()

    for sublayer in non_uniform_layers.layers[0].layers:
        sublayer.output.values = np.array([1, 0, 0, 0])
    for sublayer in non_uniform_layers.layers[1:]:
        sublayer.output.values = np.array([1, 1, 0, 0])
    metric.next(non_uniform_layers)

    for sublayer in non_uniform_layers.layers[0].layers:
        sublayer.output.values = np.array([1, 0, 1, 0])
    for sublayer in non_uniform_layers.layers[1:]:
        sublayer.output.values = np.array([1, 1, 0, 1])
    metric.next(non_uniform_layers)

    result = metric.compute()
    assert result.metric_name == 'spike_count'
    assert result.global_result == {0: 9, 1: 5, 2: 6}
    assert result.per_layer_result == [
        [
            {0: 2, 1: 1, 2: 1},
            {0: 2, 1: 1, 2: 1},
            {0: 2, 1: 1, 2: 1},
            {0: 2, 1: 1, 2: 1},
        ],
        {0: 1, 1: 1, 2: 2},
    ]
Example #5
import numpy as np
from protobrain import brain
from protobrain import neuron
from protobrain import sensor
from protobrain import computation
from protobrain import learning
from protobrain.encoders import numerical
from protobrain.metrics import benchmark
from protobrain.metrics import spike_count
from protobrain.metrics import spike_density

if __name__ == '__main__':
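    # Metrics to collect; SpikeDensity is commented out for this run.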
    metrics = [
        # spike_density.SpikeDensity(),
        spike_count.SpikeCount(),
    ]

    cycles = 5
    max_val = 100
    sample_inputs = [i % max_val for i in range(max_val * cycles)]

    def create_sensor(dim):
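        """Build a sensor with a cyclic numerical encoder of the given dimension."""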
        return sensor.Sensor(numerical.CyclicEncoder(0, max_val, dim))

    def create_brain(architecture,
                     sensor_dim,
                     computation=None,
                     learning=None,
                     random_seed=0):
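        """Build a feed-forward cortex from the given layer sizes, with a fixed random seed."""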
        np.random.seed(random_seed)
        cortex = neuron.FeedForward([neuron.Neurons(i) for i in architecture])
Example #6
def test_no_timesteps(neurons):
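    """compute() raises RuntimeError when next() was never called."""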
    metric = spike_count.SpikeCount()

    with pytest.raises(RuntimeError):
        metric.compute()