Example #1
# Imports assumed for this snippet (standard BindsNET module paths).
from time import time as t

import torch

from bindsnet.encoding import poisson
from bindsnet.network import Network
from bindsnet.network.nodes import Input, LIFNodes
from bindsnet.network.topology import Connection


def BindsNET_cpu(n_neurons, time):
    t0 = t()

    torch.set_default_tensor_type("torch.FloatTensor")

    t1 = t()

    network = Network()
    network.add_layer(Input(n=n_neurons), name="X")
    network.add_layer(LIFNodes(n=n_neurons), name="Y")
    network.add_connection(
        Connection(source=network.layers["X"], target=network.layers["Y"]),
        source="X",
        target="Y",
    )

    data = {"X": poisson(datum=torch.rand(n_neurons), time=time)}
    network.run(inputs=data, time=time)

    return t() - t0, t() - t1
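
# A minimal, illustrative driver for the helper above (neuron count and
# simulation length are arbitrary): prints total wall-clock time and the time
# excluding the initial tensor-type setup.
if __name__ == "__main__":
    total, sim_only = BindsNET_cpu(n_neurons=1000, time=1000)
    print("total: %.3f s, excluding setup: %.3f s" % (total, sim_only))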
Example #2
# Build network.
network = Network(dt=dt)

# Layers of neurons.
inpt = Input(shape=(80, 80), traces=True)  # Input layer
exc = LIFNodes(n=n_neurons, refrac=0, traces=True)  # Excitatory layer
readout = LIFNodes(n=16, refrac=0, traces=True)  # Readout layer
layers = {"X": inpt, "E": exc, "R": readout}

# Connections between layers.
# Input -> excitatory.
w = 0.01 * torch.rand(layers["X"].n, layers["E"].n)
input_exc_conn = Connection(
    source=layers["X"],
    target=layers["E"],
    w=w,  # use the random weights initialized above
    wmax=0.02,
    norm=0.01 * layers["X"].n,
)

# Excitatory -> readout.
exc_readout_conn = Connection(
    source=layers["E"],
    target=layers["R"],
    w=0.01 * torch.rand(layers["E"].n, layers["R"].n),
    update_rule=Hebbian,
    nu=[1e-2, 1e-2],
    norm=0.5 * layers["E"].n,
)

# Spike recordings for all layers.
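# A minimal sketch of the spike monitors referred to above, following the
# Monitor pattern used in Example #5 below; the Monitor import and the
# simulation length `time` are assumed to be defined elsewhere in the script.
spikes = {}
for name, layer in layers.items():
    spikes[name] = Monitor(layer, ["s"], time=time)
    network.add_monitor(spikes[name], name="%s_spikes" % name)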
Example #3
    nu=[1e-4, 1e-2],
    wmax=1.0,
)

# Build lateral-inhibition weights for the recurrent connection below: every
# spatial location in one filter inhibits the same location in every other
# filter (weight -100), enforcing competition between filters.
w = torch.zeros(n_filters, conv_size, conv_size, n_filters, conv_size,
                conv_size)
for fltr1 in range(n_filters):
    for fltr2 in range(n_filters):
        if fltr1 != fltr2:
            for i in range(conv_size):
                for j in range(conv_size):
                    w[fltr1, i, j, fltr2, i, j] = -100.0

w = w.view(n_filters * conv_size * conv_size,
           n_filters * conv_size * conv_size)
recurrent_conn = Connection(conv_layer, conv_layer, w=w)

network.add_layer(input_layer, name="X")
network.add_layer(conv_layer, name="Y")
network.add_connection(conv_conn, source="X", target="Y")
network.add_connection(recurrent_conn, source="Y", target="Y")

# Voltage recording for excitatory and inhibitory layers.
voltage_monitor = Monitor(network.layers["Y"], ["v"], time=time)
network.add_monitor(voltage_monitor, name="output_voltage")

if gpu:
    network.to("cuda")

# Load MNIST data.
train_dataset = MNIST(
Example #4
from bindsnet_qa.encoding import bernoulli
from bindsnet_qa.environment import GymEnvironment
from bindsnet_qa.network import Network  # assumed path, mirroring bindsnet.network
from bindsnet_qa.network.nodes import Input, LIFNodes
from bindsnet_qa.network.topology import Connection
from bindsnet_qa.pipeline.action import select_softmax

# Build network.
network = Network(dt=1.0)

# Layers of neurons.
inpt = Input(n=80 * 80, shape=[80, 80], traces=True)
middle = LIFNodes(n=100, traces=True)
out = LIFNodes(n=4, refrac=0, traces=True)

# Connections between layers.
inpt_middle = Connection(source=inpt, target=middle, wmin=0, wmax=1e-1)
middle_out = Connection(source=middle, target=out, wmin=0, wmax=1)

# Add all layers and connections to the network.
network.add_layer(inpt, name="Input Layer")
network.add_layer(middle, name="Hidden Layer")
network.add_layer(out, name="Output Layer")
network.add_connection(inpt_middle,
                       source="Input Layer",
                       target="Hidden Layer")
network.add_connection(middle_out,
                       source="Hidden Layer",
                       target="Output Layer")

# Load the Breakout environment.
environment = GymEnvironment("BreakoutDeterministic-v4")
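
# The bernoulli encoder and select_softmax imports suggest the rest of this
# example wires the network and environment into a reinforcement-learning
# pipeline. A minimal sketch, assuming the bindsnet_qa fork mirrors BindsNET's
# EnvironmentPipeline API (the import path and keyword arguments below are
# assumptions, not taken from this excerpt):
from bindsnet_qa.pipeline import EnvironmentPipeline

pipeline = EnvironmentPipeline(
    network,
    environment,
    encoding=bernoulli,              # encode observations into spike trains
    action_function=select_softmax,  # sample actions from output-layer activity
    output="Output Layer",
    time=100,                        # simulation steps per environment step
    history_length=1,
    delta=1,
)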
Example #5
# Set up GPU use.
if gpu and torch.cuda.is_available():
    torch.cuda.set_device(device_id)
    # torch.set_default_tensor_type('torch.cuda.FloatTensor')
else:
    torch.manual_seed(seed)

network = Network(dt=dt)
inpt = Input(784, shape=(1, 28, 28))
network.add_layer(inpt, name="I")
output = LIFNodes(n_neurons,
                  thresh=-52 + np.random.randn(n_neurons).astype(float))
network.add_layer(output, name="O")
C1 = Connection(source=inpt,
                target=output,
                w=0.5 * torch.randn(inpt.n, output.n))
C2 = Connection(source=output,
                target=output,
                w=0.5 * torch.randn(output.n, output.n))

network.add_connection(C1, source="I", target="O")
network.add_connection(C2, source="O", target="O")

spikes = {}
for l in network.layers:
    spikes[l] = Monitor(network.layers[l], ["s"], time=time)
    network.add_monitor(spikes[l], name="%s_spikes" % l)

voltages = {"O": Monitor(network.layers["O"], ["v"], time=time)}
network.add_monitor(voltages["O"], name="O_voltages")
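
# A minimal sketch of how these monitors are typically read back: run the
# network on Poisson-encoded input (as in Example #1) and query the recorded
# state variables. The poisson import and the random datum are illustrative;
# `time` is assumed to be defined earlier in the script.
from bindsnet.encoding import poisson

datum = torch.rand(1, 28, 28)  # stand-in for one MNIST-shaped image
network.run(inputs={"I": poisson(datum=datum, time=time)}, time=time)

output_spikes = spikes["O"].get("s")      # recorded spike trains of layer "O"
output_voltages = voltages["O"].get("v")  # recorded membrane voltages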
Example #6
from bindsnet_qa.encoding import bernoulli
from bindsnet_qa.environment import GymEnvironment
from bindsnet_qa.learning import MSTDP  # assumed path, mirroring bindsnet.learning
from bindsnet_qa.network import Network  # assumed path, mirroring bindsnet.network
from bindsnet_qa.network.nodes import Input, LIFNodes
from bindsnet_qa.network.topology import Connection
from bindsnet_qa.pipeline.action import select_softmax

# Build network.
network = Network(dt=1.0)

# Layers of neurons.
inpt = Input(n=80 * 80, shape=[80, 80], traces=True)
middle = LIFNodes(n=100, traces=True)
out = LIFNodes(n=4, refrac=0, traces=True)

# Connections between layers.
inpt_middle = Connection(source=inpt, target=middle, wmin=0, wmax=1e-1)
middle_out = Connection(
    source=middle,
    target=out,
    wmin=0,
    wmax=1,
    update_rule=MSTDP,
    nu=1e-1,
    norm=0.5 * middle.n,
)
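
# Note: unlike the fixed-weight connections in Example #4, this synapse learns
# with MSTDP (reward-modulated STDP). Weight updates are gated by a scalar
# reward supplied at simulation time, e.g. (illustrative call)
#     network.run(inputs=inputs, time=1, reward=reward)
# An environment pipeline typically forwards the environment's reward to the
# learning rule for you.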

# Add all layers and connections to the network.
network.add_layer(inpt, name="Input Layer")
network.add_layer(middle, name="Hidden Layer")
network.add_layer(out, name="Output Layer")
network.add_connection(inpt_middle,