def simulate(seed, dt):
    """
    simulate 10 progressive recall of the original
    pattern at different dt according to the STDP 
    rule found experimentally


    """
    ncells = 3000 # number of cells
    c = 0.5       # connection probability 
    a = 0.1       # activity of a pattern
    m = 50        # number of patterns to store
    g1 = 0.433    # slope of inhibitory component
    
    W = topology(ncells, c, seed)
    Z = Pattern(ncells, a)
    Y = generate(Z, m)

    J = clipped_Hebbian(Y, W)
    J = J*delta_t(dt)


    overlap = np.empty(10)
    X = Y[0] # initial pattern

    for i in range(10):
        h = np.inner(J.T, X)/float(ncells)   # net input to each cell
        spk_avg = np.mean(X)                 # mean network activity
        X = np.where(h >= g1*spk_avg, 1, 0)  # fire where input exceeds the inhibitory threshold
        overlap[i] = get_overlap(Z, X)

    return(overlap)
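
# --- Usage sketch (not part of the original listing) ---------------------
# A minimal driver for simulate(), assuming the project's helper modules
# (network, firings, plasticity) are importable so that topology, Pattern,
# generate, clipped_Hebbian, delta_t and get_overlap resolve.  The seed and
# the dt values swept here are illustrative only.
for dt in (5, 10, 20):                    # hypothetical STDP time lags (ms)
    overlap = simulate(seed=42, dt=dt)    # 10 overlap values, one per recall step
    print("dt = %3d ms -> final overlap = %.3f" % (dt, overlap[-1]))
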
    def __init__(self, n, c, a, m, g1, seed=None):
        """
        Create the connectivity matrix W and generate the 
        matrix of synaptic weigths J according to
        Gibson & Robinson 1992
        
        Arguments:
        n       -- number of cells
        c       -- connection probability
        a       -- activity of a pattern
        m       -- number of patterns to store
        g1      -- slope of the inhibitory component

        """
        self.n = n
        self.c = c
        self.a = a
        self.m = m
        self.g1 = g1

        # these are exposed only through getters
        self.W = None
        self.Z = Pattern(self.n, self.a)
        self.Y = generate(self.Z, self.m)
        self.J = None

        # self.seed = seed
        self.__call__(seed) # will update self.seed
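
# --- Context sketch (not part of the original listing) -------------------
# The __init__ above is shown without its enclosing class.  A minimal,
# hypothetical wrapper consistent with the fragment might look as follows;
# the class name, the __call__ body and the getters are assumptions, not
# code from the source.
from network import topology
from firings import Pattern, generate
from plasticity import clipped_Hebbian


class RecallNetwork(object):
    def __init__(self, n, c, a, m, g1, seed=None):
        self.n, self.c, self.a, self.m, self.g1 = n, c, a, m, g1
        self.Z = Pattern(self.n, self.a)     # original pattern
        self.Y = generate(self.Z, self.m)    # the m patterns to store
        self.W = None                        # filled in by __call__
        self.J = None
        self.__call__(seed)

    def __call__(self, seed):
        # assumed behaviour: (re)build connectivity and weights for a seed
        self.seed = seed
        self.W = topology(self.n, self.c, seed)
        self.J = clipped_Hebbian(self.Y, self.W)

    # W and J are exposed only through getters, as the comment above notes
    def get_connectivity(self):
        return self.W

    def get_weights(self):
        return self.J
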
def create_synaptic_weights(seed):
    """
    create a matrix of synaptic weigths according to
    the clipped_Hebbian rule describied in Gibson & Robinson 1992

    returns the matrix of connectivity, the original pattern and
    the matrix of synaptic weights
    """

    W = topology(ncells, c, seed)
    Z = Pattern(ncells, a)
    Y = generate(Z, m)

    J = clipped_Hebbian(Y,W)
    return(W, Z, Y, J)
def create_synaptic_weights(seed):
    """
    simulate 10 progressive recalls of a partial 
    pattern at different dt according to the STDP 
    rule found experimentally

    Returns:
    The connectivity matrix 
    The original pattern to be recall
    The matrix of synaptic weights
    """
    
    W = topology(ncells, c, seed)
    Z = Pattern(ncells, a)
    Y = generate(Z, m)

    J = clipped_Hebbian(Y, W)
    return(W, Z, Y, J)
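
# --- Usage sketch (not part of the original listing) ---------------------
# How the tuple returned by create_synaptic_weights() might be consumed for
# a single recall step.  Assumes the module-level parameters (ncells, g1)
# and imports (np, get_overlap) used elsewhere in this listing are in scope;
# the seed is illustrative.
W, Z, Y, J = create_synaptic_weights(seed=1234)

X = Y[0]                                  # start from the first stored pattern
h = np.inner(J.T, X) / float(ncells)      # net input to each cell
X = np.where(h >= g1 * np.mean(X), 1, 0)  # one recall step (threshold rule from simulate)
print("overlap after one step:", get_overlap(Z, X))
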
Example #5
# =========================================================================

from network import topology
from firings import Pattern
from firings import generate, get_valid, get_spurious, get_overlap
from plasticity import clipped_Hebbian
from plots import raster_plot

import numpy as np
import sys

myseed = int(sys.argv[1])  # read the seed from the first command-line argument
ncells = 3000
W = topology(n=ncells, c=0.5, seed=myseed)
Z = Pattern(n=ncells, a=0.1)
Y = generate(Z, m=50)

J = clipped_Hebbian(Y, W)  # 8.93 s


# Recall
X = Y[0]  # The initial state is exactly the same as the initial pattern

nrecall = 6
g1 = 0.433  # slope of the inhibitory component (same value as in simulate above)
spikes = np.empty((nrecall, ncells))

for i in range(nrecall):  # progressive recall in nrecall steps

    h = np.inner(J.T, X) / float(ncells)   # 86.8 ms per loop
    spk_avg = np.mean(X)                   # mean network activity
    X = np.where(h >= g1 * spk_avg, 1, 0)  # threshold rule as in simulate above
    spikes[i] = X                          # store this recall step
    print("recall [%d]: %d active cells" % (i, int(X.sum())))
Example #6
"""
test_clipped.py

To test the clipped_Hebbian rule
"""

import numpy as np
from firings import Pattern, generate
from plasticity import clipped_Hebbian

W = np.array(([0,1,0,0,0],
              [0,0,1,0,0],
              [0,0,0,1,0],
              [0,0,0,0,1],
              [1,0,1,0,0]),dtype=int)

Z = Pattern(n=5, a=.6)
Y = generate(Z, m=3)

J = clipped_Hebbian(Y,W)
print(J)
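
# --- Reference sketch (not part of the original listing) -----------------
# A self-contained guess at what a "clipped Hebbian" rule computes, useful
# for checking the printed J by hand: a synapse that exists in W is set to 1
# if its pre- and post-synaptic cells are co-active in at least one stored
# pattern, and the weight is clipped so repeated co-activations do not add
# up.  This is an assumption based on the rule's name and the Gibson &
# Robinson 1992 reference, not the code of plasticity.clipped_Hebbian.
def clipped_hebbian_sketch(Y, W):
    Y = np.asarray(Y, dtype=int)             # (m patterns, n cells)
    co_active = np.dot(Y.T, Y) > 0           # (n, n): co-active in any pattern
    return W * co_active.astype(int)         # keep only existing connections, clipped to {0, 1}

print(clipped_hebbian_sketch(Y, W))          # compare against the J printed above
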