Example #1
    def __init__(self, tau_plus=20.0, tau_minus=20.0, A_plus=0.01, A_minus=0.01, w_min=0.0, w_max=1.0):

        parameters="""
            tau_plus = %(tau_plus)s : postsynaptic
            tau_minus = %(tau_minus)s : postsynaptic
            A_plus = %(A_plus)s : postsynaptic
            A_minus = %(A_minus)s : postsynaptic
            w_min = %(w_min)s : postsynaptic
            w_max = %(w_max)s : postsynaptic
        """ % {'tau_plus': tau_plus, 'tau_minus':tau_minus, 'A_plus':A_plus, 'A_minus': A_minus, 'w_min': w_min, 'w_max': w_max}

        equations = """
            tau_plus  * dx/dt = -x : event-driven
            tau_minus * dy/dt = -y : event-driven
        """
        pre_spike = """
            g_target += w
            x += A_plus * w_max
            w = clip(w + y, w_min, w_max)
        """
        post_spike = """
            y -= A_minus * w_max
            w = clip(w + x, w_min, w_max)
        """

        Synapse.__init__(self, parameters=parameters, equations=equations, pre_spike=pre_spike, post_spike=post_spike,
            name="Spike-timing dependent plasticity", description="Synapse exhibiting spike-timing dependent plasticity.")
        # For reporting
        self._instantiated.append(True)
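A minimal usage sketch (an assumption, not taken from the example source), showing how a rule like the one above could be attached to a Projection between two spiking populations; the neuron model and the class name STDP are placeholders:

from ANNarchy import Neuron, Population, Projection, Uniform, compile, simulate

# Simple leaky integrate-and-fire neuron, only for illustration.
SpikingNeuron = Neuron(
    parameters="tau = 10.0 : population",
    equations="tau * dv/dt = g_exc - v",
    spike="v > 1.0",
    reset="v = 0.0"
)

pre = Population(geometry=100, neuron=SpikingNeuron)
post = Population(geometry=10, neuron=SpikingNeuron)

# target='exc' means each pre-synaptic spike increases g_exc of the
# post-synaptic neuron (the g_target += w statement in pre_spike).
proj = Projection(pre=pre, post=post, target='exc', synapse=STDP())
proj.connect_all_to_all(weights=Uniform(0.0, 1.0))

compile()
simulate(1000.0)  # w is clipped to [w_min, w_max] at every pre/post spike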
Example #2
    def __init__(self, tau_rec=100.0, tau_facil=0.01, U=0.5):

        if tau_facil <= 0.0:
            _error('tau_facil must be positive. Choose a very small value if you have to, or derive a new synapse.')
            exit(0)

        parameters = """
    tau_rec = %(tau_rec)s
    tau_facil = %(tau_facil)s
    U = %(U)s
    """ % {'tau_rec': tau_rec, 'tau_facil': tau_facil, 'U': U}
        equations = """
    dx/dt = (1 - x)/tau_rec : init = 1.0, event-driven
    du/dt = (U - u)/tau_facil : init = %(U)s, event-driven   
    """ % {'tau_rec': tau_rec, 'tau_facil': tau_facil, 'U': U}
        pre_spike="""
    g_target += w * u * x
    x *= (1 - u)
    u += U * (1 - u)
    """

        Synapse.__init__(self, parameters=parameters, equations=equations, pre_spike=pre_spike,
            name="Short-term plasticity", description="Synapse exhibiting short-term facilitation and depression, implemented using the model of Tsodyks, Markram et al.")
        # For reporting
        self._instantiated.append(True)
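For intuition, a plain-Python sketch (an assumption, not taken from the example source) of what the pre_spike block does at three successive pre-synaptic spikes, ignoring the continuous recovery of x and u between spikes:

w, U = 1.0, 0.5
u, x = U, 1.0              # u starts at U, resources x start fully available
for spike in range(3):
    released = w * u * x   # amount added to g_target
    x *= (1 - u)           # depression: part of the resources is consumed
    u += U * (1 - u)       # facilitation: release probability increases
    print(f"spike {spike}: psp={released:.3f}, u={u:.3f}, x={x:.3f}")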
Example #3
 def __init__(self):
     Synapse.__init__(self,
                      equations="",
                      psp="w * pre.r",
                      name="-",
                      description="Weighted sum of firing rates.")
     # For reporting
     self._instantiated.append(True)
Example #4
 def __init__(self):
     Synapse.__init__(self, 
         pre_spike = "g_target += w",
         name="Event-driven synapse", 
         description="Increases the post-synaptic conductance from the synaptic efficiency after each pre-synaptic spike."
     )
     # For reporting
     self._instantiated.append(True)
Example #5
 def __init__(self, psp, operation):
     Synapse.__init__(self, 
         psp=psp, operation=operation,
         name="Shared Weight", 
         description="Weight shared over all synapses of the projection."
     )
     # For reporting
     self._instantiated.append(True)
Example #6
 def __init__(self):
     Synapse.__init__(self, 
         equations="",
         psp="w * pre.r",
         name="Static weight", 
         description="Standard weighted sum of firing rates."
     )
     # For reporting
     self._instantiated.append(True)
Example #7
 def __init__(self, psp, operation):
     Synapse.__init__(
         self,
         psp=psp,
         operation=operation,
         name="Shared Weight",
         description="Weight shared over all synapses of the projection.")
     # For reporting
     self._instantiated.append(True)
Example #8
 def __init__(self):
     Synapse.__init__(
         self,
         pre_spike="g_target += w",
         name="Event-driven synapse",
         description=
         "Increases the post-synaptic conductance from the synaptic efficiency after each pre-synaptic spike."
     )
     # For reporting
     self._instantiated.append(True)
Example #9
    def __init__(self, eta=0.01):

        parameters = """
    eta = %(eta)s
    """ % {'eta': eta}

        equations = """
    dw/dt = eta * pre.r * post.r : min=0.0, explicit 
    """

        Synapse.__init__(self, parameters=parameters, equations=equations,
            name="Hebbian Plasticity", description="Simple Hebbian learning rule")
        # For reporting
        self._instantiated.append(True)
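A minimal rate-coded usage sketch (an assumption, not taken from the example source); the class name Hebb and the placeholder neuron definitions are hypothetical:

from ANNarchy import Neuron, Population, Projection, Uniform, compile, simulate

# Input neurons hold a clamped firing rate, post-synaptic neurons sum their inputs.
InputNeuron = Neuron(parameters="baseline = 0.0", equations="r = baseline")
RateNeuron = Neuron(equations="r = sum(exc) : min=0.0")

pre = Population(geometry=50, neuron=InputNeuron)
post = Population(geometry=10, neuron=RateNeuron)

proj = Projection(pre=pre, post=post, target='exc', synapse=Hebb(eta=0.01))
proj.connect_all_to_all(weights=Uniform(0.0, 0.1))

compile()
pre.baseline = 1.0   # constant pre-synaptic activity
simulate(100.0)      # dw/dt = eta * pre.r * post.r grows the weights (no upper bound)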
Example #10
    def __init__(self,
                 tau_plus=20.0,
                 tau_minus=20.0,
                 A_plus=0.01,
                 A_minus=0.01,
                 w_min=0.0,
                 w_max=1.0):

        parameters = """
            tau_plus = %(tau_plus)s : projection
            tau_minus = %(tau_minus)s : projection
            A_plus = %(A_plus)s : projection
            A_minus = %(A_minus)s : projection
            w_min = %(w_min)s : projection
            w_max = %(w_max)s : projection
        """ % {
            'tau_plus': tau_plus,
            'tau_minus': tau_minus,
            'A_plus': A_plus,
            'A_minus': A_minus,
            'w_min': w_min,
            'w_max': w_max
        }

        equations = """
            tau_plus  * dx/dt = -x : event-driven
            tau_minus * dy/dt = -y : event-driven
        """
        pre_spike = """
            g_target += w
            x += A_plus * w_max
            w = clip(w + y, w_min , w_max)
        """
        post_spike = """
            y -= A_minus * w_max
            w = clip(w + x, w_min , w_max)
        """

        Synapse.__init__(
            self,
            parameters=parameters,
            equations=equations,
            pre_spike=pre_spike,
            post_spike=post_spike,
            name="Spike-timing dependent plasticity",
            description="Synapse exhibiting spike-timing dependent plasticity."
        )
        # For reporting
        self._instantiated.append(True)
Example #11
    def __init__(self, eta=0.01, alpha=1.0):

        parameters = """
    eta = %(eta)s
    alpha = %(alpha)s
    """ % {'eta': eta, 'alpha': alpha}

        equations = """
    dw/dt = eta * ( pre.r * post.r - alpha * post.r^2 * w ) : min=0.0, explicit 
    """

        Synapse.__init__(self, parameters=parameters, equations=equations,
            name="Oja plasticity", description="Regularized Hebbian learning rule.")
        # For reporting
        self._instantiated.append(True)
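A toy NumPy illustration (an assumption, not taken from the example source) of why the alpha * post.r^2 * w term matters: unlike the plain Hebbian rule of Example #9, it keeps the weight vector bounded. Here post.r is simply computed as the weighted sum of pre.r:

import numpy as np

rng = np.random.default_rng(0)
eta, alpha = 0.01, 1.0
w = rng.uniform(0.0, 0.1, size=10)
for _ in range(5000):
    pre_r = rng.uniform(0.0, 1.0, size=10)   # pre-synaptic rates
    post_r = float(w @ pre_r)                # psp = w * pre.r, summed
    w += eta * (pre_r * post_r - alpha * post_r**2 * w)
    w = np.clip(w, 0.0, None)                # the min=0.0 flag
print(np.linalg.norm(w))                     # stays bounded, unlike plain Hebbian learning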
Example #12
    def __init__(self, eta=0.01, tau=2000.0):

        parameters = """
    eta = %(eta)s
    tau = %(tau)s
    """ % {'eta': eta, 'tau': tau}

        equations = """
        tau * dtheta/dt + theta = post.r^2 : postsynaptic, exponential
        dw/dt = eta * post.r * (post.r - theta) * pre.r : min=0.0, explicit
    """

        Synapse.__init__(self, parameters=parameters, equations=equations,
            name="IBCM", description="Intrator and Cooper (1992) learning rule.")
        # For reporting
        self._instantiated.append(True)
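A toy illustration (an assumption, not taken from the example source) of the BCM behaviour: theta is a sliding threshold, a low-pass filter of post.r^2 with time constant tau, and the weight change is depressive below it and potentiating above it:

eta, pre_r, theta = 0.01, 1.0, 0.25
for post_r in (0.1, 0.5, 1.0):
    dw = eta * post_r * (post_r - theta) * pre_r
    print(f"post.r = {post_r}: dw = {dw:+.5f}")   # LTD below theta, LTP above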
Example #13
File: Utils.py  Project: vitay/ANNarchy
    def __init__(
            self,
            psp,
            operation,
            name="Shared synapse",
            description="Weight shared over all synapses of the projection."):
        """
        """
        # Shared synapses are non-plastic.
        Synapse.__init__(self,
                         psp=psp,
                         operation=operation,
                         name=name,
                         description=description)

        # For reporting
        self._instantiated.append(True)
Example #14
    def __init__(self, eta=0.01):

        parameters = """
    eta = %(eta)s : projection
    """ % {'eta': eta}

        equations = """
    dw/dt = eta * pre.r * post.r : min=0.0, explicit 
    """

        Synapse.__init__(self,
                         parameters=parameters,
                         equations=equations,
                         name="Hebbian Plasticity",
                         description="Simple Hebbian learning rule")
        # For reporting
        self._instantiated.append(True)
Example #15
    def __init__(self, tau_rec=100.0, tau_facil=0.01, U=0.5):

        if tau_facil <= 0.0:
            _error(
                'tau_facil must be positive. Choose a very small value if you have to, or derive a new synapse.'
            )
            exit(0)

        parameters = """
    tau_rec = %(tau_rec)s
    tau_facil = %(tau_facil)s
    U = %(U)s
    """ % {
            'tau_rec': tau_rec,
            'tau_facil': tau_facil,
            'U': U
        }
        equations = """
    dx/dt = (1 - x)/tau_rec : init = 1.0, event-driven
    du/dt = (U - u)/tau_facil : init = %(U)s, event-driven   
    """ % {
            'tau_rec': tau_rec,
            'tau_facil': tau_facil,
            'U': U
        }
        pre_spike = """
    g_target += w * u * x
    x *= (1 - u)
    u += U * (1 - u)
    """

        Synapse.__init__(
            self,
            parameters=parameters,
            equations=equations,
            pre_spike=pre_spike,
            name="Short-term plasticity",
            description=
            "Synapse exhibiting short-term facilitation and depression, implemented using the model of Tsodyks, Markram et al."
        )
        # For reporting
        self._instantiated.append(True)
Example #16
    def __init__(self, eta=0.01, alpha=1.0):

        parameters = """
    eta = %(eta)s : projection
    alpha = %(alpha)s : projection
    """ % {
            'eta': eta,
            'alpha': alpha
        }

        equations = """
    dw/dt = eta * ( pre.r * post.r - alpha * post.r^2 * w ) : min=0.0, explicit 
    """

        Synapse.__init__(self,
                         parameters=parameters,
                         equations=equations,
                         name="Oja plasticity",
                         description="Regularized Hebbian learning rule.")
        # For reporting
        self._instantiated.append(True)
Example #17
    def __init__(self, eta=0.01, tau=2000.0):

        parameters = """
    eta = %(eta)s : projection
    tau = %(tau)s : projection
    """ % {
            'eta': eta,
            'tau': tau
        }

        equations = """
        tau * dtheta/dt + theta = post.r^2 : postsynaptic, exponential
        dw/dt = eta * post.r * (post.r - theta) * pre.r : min=0.0, explicit
    """

        Synapse.__init__(
            self,
            parameters=parameters,
            equations=equations,
            name="IBCM",
            description="Intrator and Cooper (1992) learning rule.")
        # For reporting
        self._instantiated.append(True)