Example #1
def _step(self, inp):
    # Recurrent settling: feed the output back as input until the
    # output stops changing (within self.delta) or max_iter is reached.
    self.outs = []  # history of outputs across iterations
    for i in range(self.max_iter):
        self.s = np.sum(self.np['w'] * inp, axis=1)  # weighted sum per neuron
        self.s += self.np['b']
        out = self.transf(self.s)  # activation function
        if i > 0 and np.abs(out - inp).sum() <= self.delta:
            break  # converged: output stopped changing
        self.outs.append(out)
        inp = out  # feed output back as the next input
    return out
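A minimal standalone sketch of the settling loop above, with np.sign standing in for self.transf and made-up weights and input; an illustration of the idea, not the neurolab class itself:

import numpy as np

def settle(w, b, inp, max_iter=10, delta=0.0):
    out = inp
    for i in range(max_iter):
        s = np.sum(w * inp, axis=1) + b  # weighted sum per neuron
        out = np.sign(s)                 # stand-in for self.transf (HardLims-like)
        if i > 0 and np.abs(out - inp).sum() <= delta:
            break                        # output stopped changing: converged
        inp = out
    return out

# Hypothetical two-neuron weights; the input settles to a fixed point.
w = np.array([[0.0, 1.0], [1.0, 0.0]])
print(settle(w, np.zeros(2), np.array([1.0, 1.0])))  # [1. 1.]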
Example #2
import numpy as np

def euclidean(A, B):
    """
    Euclidean distance function.
    See scipy.spatial.distance.cdist()

    :Example:
        >>> from neurolab import mynp as np
        >>> euclidean(np.array([0,0]), np.array([[0,1], [0, 5.5]])).tolist()
        [1.0, 5.5]

    """
    return np.sqrt(np.sum(np.square(A - B), axis=1))
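A quick check of the euclidean() function above against SciPy's reference implementation (assuming scipy is installed; cdist returns the same values as a 1 x n matrix):

import numpy as np
from scipy.spatial.distance import cdist

A = np.array([0.0, 0.0])
B = np.array([[0.0, 1.0], [0.0, 5.5]])
print(euclidean(A, B))          # [1.  5.5]
print(cdist(A[np.newaxis], B))  # [[1.  5.5]] -- same values, as a matrix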
Example #3
import numpy as np

def ff_grad_step(net, out, tar, grad=None):
    """
    Calculate the gradient with the backpropagation method
    for a feed-forward neural network, for one step

    :Parameters:
        net: Net
            Feed-forward network
        out: array, size = net.co
            Network output
        tar: array, size = net.co
            Train target
        grad: list of dict, default None
            Gradient from the previous step
    :Returns:
        grad: list of dict
            Gradient of net for each layer,
            format: [{'w': ..., 'b': ...}, {'w': ..., 'b': ...}, ...]

    """
    delt = [None] * len(net.layers)
    if grad is None:
        # Initialize a zero gradient with the same shapes as the net parameters
        grad = []
        for i, l in enumerate(net.layers):
            grad.append({})
            for k, v in l.np.items():
                grad[i][k] = np.zeros(v.shape)
    e = out - tar
    # Output layer: delta = error derivative * activation derivative
    ln = len(net.layers) - 1
    layer = net.layers[ln]
    delt[ln] = net.errorf.deriv(e) * layer.transf.deriv(layer.s, out)
    delt[ln] = delt[ln].reshape((delt[ln].size, 1))  # column vector; replaces "delt[ln].shape = delt[ln].size, 1"
    grad[ln]['w'] += delt[ln] * layer.inp
    grad[ln]['b'] += delt[ln].reshape(delt[ln].size)

    # Hidden layers, walking backwards from the last hidden layer
    for ln in range(len(net.layers) - 2, -1, -1):
        layer = net.layers[ln]
        nxt = ln + 1  # renamed from "next" to avoid shadowing the builtin

        dS = np.sum(net.layers[nxt].np['w'] * delt[nxt], axis=0)
        delt[ln] = dS * layer.transf.deriv(layer.s, layer.out)
        delt[ln] = delt[ln].reshape((delt[ln].size, 1))  # column vector

        grad[ln]['w'] += delt[ln] * layer.inp
        grad[ln]['b'] += delt[ln].reshape(delt[ln].size)
    return grad
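A hedged usage sketch: accumulating the gradient over a small batch, assuming the standard neurolab API (nl.net.newff, Net.step) and that a forward step caches the layer.inp, layer.s, and layer.out values the function above reads:

import numpy as np
import neurolab as nl

# 2 inputs in [-1, 1], one hidden layer of 3 neurons, 1 output
net = nl.net.newff([[-1, 1], [-1, 1]], [3, 1])
inputs = np.array([[0.5, -0.5], [-1.0, 1.0]])
targets = np.array([[1.0], [-1.0]])

grad = None
for inp, tar in zip(inputs, targets):
    out = net.step(inp)                       # forward pass caches layer state
    grad = ff_grad_step(net, out, tar, grad)  # accumulate {'w', 'b'} per layer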
Example #4
import numpy as np
from neurolab.core import Net
from neurolab import layer, trans

def newhop(target, transf=None, max_init=10, delta=0):
    """
    Create a Hopfield recurrent network

    :Parameters:
        target: array like (l x net.co)
            Train target patterns
        transf: func (default HardLims)
            Activation function
        max_init: int (default 10)
            Maximum number of recurrent iterations
        delta: float (default 0)
            Minimum difference between two successive outputs
            to stop the recurrent cycle
    :Returns:
        net: Net
    :Example:
        >>> net = newhop([[-1, -1, -1], [1, -1, 1]])
        >>> output = net.sim([[-1, 1, -1], [1, -1, 1]])

    """

    target = np.asfarray(target)
    assert target.ndim == 2

    ci = len(target[0])
    if transf is None:
        transf = trans.HardLims()
    l = layer.Reccurent(ci, ci, transf, max_init, delta)
    w = l.np['w']
    b = l.np['b']

    # Init weights with the Hebbian rule; no self-connections
    for i in range(ci):
        for j in range(ci):
            if i == j:
                w[i, j] = 0.0
            else:
                w[i, j] = np.sum(target[:, i] * target[:, j]) / ci
        b[i] = 0.0
    l.initf = None

    minmax = transf.out_minmax if hasattr(transf, 'out_minmax') else [-1, 1]

    net = Net([minmax] * ci, ci, [l], [[-1], [0]], None, None)
    return net
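The weight loop above implements the Hebbian outer-product rule w[i, j] = sum_l target[l, i] * target[l, j] / ci with a zero diagonal; an equivalent vectorized form, shown on the docstring's example patterns:

import numpy as np

target = np.asarray([[-1, -1, -1], [1, -1, 1]], dtype=float)
ci = target.shape[1]
w = target.T.dot(target) / ci  # sum of pattern outer products, scaled by ci
np.fill_diagonal(w, 0.0)       # no self-connections in a Hopfield net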
Example #5
import numpy as np
from neurolab.core import Net
from neurolab import layer, trans

def newhop_old(target, transf=None):
    """
    Create a Hopfield recurrent network.

    Old version; it needs tool.simhop to be used.
    It will be removed in future versions.

    :Parameters:
        target: array like (l x net.co)
            Train target patterns
        transf: func (default HardLims)
            Activation function
    :Returns:
        net: Net
    :Example:
        >>> from neurolab.tool import simhop
        >>> net = newhop_old([[-1, 1, -1], [1, -1, 1]])
        >>> output = simhop(net, [[-1, 1, -1], [1, -1, 1]])
    """

    target = np.asfarray(target)
    ci = len(target[0])
    if transf is None:
        transf = trans.HardLims()
    l = layer.Perceptron(ci, ci, transf)
    w = l.np['w']
    b = l.np['b']

    # Init weights with the Hebbian rule; no self-connections
    for i in range(ci):
        for j in range(ci):
            if i == j:
                w[i, j] = 0.0
            else:
                w[i, j] = np.sum(target[:, i] * target[:, j]) / ci
        b[i] = 0.0
    l.initf = None

    minmax = transf.out_minmax if hasattr(transf, 'out_minmax') else [-1, 1]

    net = Net([minmax] * ci, ci, [l], [[0], [0]], None, None)
    return net
Example #6
def _step(self, inp):
    # Single forward step: weighted sum plus bias, then activation
    self.s = np.sum(self.np['w'] * inp, axis=1)
    self.s += self.np['b']
    return self.transf(self.s)
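For a 1-D input the weighted sum above is just a matrix-vector product; a quick sanity check with made-up values:

import numpy as np

w = np.array([[0.2, -0.1], [0.4, 0.3]])
b = np.array([0.1, -0.2])
inp = np.array([1.0, -1.0])
assert np.allclose(np.sum(w * inp, axis=1) + b, w.dot(inp) + b)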
Example #7
def __call__(self, e):
    # Sum squared error: SSE = 0.5 * sum(e^2)
    v = 0.5 * np.sum(np.square(e))
    return v
Example #8
def __call__(self, e):
    # Mean squared error: MSE = sum(e^2) / N
    N = e.size
    v = np.sum(np.square(e)) / N
    return v
Example #9
def __call__(self, e):
    # Mean absolute error: MAE = sum(|e|) / N
    v = np.sum(np.abs(e)) / e.size
    return v
Example #10
def __call__(self, e):
    # Sum absolute error: SAE = sum(|e|)
    v = np.sum(np.abs(e))
    return v
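Examples #7-#10 compute, respectively, the sum squared error (SSE), mean squared error (MSE), mean absolute error (MAE), and sum absolute error (SAE). A worked comparison on one error vector, with the __call__ bodies written as plain expressions:

import numpy as np

e = np.array([1.0, -2.0])
print(0.5 * np.sum(np.square(e)))     # SSE: 0.5 * (1 + 4) = 2.5
print(np.sum(np.square(e)) / e.size)  # MSE: 5 / 2 = 2.5
print(np.sum(np.abs(e)) / e.size)     # MAE: 3 / 2 = 1.5
print(np.sum(np.abs(e)))              # SAE: 1 + 2 = 3.0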