Example #1
def _step(self, inp):
    """Iterate the layer on its own output until it stabilizes."""
    self.outs = []
    for i in range(self.max_iter):
        # Weighted sum of the current input plus bias, one value per neuron.
        self.s = np.sum(self.np['w'] * inp, axis=1)
        self.s += self.np['b']
        out = self.transf(self.s)
        # Stop once the output changes by no more than delta between passes.
        if i > 0 and np.abs(out - inp).sum() <= self.delta:
            break
        self.outs.append(out)
        # Feed the output back in as the next input.
        inp = out
    return out
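
This _step method appears to belong to a recurrent layer: the output is fed back in as the next input until it changes by at most delta, or max_iter passes elapse. Below is a minimal standalone sketch of the same loop; the names toy_step, W, b and the sign transfer function are illustrative assumptions, not part of the original class.

import numpy as np

def toy_step(W, b, transf, inp, max_iter=100, delta=0.0):
    # Feed the output back in as input until it stops changing.
    out = inp
    for i in range(max_iter):
        s = np.sum(W * inp, axis=1) + b   # weighted sum per neuron, plus bias
        out = transf(s)                   # apply the transfer function
        if i > 0 and np.abs(out - inp).sum() <= delta:
            break                         # converged: output is stable
        inp = out
    return out

# Two mutually reinforcing units with a sign activation settle into (1, 1).
W = np.array([[0.0, 1.0], [1.0, 0.0]])
b = np.zeros(2)
print(toy_step(W, b, np.sign, np.array([0.5, 1.0])))   # -> [1. 1.]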
Example #2
import numpy as np


def initnw(layer):
    """Nguyen-Widrow initialization of the layer's weights and biases."""
    ci = layer.ci  # number of inputs
    cn = layer.cn  # number of neurons
    w_fix = 0.7 * cn ** (1. / ci)
    w_rand = np.random.rand(cn, ci) * 2 - 1
    # Normalize each weight row to unit length
    if ci == 1:
        w_rand = w_rand / np.abs(w_rand)
    else:
        w_rand = w_rand * np.sqrt(1. / np.square(w_rand).sum(axis=1).reshape(cn, 1))

    w = w_fix * w_rand
    b = np.array([0]) if cn == 1 else w_fix * np.linspace(-1, 1, cn) * np.sign(w[:, 0])

    # Scale to the transfer function's active input range (inp_active)
    amin, amax = layer.transf.inp_active
    amin = -1 if amin == -np.inf else amin
    amax = 1 if amax == np.inf else amax

    x = 0.5 * (amax - amin)
    y = 0.5 * (amax + amin)
    w = x * w
    b = x * b + y

    # Scale to the layer's input range (inp_minmax)
    minmax = layer.inp_minmax.copy()
    minmax[np.isneginf(minmax)] = -1
    minmax[np.isinf(minmax)] = 1

    x = 2. / (minmax[:, 1] - minmax[:, 0])
    y = 1. - minmax[:, 1] * x
    w = w * x

    b += np.dot(w, y)
    layer.np['w'][:] = w
    layer.np['b'][:] = b

    return
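
A quick way to see what initnw produces is to call it on a small stand-in for the layer object it expects (ci/cn sizes, a transfer function with an inp_active range, an inp_minmax array, and an np dict holding 'w' and 'b'). The _ToyLayer class, the tansig wrapper, and the active-range bounds below are illustrative assumptions, not the library's real classes.

import numpy as np

def tansig(x):
    return np.tanh(x)
tansig.inp_active = [-2.0, 2.0]   # assumed active input region for the demo

class _ToyLayer:
    # Minimal stand-in exposing only the attributes initnw reads and writes.
    def __init__(self, ci, cn, minmax):
        self.ci, self.cn = ci, cn
        self.transf = tansig
        self.inp_minmax = np.asarray(minmax, dtype=float)
        self.np = {'w': np.zeros((cn, ci)), 'b': np.zeros(cn)}

layer = _ToyLayer(ci=2, cn=3, minmax=[[0.0, 1.0], [-5.0, 5.0]])
initnw(layer)
print(layer.np['w'])   # 3x2 weight matrix, rows spread across the input range
print(layer.np['b'])   # 3 biases spaced over the active region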
Example #3
def __call__(self, e):
    """Mean absolute error: average of |e| over all elements."""
    v = np.sum(np.abs(e)) / e.size
    return v
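
Example #3 is a mean-absolute-error functor: it averages |e| over every element of the error array. A small worked check (the residual values are made up for illustration):

import numpy as np

e = np.array([1.0, -2.0, 3.0])        # example residuals, e.g. target - output
print(np.sum(np.abs(e)) / e.size)      # (1 + 2 + 3) / 3 = 2.0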
Example #4
def __call__(self, e):
    """Sum of absolute errors: |e| summed over all elements."""
    v = np.sum(np.abs(e))
    return v
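
Example #4 is the same calculation without the division by e.size, i.e. the sum of absolute errors rather than their mean: for the residuals [1, -2, 3] used above it returns 6.0 instead of 2.0.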