Example #1
import numpy as np

def simhop(net, input, n=10):
    """
    Simulate a Hopfield network

    OLD VERSION: use newhop instead, which has a native sim method.
    This function may be removed in the future (use newhop(...).sim()).

    :Parameters:
        net: Net
            Recurrent network to simulate, e.g. a Hopfield network (newhop_old only)
        input: array-like (N x net.ci)
            Input patterns
        n: int (default 10)
            Maximum number of simulation steps

    :Return:
        output: array
            Network outputs
        full_output: list of array
            Network outputs, including the intermediate results
    :Example:
        >>> from .net import newhop_old
        >>> target = [[-1, -1, -1], [1, -1, 1]]
        >>> net = newhop_old(target)
        >>> simhop(net, target)[0]
        array([[-1., -1., -1.],
               [ 1., -1.,  1.]])

    """

    input = np.asfarray(input)

    assert input.ndim == 2
    assert input.shape[1] == net.layers[-1].co
    assert input.shape[1] == net.ci

    output = []
    for inp in input:
        net.layers[-1].out = inp
        out = []
        for i in range(n):
            o = net.step(inp)
            if i > 0 and np.all(out[-1] == o):
                break
            out.append(o)
        output.append(np.array(out))
    return np.array([r[-1] for r in output]), output
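For intuition, here is a standalone sketch of the relaxation loop simhop performs. The Hebbian weight matrix and the sign-update rule below are stand-ins for the trained net and net.step, not newhop_old's actual internals:

import numpy as np

# Store two patterns with a Hebbian outer-product rule, then relax a
# noisy probe until it stops changing. Asynchronous updates are used
# so the state provably settles into a fixed point.
targets = np.array([[1., 1., -1., -1.],
                    [1., -1., 1., -1.]])
W = sum(np.outer(t, t) for t in targets) / targets.shape[1]
np.fill_diagonal(W, 0)

state = np.array([-1., 1., -1., -1.])  # targets[0] with one bit flipped
for _ in range(10):                    # cap on sweeps, like n=10 above
    prev = state.copy()
    for i in range(state.size):        # update one unit at a time
        h = W[i] @ state
        if h != 0:
            state[i] = np.sign(h)
    if np.all(state == prev):          # fixed point reached
        break
print(state)                           # recovers targets[0]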
Example #2
    def __init__(self, ci, cn, distf=None):
        Layer.__init__(self, ci, cn, cn, {'w': (cn, ci), 'conscience': cn})
        self.transf = trans.Competitive()
        self.initf = init.midpoint
        self.out_minmax[:] = np.array([self.transf.out_minmax] * cn)
        self.np['conscience'].fill(1.0)

        # honor a user-supplied distance function, else default to euclidean
        self.distf = distf if distf is not None else euclidean
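The conscience vector initialized to 1.0 above acts as a per-neuron handicap on the distance computation. A rough standalone illustration of the winner-take-all step (the prototype values here are made up for the example; the actual conscience update happens during training):

import numpy as np

# Each neuron's Euclidean distance to the input is scaled by its
# conscience; the smallest handicapped distance wins and outputs 1.
w = np.array([[0.2, 0.8],    # 3 hypothetical prototypes (cn=3, ci=2)
              [0.5, 0.5],
              [0.9, 0.1]])
conscience = np.ones(3)

def compete(x):
    d = np.linalg.norm(w - x, axis=1) * conscience
    out = np.zeros(len(w))
    out[d.argmin()] = 1.0    # winner-take-all, as trans.Competitive does
    return out

print(compete(np.array([0.25, 0.75])))  # neuron 0 wins: [1. 0. 0.]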
Example #3
    def __init__(self, ci, cn, transf, max_iter, delta):
        Layer.__init__(self, ci, cn, cn, {'w': (cn, ci), 'b': cn})
        self.max_iter = max_iter
        self.delta = delta
        self.transf = transf
        self.outs = []
        if not hasattr(transf, 'out_minmax'):
            # probe the transfer function with extreme inputs to
            # estimate its output range
            test = np.asfarray([-1e100, -100, -10, -1, 0, 1, 10, 100, 1e100])
            val = self.transf(test)
            self.out_minmax = np.array([(val.min(), val.max())] * self.co)
        else:
            self.out_minmax = np.asfarray([transf.out_minmax] * self.co)
        self.initf = None
        self.s = np.zeros(self.cn)
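The hasattr branch above estimates an undeclared output range by probing the transfer function with extreme inputs. The same trick in isolation, with tanh standing in for transf:

import numpy as np

# Probe a transfer function with very small and very large inputs and
# take the observed extremes as its output range.
test = np.array([-1e100, -100., -10., -1., 0., 1., 10., 100., 1e100])
val = np.tanh(test)
out_minmax = np.array([(val.min(), val.max())] * 3)  # one row per neuron (co=3)
print(out_minmax)  # [[-1. 1.] [-1. 1.] [-1. 1.]]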
Example #4
    def __init__(self, ci, cn, transf):

        Layer.__init__(self, ci, cn, cn, {'w': (cn, ci), 'b': cn})

        self.transf = transf
        if not hasattr(transf, 'out_minmax'):
            test = np.asfarray([-1e100, -100, -10, -1, 0, 1, 10, 100, 1e100])
            val = self.transf(test)
            self.out_minmax = np.array([(val.min(), val.max())] * self.co)
        else:
            self.out_minmax = np.asfarray([transf.out_minmax] * self.co)
        # default init function
        self.initf = init.initwb_reg
        #self.initf = init.initwb_nw
        self.s = np.zeros(self.cn)
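The only difference from the previous layer is the default init function. An initf is simply a callable that fills the layer's parameter arrays in place; a hypothetical minimal one (the uniform range is an assumption for illustration, not initwb_reg's actual rule):

import numpy as np

def initwb_uniform(layer):
    # hypothetical initializer: uniform weights and biases in [-0.5, 0.5]
    layer.np['w'][:] = np.random.uniform(-0.5, 0.5, layer.np['w'].shape)
    layer.np['b'][:] = np.random.uniform(-0.5, 0.5, layer.np['b'].shape)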
Example #5
import numpy as np

def initnw(layer):
    """
    Nguyen-Widrow initialization function

    """
    ci = layer.ci
    cn = layer.cn
    w_fix = 0.7 * cn ** (1. / ci)
    w_rand = np.random.rand(cn, ci) * 2 - 1
    # Normalize each row of w_rand to unit length
    if ci == 1:
        w_rand = w_rand / np.abs(w_rand)
    else:
        w_rand = w_rand * np.sqrt(1. / np.square(w_rand).sum(axis=1).reshape(cn, 1))

    w = w_fix * w_rand
    b = np.array([0]) if cn == 1 else w_fix * np.linspace(-1, 1, cn) * np.sign(w[:, 0])

    # Scale to the transfer function's active input range
    amin, amax = layer.transf.inp_active
    amin = -1 if amin == -np.inf else amin
    amax = 1 if amax == np.inf else amax

    x = 0.5 * (amax - amin)
    y = 0.5 * (amax + amin)
    w = x * w
    b = x * b + y

    # Scale to the layer's input range (inp_minmax)
    minmax = layer.inp_minmax.copy()
    minmax[np.isneginf(minmax)] = -1
    minmax[np.isinf(minmax)] = 1

    x = 2. / (minmax[:, 1] - minmax[:, 0])
    y = 1. - minmax[:, 1] * x
    w = w * x

    b += np.dot(w, y)
    layer.np['w'][:] = w
    layer.np['b'][:] = b

    return
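A quick smoke test for initnw on a hypothetical mock layer (4 inputs, 5 neurons, a finite active input range, inputs scaled to [0, 1]); all names below are invented for the test, and initnw is assumed to be in scope from the definition above:

class MockTransf:
    inp_active = (-2.0, 2.0)   # finite, so no inf substitution kicks in

layer = type('MockLayer', (), {})()
layer.ci, layer.cn = 4, 5
layer.transf = MockTransf()
layer.inp_minmax = np.array([[0.0, 1.0]] * 4)
layer.np = {'w': np.zeros((5, 4)), 'b': np.zeros(5)}

initnw(layer)
print(layer.np['w'].shape, layer.np['b'].shape)  # (5, 4) (5,)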