Example #1
import numpy as np

class Layer:
    def __init__(self, ci, cn, co, property):
        self.ci = ci  # number of layer inputs
        self.cn = cn  # number of neurons
        self.co = co  # number of layer outputs
        # One array per named neuron property (e.g. weights 'w', biases 'b'),
        # allocated with the shape requested in the property dict
        self.np = {}
        for p, shape in property.items():
            self.np[p] = np.empty(shape)
        self.inp = np.zeros(ci)
        self.out = np.zeros(co)
        # Property must be changed when the Layer is initialized
        self.out_minmax = np.empty([self.co, 2])
        # Property will be changed when the Net is initialized
        self.inp_minmax = np.empty([self.ci, 2])
        self.initf = None
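
For context, a minimal usage sketch. The class name Layer matches neurolab's core layer type, but the property keys 'w' and 'b' and their shapes are assumptions inferred from the doctest in Example #3:

# Hypothetical layer: 2 inputs, 3 neurons, 3 outputs; each neuron holds
# a weight row of length ci and a scalar bias
layer = Layer(ci=2, cn=3, co=3, property={'w': (3, 2), 'b': 3})
print(layer.np['w'].shape)     # (3, 2)
print(layer.np['b'].shape)     # (3,)
print(layer.inp_minmax.shape)  # (2, 2) -- filled in when the Net is built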
Example #2
import numpy as np

# np_size and ff_grad_step are helper functions from the same neurolab
# module as this one.

def ff_grad(net, input, target):
    """
    Calculate and accumulate the gradient with the backpropagation
    method, for feed-forward neural networks, one step at a time.

    :Parameters:
        net: Net
            Feed-forward network
        input: array, shape = N,net.ci
            Input array
        target: array, shape = N,net.co
            Train target
    :Returns:
        grad: list of dict
            Gradient of net for each layer,
            format: [{'w': ..., 'b': ...}, {'w': ..., 'b': ...}, ...]
        grad_flat: array
            All neuron properties in one array (references grad)
        output: array
            Output of the network

    """
    grad_flat = np.zeros(np_size(net))
    grad = []
    st = 0
    for i, l in enumerate(net.layers):
        grad.append({})
        for k, v in l.np.items():
            # Each entry is a reshaped view into grad_flat, so the per-layer
            # dicts and the flat array share the same memory
            grad[i][k] = grad_flat[st: st + v.size].reshape(v.shape)
            st += v.size
    output = np.empty((len(target),) + net.out.shape)
    for i, (inp, tar) in enumerate(zip(input, target)):
        out = net.step(inp)
        # Accumulate this sample's gradient contribution into grad
        ff_grad_step(net, out, tar, grad)
        output[i] = out

    # Copy the accumulated gradients back into grad_flat; this is a no-op
    # while the reshaped slices are views, but keeps grad_flat correct if
    # a reshape ever returns a copy
    st = 0
    for i, l in enumerate(net.layers):
        for k, v in l.np.items():
            grad_flat[st: st + v.size] = grad[i][k].ravel()
            st += v.size
    return grad, grad_flat, output
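
A short usage sketch: the module path neurolab.tool for ff_grad is an assumption, as is the toy data; the key point is that grad_flat and the per-layer grad dicts view the same memory.

import numpy as np
import neurolab as nl
from neurolab.tool import ff_grad  # assumed location of this helper

# One input, one hidden layer of 3 neurons, one output
net = nl.net.newff([[-1, 1]], [3, 1])
inp = np.array([[0.1], [0.5], [-0.3]])
tar = np.array([[0.2], [0.7], [-0.1]])

grad, grad_flat, out = ff_grad(net, inp, tar)
# grad[0]['w'] is the hidden layer's weight gradient; grad_flat holds the
# same numbers flattened, ready for a generic optimizer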
Example #3
import numpy as np

def np_get_ref(net):
    """
    Get all network parameters in one array, as a reference:
    changing the array changes the network.

    :Example:
    >>> import neurolab as nl
    >>> net = nl.net.newff([[-1, 1]], [3, 1])
    >>> x = np_get_ref(net)
    >>> x.fill(10)
    >>> net.layers[0].np['w'].tolist()
    [[10.0], [10.0], [10.0]]

    """
    size = np_size(net)
    x = np.empty(size)
    st = 0
    for l in net.layers:
        for k, v in l.np.items():
            x[st: st + v.size] = v.flatten()
            # Rebind the layer property to a reshaped view into x, so that
            # writes to x are visible through l.np[k]
            l.np[k] = x[st: st + v.size].reshape(v.shape)
            st += v.size
    return x
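
To see why the reference semantics matter, a minimal sketch (assuming np_get_ref and np_size from this listing are in scope, e.g. via neurolab.tool): in-place updates on the flat array are immediately visible through the layers.

import numpy as np
import neurolab as nl

net = nl.net.newff([[-1, 1]], [3, 1])
x = np_get_ref(net)

# Nudge every parameter in place; the layers see the change at once
# because their arrays are now views into x
x -= 0.01 * np.sign(x)
print(net.layers[0].np['w'])  # reflects the update without any copy-back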