Example #1
import numpy as np
from nn_mlp import BackPropogationNetwork, npA

if __name__ == '__main__':
    import matplotlib
    import matplotlib.pyplot as plt

    print "Learning the sin function"
    # Per-layer transfer functions: none for the input layer, sigmoid for the
    # hidden layer, linear for the output layer.
    lFuncs = [None, BackPropogationNetwork.sgm, BackPropogationNetwork.linear]

    # Network shape (1 input, 4 hidden, 1 output); 0.4 is presumably the
    # learning rate.
    bpn = BackPropogationNetwork((1, 4, 1), lFuncs, 0.4)

    # 20 training points sampled from half a sine wave on [0, 1]
    samples = np.zeros(20, dtype=[('x', float), ('y', float)])
    samples['x'] = np.linspace(0, 1, 20)
    samples['y'] = np.sin(samples['x']*np.pi)

    inputs = np.array([samples['x']]).T   # shape (20, 1)
    target = np.array([samples['y']]).T   # shape (20, 1)
    max_iterations = 100000               # renamed from max to avoid shadowing the builtin
    #lnErr = 1e-5
    lnErr = 1e-2                          # stop once the epoch error drops below this

    for i in range(max_iterations + 1):
        err = 0.0
        # accumulate the error over one full pass through the training set
        for ii in range(len(inputs)):
            err += bpn.trainEpoch(npA(inputs[ii]), npA(target[ii]))
        if i % 2500 == 0:
        #if i % 20 == 0:
            print("iteration {0}\tError : {1:0.6f}".format(i, err))
            #bpn.NN_cout("Train_"+str(i))
        if err <= lnErr:
            break   # converged; the original snippet is cut off at this line
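
    # A minimal sketch (added for illustration): plot the training target so
    # the quality of the fit can be judged by eye. It uses only the matplotlib
    # import the example already makes; plotting the network's own predictions
    # would additionally need nn_mlp's forward-pass method, which is not shown
    # here and is not assumed.
    plt.plot(samples['x'], samples['y'], 'o-', label='target: sin(pi*x)')
    plt.legend()
    plt.show()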
Example #2
import numpy as np
import pylab as P
from nn_mlp import BackPropogationNetwork, npA

def generate1dvalues(n, width, xmin):
  # Header and imports reconstructed: the snippet begins mid-function, and the
  # name and argument order are inferred from the commented-out call
  # generate1dvalues(50,10,20) at the end of the example.
  x = []
  for i in range(n):
    x.append(i*width+xmin)
  return tuple(x)

def hist1dDraw(v1, bins):
  # draw a stepped 1-D histogram of v1
  P.hist(v1, bins, histtype='step')
  #P.hist(v1, bins, density=True, histtype='step', cumulative=True)
  P.show()
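
# A minimal usage sketch (added): draw a stepped histogram of 1000 draws from
# a normal distribution (mean 40, sigma 5) in 25 bins. Left commented out so
# importing the module stays free of side effects.
#hist1dDraw(np.random.normal(40., 5., 1000), 25)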

if __name__ == '__main__':
    import matplotlib
    import matplotlib.pyplot as plt

    # Per-layer transfer functions, as in Example #1: none for the input
    # layer, sigmoid for the hidden layer, linear for the output layer.
    lFuncs = [None, BackPropogationNetwork.sgm, BackPropogationNetwork.linear]

    # Network shape (2 inputs, 4 hidden, 1 output); 0.4 is presumably the
    # learning rate.
    bpn = BackPropogationNetwork((2, 4, 1), lFuncs, 0.4)

    # 50 points from a tight 2-D Gaussian cluster, labeled 1
    samples = np.zeros(50, dtype=[('x', float), ('x2', float), ('y', float)])
    samples['x'] = np.random.normal(40., 5., 50)
    samples['x2'] = np.random.normal(40., 5., 50)
    samples['y'] = np.ones(50)

    # 50 points from a much broader Gaussian around the same center, labeled 0
    samples2 = np.zeros(50, dtype=[('x', float), ('x2', float), ('y', float)])
    samples2['x'] = np.random.normal(40., 50., 50)
    samples2['x2'] = np.random.normal(40., 50., 50)
    samples2['y'] = np.zeros(50)

    #samples['x'] += generate1dvalues(50,10,20)
    #samples['y'] += np.zeros(50)
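
    # A minimal sketch (added; the original snippet is truncated here): stack
    # the two labeled clusters into input/target arrays shaped like the ones
    # Example #1 feeds to trainEpoch.
    all_samples = np.concatenate([samples, samples2])
    inputs = np.array([all_samples['x'], all_samples['x2']]).T   # shape (100, 2)
    target = np.array([all_samples['y']]).T                      # shape (100, 1)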