Example #1
import numpy as np

import data_io
import modules
# project-local helpers such as multimodalData, splitByClass and
# predictionErrors are assumed to be in scope as well


def get_activation_lrpmodule(activation_layer):
    """Map an activation function (identified by its __name__) to the matching LRP module."""
    layer_name = activation_layer.__name__
    activation_modules = {
        "linear": None,
        "relu": modules.Rect(),
        "softmax": modules.SoftMax(),
    }
    return activation_modules[layer_name]
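
A quick illustration of the lookup above; the plain relu function below is a hypothetical stand-in for any activation object whose __name__ matches one of the keys in the table:

def relu(x):  # hypothetical stand-in, e.g. for a framework's relu activation
    return max(x, 0.0)

print(get_activation_lrpmodule(relu))  # -> a modules.Rect instance
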
def testNN2(runs=1,
            width=3,
            data="iris.txt",
            iters=20000,
            std=True,
            trainPct=0.666):
    if data == 'gauss':
        X, y = multimodalData(numModes=4, numPerMode=30)
        # X, y = sklearn.datasets.make_classification()
        XY = np.asarray(np.hstack([X, y.reshape((X.shape[0], 1))]))
    else:
        XY = data_io.read(data)
    nclass = len(set(XY[:, -1]))  # number of classes

    # y has nclass classes (0, ..., nclass-1)
    def unary(yi):
        return [(1 if i == yi else 0) for i in range(nclass)]
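    # e.g. with nclass == 3: unary(0) -> [1, 0, 0], unary(2) -> [0, 0, 1]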

    # build a network
    u = width
    nn = modules.Sequential([
        modules.Linear(XY.shape[1] - 1, u),
        modules.Tanh(),
        modules.Linear(u, u),
        modules.Tanh(),
        modules.Linear(u, nclass),
        modules.SoftMax()
    ])
    results = {False: [], True: []}
    for run in range(runs):
        Xtrain, ytrain, Xtest, ytest = splitByClass(XY, trainPct)
        # Map into n softmax outputs
        Ytrain = np.array([unary(yi) for yi in ytrain])
        for rms in (False, True):
            # train the network.
            nn.clean()
            nn.train2(np.asarray(Xtrain),
                      np.asarray(Ytrain),
                      batchsize=1,
                      iters=iters,
                      lrate_decay_step=1000,
                      rms=rms,
                      momentum=(0.9 if rms else None))
            errors = predictionErrors(nn, Xtest, ytest)
            accuracy = 1.0 - (float(errors) / Xtest.shape[0])
            print('RMS', rms, 'Prediction accuracy', accuracy)
            results[rms].append(accuracy)
    print('Results', results)
    print('Average accuracy', 'rms=False', sum(results[False]) / runs,
          'rms=True', sum(results[True]) / runs)
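
A hypothetical invocation of the benchmark above, assuming iris.txt is readable by data_io.read; it compares training with and without the rms option over a few runs:

testNN2(runs=3, width=4, data="iris.txt", iters=5000)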
Example #3
import modules
import settings
import tools.data_loader

# user init
batchsize = 10
numbIters = 10000

# load data
X, Y = tools.data_loader.load_data()

# setup neural network
nn = modules.Sequential([
    modules.Linear(32**2, 2),
    modules.NegAbs(),
    modules.BinStep(),
    modules.Linear(2, 2),
    modules.SoftMax()
])

# train neural network
nn.train(X['train'],
         Y['train'],
         Xval=X['valid'],
         Yval=Y['valid'],
         batchsize=batchsize,
         iters=numbIters)

# build a descriptive name for the trained network
nnName = 'nn_' + nn.name + ('_(batchsize_{}_number_iterations_{})'.format(
    batchsize, numbIters))

# save neural network
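
The snippet breaks off before the actual save call; a minimal sketch of that step, assuming the toolbox's model_io.write (used in Example #5 below) and a hypothetical output path derived from nnName:

model_io.write(nn, nnName + '.txt')  # hypothetical path; any writable location works
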
def testNN(iters=10000, width=3, learning_rates=[0.005], std=True):
    # Test cases
    # X, y = xor()
    # X, y = xor_more()
    X, y = multimodalData(numModes=4)
    # X, y = sklearn.datasets.make_moons(noise=0.25)
    # X, y = sklearn.datasets.make_classification()
    # y has 2 classes (0, 1)

    # Map into 2 softmax outputs
    nclass = len(set(listify(y)))

    def unary(yi):
        return [(1 if i == yi else 0) for i in range(nclass)]

    Y = np.array([unary(yi) for yi in y])
    #build a network
    nd = X.shape[1]  # number of features
    u = width
    nn = modules.Sequential([
        modules.Linear(nd, u),
        modules.Tanh(),
        modules.Linear(u, u),
        modules.Tanh(),
        modules.Linear(u, nclass),
        modules.SoftMax()
    ])

    # train the network.
    errors = []
    for lrate in learning_rates:
        nn.clean()
        # Default does not do learning rate decay or early stopping.
        #print ('X,Y,',X,Y)

        nn.train2(np.asarray(X),
                  np.asarray(Y),
                  batchsize=1,
                  iters=iters,
                  lrate=lrate,
                  lrate_decay_step=100000)
        errors.append((lrate, predictionErrors(nn, X, y)))
    print('Errors for learning rates', errors)

    # Plot the last result
    if nd > 2: return  # only plot two-feature cases
    eps = .1
    xmin = np.min(X[:, 0]) - eps
    xmax = np.max(X[:, 0]) + eps
    ymin = np.min(X[:, 1]) - eps
    ymax = np.max(X[:, 1]) + eps
    ax = tidyPlot(xmin, xmax, ymin, ymax, xlabel='x', ylabel='y')

    def fizz(x1, x2):
        y = nn.forward(np.array([x1, x2]))
        return y[0, 1]  # class 1

    res = 30  # resolution of plot
    ima = np.array([[fizz(xi, yi) for xi in np.linspace(xmin, xmax, res)]
                    for yi in np.linspace(ymin, ymax, res)])
    im = ax.imshow(np.flipud(ima),
                   interpolation='none',
                   extent=[xmin, xmax, ymin, ymax],
                   cmap='viridis' if std else 'jet')
    plt.colorbar(im)
    if std:
        colors = [('r' if l == 0 else 'g') for l in y]
        ax.scatter(X[:, 0],
                   X[:, 1],
                   c=colors,
                   marker='o',
                   s=80,
                   edgecolors='none')
    else:
        pinds = np.where(y == 0)
        ninds = np.where(y == 1)
        plt.plot(X[pinds[0], 0], X[pinds[0], 1], 'ob')
        plt.plot(X[ninds[0], 0], X[ninds[0], 1], 'or')
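
A hypothetical invocation of the sweep above, comparing two learning rates on the built-in multimodal toy data:

testNN(iters=5000, width=4, learning_rates=[0.01, 0.005])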
Example #5
import numpy

# np may be bound to cupy for GPU execution (see the check after training below);
# fall back to plain numpy when cupy is unavailable
try:
    import cupy as np
except ImportError:
    import numpy as np

import data_io
import model_io
import modules

train_xor = True
train_mnist = True

if train_xor:
    D, N = 2, 200000

    # this is the XOR problem.
    X = np.random.rand(N, D)  # we want [NxD] data
    X = (X > 0.5) * 1.0
    Y = X[:, 0] == X[:, 1]
    Y = (np.vstack((Y, np.invert(Y))) * 1.0).T  # and [NxC] labels

    X += np.random.randn(N, D) * 0.1  # add some noise to the data.

    # build a network
    nn = modules.Sequential([
        modules.Linear(2, 3), modules.Tanh(),
        modules.Linear(3, 15), modules.Tanh(),
        modules.Linear(15, 15), modules.Tanh(),
        modules.Linear(15, 3), modules.Tanh(),
        modules.Linear(3, 2), modules.SoftMax()
    ])
    # train the network.
    nn.train(X, Y, batchsize=5, iters=1000)
    acc = np.mean(np.argmax(nn.forward(X), axis=1) == np.argmax(Y, axis=1))
    if np is not numpy:  # np is cupy here; move the scalar back to host memory
        acc = np.asnumpy(acc)
    print('model train accuracy is: {:0.4f}'.format(acc))

    #save the network
    model_io.write(nn, '../xor_net_small_1000.txt')
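
Assuming the toolbox also offers a matching model_io.read (not shown in this snippet), the saved network could later be reloaded, e.g. for LRP analysis:

nn_restored = model_io.read('../xor_net_small_1000.txt')  # assumed counterpart to model_io.write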

if train_mnist:

    Xtrain = data_io.read('../data/MNIST/train_images.npy')
    Ytrain = data_io.read('../data/MNIST/train_labels.npy')
    Xtest = data_io.read('../data/MNIST/test_images.npy')