Example No. 1
            # (Snippet begins mid-function, inside train(): for each batch,
            # accumulate the loss, backpropagate its gradient, and let the
            # optimizer update the parameters; then report the epoch loss.)
            epoch_loss += loss.loss(predicted, batch.targets)
            grad = loss.grad(predicted, batch.targets)
            net.backward(grad)
            optimizer.step(net)

        print(epoch, epoch_loss)


net = NeuralNet([
    Convolution_2D(name="conv_1", filter_shape=(10, 1, 3, 3), padding="same", stride=1),
    Avg_Pool_2D(name="avg_pool_1", size=2, stride=2),
    SpatialBatchNormalization(name="sbn_1", input_channel=10),
    ReLU(name="relu_1"),
    Convolution_2D(name="conv_2", filter_shape=(20, 10, 3, 3), padding="same", stride=1),
    Avg_Pool_2D(name="avg_pool_2", size=2, stride=2),
    SpatialBatchNormalization(name="sbn_2", input_channel=20),
    ReLU(name="relu_2"),
    Flatten(name="flat_1"),
    # 20 channels at 15x40 spatial resolution after the two 2x2 average pools
    Dense(input_size=15*40*20, output_size=100, name="dense_1"),
    BatchNormalization(name="bn_1", input_size=100),
    ReLU(name="relu_3"),
    Dense(input_size=100, output_size=40, name="dense_2"),
    BatchNormalization(name="bn_2", input_size=40),
    Sigmoid(name="sigmoid_1")
])

train(net, num_epochs=500)
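The snippet above begins mid-way through train(). A minimal sketch of what the enclosing loop presumably looks like, given the calls visible in these examples (the BatchIterator name and the default arguments are illustrative guesses, not the library's confirmed signature):

# Hedged sketch of the enclosing train() function. MSE and SGD exist in
# deeplearning.loss / deeplearning.optim; BatchIterator and the defaults
# shown here are assumptions.
def train(net, inputs=None, targets=None, num_epochs=500,
          iterator=BatchIterator(), loss=MSE(), optimizer=SGD()):
    # One pass over the data per epoch; the body matches the fragment above.
    for epoch in range(num_epochs):
        epoch_loss = 0.0
        for batch in iterator(inputs, targets):
            predicted = net.forward(batch.inputs)
            epoch_loss += loss.loss(predicted, batch.targets)
            grad = loss.grad(predicted, batch.targets)
            net.backward(grad)
            optimizer.step(net)
        print(epoch, epoch_loss)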

Example No. 2
import numpy as np

from deeplearning.nn import NeuralNet
from deeplearning.activation import Tanh, Sigmoid
from deeplearning.layers import Dense
from deeplearning.train import train

inputs = np.array([[0, 0], [1, 0], [0, 1], [1, 1], [2, 2]])

targets = np.array([[0], [1], [1], [2], [6]])

net = NeuralNet([
    Dense(name="dense_1", input_size=2, output_size=50),
    Sigmoid(name="sigmoid_1"),
    Dense(name="dense_2", input_size=50, output_size=1)
])

train(net, inputs, targets, num_epochs=10000)

for x, y in zip(inputs, targets):
    predicted = net.forward(x, training=False)
    print(x, predicted, y)
Example No. 3
import numpy as np

from deeplearning.train import train
from deeplearning.nn import NeuralNet
from deeplearning.activation import Tanh, Softmax, Sigmoid, ReLU
from deeplearning.layers import Dense, Dropout
from deeplearning.loss import CrossEntropy
from deeplearning.optim import Momentum_SGD

inputs = np.array([[0, 0], [0, 1], [1, 1], [1, 0]])

targets = np.array([[1, 0], [0, 1], [0, 1], [1, 0]])

net = NeuralNet([
    Dense(input_size=2, output_size=2, name="dense_1"),
    Softmax(name="softmax_1"),
])

train(net,
      inputs,
      targets,
      num_epochs=1000,
      loss=CrossEntropy(),
      optimizer=Momentum_SGD())

# train(net, inputs, targets, num_epochs=3000)

for x, y in zip(inputs, targets):
    predicted = net.forward(x, training=False)
    print(x, predicted, y)
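Momentum_SGD itself isn't shown in this example. A hedged sketch of the classic momentum update it presumably implements, written against the get_params_grads()/step(net) interface visible elsewhere in these examples (the lr and mu names and defaults are illustrative):

# Sketch only, not the library's confirmed code:
#   v     <- mu * v - lr * grad
#   param <- param + v
class MomentumSketch:
    def __init__(self, lr=0.01, mu=0.9):
        self.lr, self.mu = lr, mu
        self.velocities = {}  # one velocity buffer per parameter

    def step(self, net):
        for map_name, name, param, grad in net.get_params_grads():
            key = (map_name, name)
            v = self.mu * self.velocities.get(key, 0.0) - self.lr * grad
            self.velocities[key] = v
            param += v  # in-place update keeps the parameter reference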
Example No. 4
    mnist["test_images"] = mnist["test_images"].reshape((10000,1,28,28))
    mnist["training_labels"] = one_hot(mnist["training_labels"])
    mnist["test_labels"] = one_hot(mnist["test_labels"])
    return mnist
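one_hot is called above but its definition was cut off. A minimal sketch of what it presumably does (the num_classes default is an assumption for MNIST's ten digits, not the repo's confirmed code):

def one_hot(labels, num_classes=10):
    # Map integer labels, e.g. array([3, 0, ...]), to one-hot rows.
    encoded = np.zeros((labels.shape[0], num_classes))
    encoded[np.arange(labels.shape[0]), labels] = 1.0
    return encoded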



dataset = load_data()

net = Sequential(
    name="residual_net",
    layers=[
        res_block(name="res_block_1", n_channels=1, n_out_channels=5, stride=2),
        res_block(name="res_block_2", n_channels=5, n_out_channels=5, stride=1),
        Flatten(name="flat_1"),
        # 5 channels at 14x14 after the stride-2 block halves the 28x28 input
        Dense(input_size=14*14*5, output_size=10, name="dense_1"),
        BatchNormalization(name="bn_1", input_size=10),
        Softmax(name="softmax_1")
    ])
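# res_block(...) is defined earlier in this example's source. Conventionally
# a residual block computes y = F(x) + shortcut(x), where F is a small
# conv/batch-norm/ReLU stack and shortcut is the identity (or a strided 1x1
# convolution when the channel count or spatial size changes, as the
# stride=2 first block above requires). The exact composition is the repo's own.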

train(net,
      dataset["test_images"][1000:5000],
      dataset["test_labels"][1000:5000],
      num_epochs=20,
      loss=CrossEntropy(),
      optimizer=Adam())


y_test = np.argmax(dataset["test_labels"][0:1000], axis=1)
print(accurarcy(net.predict(dataset["test_images"][0:1000]), y_test))

for map_name, name, param, grad in net.get_params_grads():
    print(map_name, ",", name)
Example No. 5
# (Snippet starts after the data loading: x_train and y_train come from the
# part of the source that was cut off above.)
print(x_train.shape)
print(y_train.shape)

import numpy as np

from deeplearning.train import train
from deeplearning.nn import NeuralNet
from deeplearning.activation import Tanh, Softmax, Sigmoid, ReLU
from deeplearning.layers import Dense, Dropout, BatchNormalization
from deeplearning.loss import MSE, CrossEntropy
from deeplearning.optim import Momentum_SGD, SGD, AdaGrad, RMSProp, Adam
from deeplearning.evaluation import accurarcy
from deeplearning.reg import *

net = NeuralNet([
    Dense(input_size=12, output_size=50, name="dense_1"),
    BatchNormalization(input_size=50, name="bn_1"),
    ReLU(name="relu_1"),
    Dense(input_size=50, output_size=100, name="dense_2"),
    BatchNormalization(input_size=100, name="bn_2"),
    ReLU(name="relu_2"),
    Dense(input_size=100, output_size=2, name="dense_4"),
    BatchNormalization(input_size=2, name="bn_4"),
    Softmax(name="softmax_1")
])

#net = NeuralNet([
#    Dense(input_size=12, output_size=50,name="dense_1",regularizer=L2_Regularization(lamda=0.003)),
#    ReLU(name="relu_1"),
#    Dense(input_size=50, output_size=100,name="dense_2",regularizer=L2_Regularization(lamda=0.003)),
#    ReLU(name="relu_2"),
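The commented-out variant above trades batch normalization for L2 weight decay. A hedged sketch of what L2_Regularization(lamda=0.003) presumably contributes (the 0.5 factor and the loss/grad method names are assumptions, not the repo's confirmed interface; "lamda" is the repo's own spelling):

class L2RegularizationSketch:
    def __init__(self, lamda=0.003):
        self.lamda = lamda

    def loss(self, w):
        # Penalty added to the data loss: 0.5 * lamda * ||w||^2
        return 0.5 * self.lamda * np.sum(w * w)

    def grad(self, w):
        # Corresponding term added to the weight gradient: lamda * w
        return self.lamda * w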
Example No. 6
# (Snippet starts inside a commented-out earlier version of the network.)
#                    Dense(input_size=100, output_size=10, name="dense_2"),
#                    BatchNormalization(name="bn_2",input_size=10),
#                    Softmax(name="softmax_1")
#
#
#                ])
#

net = NeuralNet([
    Convolution_2D(name="conv_1",
                   filter_shape=(10, 1, 3, 3),
                   padding="same",
                   stride=1),
    SpatialBatchNormalization(name="sbn_1", input_channel=10),
    ReLU(name="relu_1"),
    Flatten(name="flatten_1"),
    Dense(input_size=10 * 28 * 28, output_size=10, name="dense_2"),
    BatchNormalization(name="bn_2", input_size=10),
    Softmax(name="softmax_1")
])

train(net,
      dataset["test_images"][0:1000],
      dataset["test_labels"][0:1000],
      num_epochs=20,
      loss=CrossEntropy(),
      optimizer=Adam())

y_test = np.argmax(dataset["test_labels"][0:1000], axis=1)
print(accurarcy(net.predict(dataset["test_images"][0:1000]), y_test))
Example No. 7
    # (Snippet starts inside the loop that builds the targets: row i of y is
    # filled from the code that was cut off above.)
    y[i, :] = c

from deeplearning.train import train
from deeplearning.nn import Sequential
from deeplearning.activation import Tanh, Softmax, Sigmoid, ReLU
from deeplearning.layers import Dense, Dropout, Flatten
from deeplearning.rnn import RNN, LastTimeStep
from deeplearning.loss import CrossEntropy, MSE
from deeplearning.optim import SGD, Adam

net = Sequential(name="net",
                 layers=[
                     RNN(name="rnn_1", D=8, H=16),
                     Sigmoid(name="sigmoid_1"),
                     LastTimeStep(name="last_1"),
                     Dense(name="dense_1", input_size=16, output_size=8),
                     Sigmoid(name="sigmoid_5")
                 ])
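# Shape note (assumes the usual RNN convention for these arguments):
# RNN(D=8, H=16) consumes sequences of 8-dimensional vectors and emits a
# 16-dimensional hidden state at every time step; LastTimeStep then keeps
# only the final step, so the Dense layer maps that 16-vector to 8 outputs.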

train(net, X, y, num_epochs=1000, loss=MSE(), optimizer=Adam())

for map_name, name, param, grad in net.get_params_grads():
    print(map_name, ",", name)


def binary2int(x):
    res = 0
    for i in range(x.shape[0]):
        res *= 2
        res += x[i]
    return res
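For reference, binary2int reads the bit vector most-significant-bit first; two worked examples (assuming numpy is imported as np in the part of the source that was cut off):

assert binary2int(np.array([1, 0, 1])) == 5      # 1*4 + 0*2 + 1*1
assert binary2int(np.array([1, 1, 1, 0])) == 14  # 8 + 4 + 2 + 0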