Example No. 1
def should_success_calculate_for_multiple_neurons():
    network = MultipleLayersModel([
        Layer(input_dimension=1,
              output_dimension=3,
              activation_function=LinearFunction(),
              weights_initializer=ConstShapeInitializer(
                  np.asarray([[1., 2., 3.]])),
              biases_initializer=ConstShapeInitializer(np.asarray([1., 2.,
                                                                   3.]))),
        Layer(input_dimension=3,
              output_dimension=1,
              activation_function=LinearFunction(2.),
              weights_initializer=ConstShapeInitializer(
                  np.asarray([[1.], [2.], [3.]])),
              biases_initializer=ConstShapeInitializer(np.asarray([1.])))
    ])
    X = np.asarray([[0.], [1.]])
    Y = np.asarray([[0.], [2.]])
    gradient = ApproximateGradient()
    square_error = SquareError()
    network_gradient = gradient(network, X, Y, square_error)
    expected = np.asarray([
        [
            np.asarray([[224.00000444, 448.0000166, 672.00003605]]),
            np.asarray([344.00000857, 688.0000326, 1032.00007197])
        ],
        [
            np.asarray([[568.00002073], [1136.00008012], [1704.00017987]]),
            np.asarray([344.00000834])
        ]
    ])
    equals(expected, network_gradient)
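
For context, ApproximateGradient is exercised here as a numerical check: the expected values differ from the analytic gradients only by finite-difference noise. A minimal sketch of the technique in plain NumPy, with illustrative names (f, params, eps) that are not part of the library under test:

import numpy as np

def finite_difference_gradient(f, params, eps=1e-6):
    # Central difference: dE/dp_i ~ (f(p + eps*e_i) - f(p - eps*e_i)) / (2*eps)
    grad = np.zeros_like(params)
    for i in range(params.size):
        step = np.zeros_like(params)
        step.flat[i] = eps
        grad.flat[i] = (f(params + step) - f(params - step)) / (2 * eps)
    return grad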
Example No. 2
def main():
    xor = MLP()
    xor.add_layer(Layer(2))
    xor.add_layer(Layer(2))
    xor.add_layer(Layer(1))

    xor.init_network()

    xor.patterns = [
        ([0, 0], [0]),
        ([0, 1], [1]),
        ([1, 0], [1]),
        ([1, 1], [0]),
    ]

    print(xor.train(xor.patterns))
    for inp, target in xor.patterns:
        tolerance = 0.1
        computed = xor.forward(inp)
        error = abs(computed[0] - target[0])
        print('input: %s target: %s, output: %s, error: %.4f' %
              (inp, target, computed, error))
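
XOR is the classic task that is not linearly separable, which is why the network needs the two-unit hidden layer between its input and output layers.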
Example No. 3
def should_success_calculate_for_multiple_examples():
    network = MultipleLayersModel([
        Layer(input_dimension=1,
              output_dimension=1,
              activation_function=LinearFunction(),
              weights_initializer=ConstShapeInitializer(np.asarray([[1.]])),
              biases_initializer=ConstShapeInitializer(np.asarray([2.]))),
        Layer(input_dimension=1,
              output_dimension=1,
              activation_function=LinearFunction(2.),
              weights_initializer=ConstShapeInitializer(np.asarray([[3.]])),
              biases_initializer=ConstShapeInitializer(np.asarray([0.])))
    ])
    X = np.asarray([[0.], [1.]])
    Y = np.asarray([[0.], [2.]])
    gradient = ApproximateGradient()
    square_error = SquareError()
    network_gradient = gradient(network, X, Y, square_error)
    expected = np.asarray([
        [np.asarray([[192.0000359518781]]),
         np.asarray([336.0000719681011])],
        [np.asarray([[288.0000519667192]]),
         np.asarray([112.00000793110121])]
    ])
    equals(expected, network_gradient)
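
Plugging in the constants makes the expected values easy to verify by hand: the network computes y(x) = 2 * (3 * (1 * x + 2) + 0) = 6x + 12, so the predictions for X are 12 and 18 and the residuals against Y are 12 and 16. Assuming SquareError sums (prediction - target)**2 over the batch (which is what the numbers imply), the analytic gradients are:

dE_dw1 = 2 * (12 * 6*0 + 16 * 6*1)  # = 192, since dy/dw1 = 2 * 3 * x
dE_db1 = 2 * (12 * 6 + 16 * 6)      # = 336, since dy/db1 = 2 * 3
dE_dw2 = 2 * (12 * 2*2 + 16 * 2*3)  # = 288, since dy/dw2 = 2 * (x + 2)
dE_db2 = 2 * (12 * 2 + 16 * 2)      # = 112, since dy/db2 = 2

matching the four expected entries up to the finite-difference error.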
Example No. 4
def should_be_success_calculate_output():
    layer = Layer(
        input_dimension=2,
        output_dimension=3,
        activation_function=LinearFunction(),
        weights_initializer=ConstShapeInitializer(
            np.asarray([
                [1., 2., 3.],
                [1., 2., 3.]
            ])
        ),
        biases_initializer=ConstShapeInitializer(
            np.asarray(
                [1., 2., 3.]
            )
        )
    )
    expected = np.asarray([4., 8., 12.])
    equals(expected, layer([1, 2]))
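
With an identity (linear) activation the layer output is just x @ W + b, which makes the expected vector easy to check:

import numpy as np

x = np.asarray([1, 2])
W = np.asarray([[1., 2., 3.], [1., 2., 3.]])
b = np.asarray([1., 2., 3.])
print(x @ W + b)  # [ 4.  8. 12.]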
Example No. 5
def create_tab(function_text, model, learning_rate=1e-3):
    return {
        'function': function_text,
        'model': model,
        'gradient': Gradient(),
        'error': SquareError(),
        'teacher': GradientTeacher(),
        'learning_rate': learning_rate
    }


tabs = [
    create_tab(function_text="2 * x",
               model=MultipleLayersModel([
                   Layer(input_dimension=1,
                         output_dimension=1,
                         activation_function=LinearFunction()),
                   Layer(input_dimension=1,
                         output_dimension=1,
                         activation_function=LinearFunction())
               ])),
    create_tab(function_text="50 * x",
               learning_rate=1e-4,
               model=MultipleLayersModel([
                   Layer(input_dimension=1,
                         output_dimension=1,
                         activation_function=LinearFunction()),
                   Layer(input_dimension=1,
                         output_dimension=1,
                         activation_function=LinearFunction())
               ])),
]
Example No. 6
    def init_model_params(self, dim_x):
        print('M1 model params initialize')
        dim_z = self.hyper_params['dim_z']
        n_hidden = self.hyper_params['n_hidden']  # [500, 500, 500]
        self.type_px = self.hyper_params['type_px']

        def relu(x):
            # Leaky ReLU: x for positive inputs, 0.01 * x otherwise
            return x * (x > 0) + 0.01 * x * (x <= 0)

        def softplus(x):
            return T.log(T.exp(x) + 1)

        activation = {
            'tanh': T.tanh,
            'relu': relu,
            'softplus': softplus,
            'sigmoid': T.nnet.sigmoid,
            'none': None
        }
        nonlinear_q = activation[self.hyper_params['nonlinear_q']]
        nonlinear_p = activation[self.hyper_params['nonlinear_p']]
        if self.type_px == 'bernoulli':
            output_f = activation['sigmoid']
        elif self.type_px == 'gaussian':
            output_f = activation['none']

        # Recognize model
        self.recognize_layers = [
            Layer((dim_x, n_hidden[0]), function=nonlinear_q)
        ]
        if len(n_hidden) > 1:
            self.recognize_layers += [
                Layer(shape, function=nonlinear_q)
                for shape in zip(n_hidden[:-1], n_hidden[1:])
            ]
        self.recognize_mean_layer = Layer((n_hidden[-1], dim_z), function=None)
        self.recognize_log_sigma_layer = Layer((n_hidden[-1], dim_z),
                                               function=None,
                                               w_zero=True,
                                               b_zero=True)

        # Generate Model
        self.generate_layers = [
            Layer((dim_z, n_hidden[0]), function=nonlinear_p)
        ]
        if len(n_hidden) > 1:
            self.generate_layers += [
                Layer(shape, function=nonlinear_p)
                for shape in zip(n_hidden[:-1], n_hidden[1:])
            ]
        self.generate_mean_layer = Layer((n_hidden[-1], dim_x),
                                         function=output_f)
        self.generate_log_sigma_layer = Layer((n_hidden[-1], dim_x),
                                              function=None,
                                              b_zero=True)

        # Gather the trainable parameters of both networks
        self.model_params_ = (
            [param for layer in self.generate_layers for param in layer.params]
            + self.recognize_mean_layer.params
            + self.recognize_log_sigma_layer.params
            + [param for layer in self.recognize_layers for param in layer.params]
            + self.generate_mean_layer.params)

        if self.type_px == 'gaussian':
            self.model_params_ += self.generate_log_sigma_layer.params
Example No. 7
from mlp import MultipleLayersModel, Layer
from initializers import UniformInitializer, ConstShapeInitializer
# LinearFunction is used below; its import is missing from this excerpt.

import numpy as np

__all__ = ['gradient_teacher_test']


def function(x):
    return 2 * x


network = MultipleLayersModel([
    Layer(input_dimension=1,
          output_dimension=1,
          activation_function=LinearFunction(),
          weights_initializer=ConstShapeInitializer(np.asarray([[1.]])),
          biases_initializer=ConstShapeInitializer(np.asarray([2.]))),
    Layer(input_dimension=1,
          output_dimension=1,
          activation_function=LinearFunction(2.),
          weights_initializer=ConstShapeInitializer(np.asarray([[3.]])),
          biases_initializer=ConstShapeInitializer(np.asarray([0.])))
])


def gradient_teacher_test():
    uniform = UniformInitializer(seed=2019)
    inputs = uniform((5, 1))
    outputInitializer = ConstShapeInitializer(
        [function(value) for value in inputs])
Example No. 8
nn_path = options.model
if path.isdir(nn_path):
    raise Exception(nn_path + ': Is a directory.')
if not path.exists(nn_path):
    raise Exception(nn_path + ': No such file or directory.')
"""
The format of the save is as following
[layer:[activation,n_input,neurons,weights,biases]]
"""
nn_load = np.load(nn_path, allow_pickle=True)

nn = NeuralNetwork()

# Use all provided dataset
cfg['batch_size'] = 1

# Load data set
_, _, X_test, y_test = preprocessing(cfg, csv2data(dataset_path))

for x in nn_load:
    activation = x[0]
    weights = x[3]
    bias = x[4]
    nn.add_layer(Layer(activation=activation, weights=weights, bias=bias))

y_predict = nn.feed_forward(X_test)

print('MSE: %f' % (nn.mean_squarred_error(y_predict, y_test)))
print('CEE: %f' % (nn.cross_entropy_error(y_predict, y_test)))
print('ACCURACY: %f' % (nn.accuracy(y_predict, y_test)))
Example No. 9
# Extract configuration
if path.isdir(options.configure):
    raise Exception(options.configure + ': Is a directory.')
if not path.exists(options.configure):
    raise Exception(options.configure + ': No such file or directory.')
with open(options.configure, 'r') as yfile:
    cfg = yaml.load(yfile, Loader=yaml.BaseLoader)
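# yaml.BaseLoader parses every scalar as a plain string, which is why the
# numeric settings below are wrapped in int()/float().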

# Load data set
X_train, y_train, X_test, y_test = preprocessing(cfg, csv2data(dataset_path))

# Build the network
nn = NeuralNetwork(error=options.error)
w_seed = int(cfg['weights_seed'])
b_seed = int(cfg['bias_seed'])
nn.add_layer(Layer(n_input=X_train.shape[1]),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=14, activation='tanh'),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=14, activation='tanh'),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=14, activation='tanh'),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=14, activation='tanh'),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=14, activation='tanh'),
             weights_seed=w_seed, bias_seed=b_seed)
nn.add_layer(Layer(n_input=y_train.shape[1], activation='softmax'),
             weights_seed=w_seed, bias_seed=b_seed)

# Train
mses, cees = nn.train(X_train, y_train, X_test, y_test,
                      learning_rate=float(cfg['learning_rate']),
                      max_epochs=int(cfg['epoch']),
                      mini_batch_size=int(cfg['mini_batch_size']))

if options.plot:
    nn.plot(mses, cees, learning_rate=float(cfg['learning_rate']),
            mini_batch_size=int(cfg['mini_batch_size']))

nn.save()
Example No. 10
def main():
    imres = MLP()
    num_points = 784
    imres.add_layer(Layer(num_points))
    imres.add_layer(Layer(20))
    imres.add_layer(Layer(10))

    imres.add_bias()
    imres.init_network()

    imres.step = 0.001
    imres.moment = imres.step / 10
    imres.verbose = True
    target_error = 0.01

    imres.patterns = []
    imres._patterns = []
    imres.test_patterns = []
    imres._test_patterns = []

    def norm(inp):
        # Scale raw 0-255 pixel values into the 0-1 range
        return [x / 255 for x in inp]

    mn = MNIST('./mnist/data/')
    samples, labels = mn.load_testing()
    for i in range(100):
        outvect = [0] * 10
        outvect[labels[i]] = 1
        imres.patterns.append((samples[i], outvect))
        imres._patterns.append((samples[i], labels[i], outvect))

    for i in range(100, 200):
        outvect = [0] * 10
        outvect[labels[i]] = 1
        imres.test_patterns.append((samples[i], outvect))
        imres._test_patterns.append((samples[i], labels[i], outvect))

    print('Training samples: %d' % len(imres.patterns))
    print('Testing samples: %d' % len(imres.test_patterns))
    print('Target error: %.4f' % target_error)

    final_err, steps = imres.train_target(imres.patterns,
                                          target_error=target_error)

    print('Training done in %d steps with final error of %.6f' %
          (steps, final_err))

    print('----- Detailed test output -----')
    total_tests = len(imres._test_patterns)
    total_fails = 0
    for inp, num, target in imres._test_patterns:
        computed = imres.run(inp)
        error = abs(computed[0] - target[0])
        computed = [round(x, 4) for x in computed]
        # Predicted digit = index of the strongest output unit
        pos = computed.index(max(computed))

        if num != pos:
            total_fails += 1
        print('in: %d, out: %d' % (num, pos))
        print('target: %s \noutput: %s' % (target, computed))

    print('-----')
    print('Testing done - %d of %d samples classified incorrectly' %
          (total_fails, total_tests))
Example No. 11
    def init_model_params(self, dim_x, dim_y):
        print('M2 model params initialize')

        dim_z = self.hyper_params['dim_z']
        n_hidden = self.hyper_params['n_hidden']  # [500, 500, 500]
        n_hidden_recognize = n_hidden
        n_hidden_generate = n_hidden[::-1]

        self.type_px = self.hyper_params['type_px']

        activation = {
            'tanh': T.tanh,
            'relu': self.relu,
            'softplus': self.softplus,
            'sigmoid': T.nnet.sigmoid,
            'none': self.identify,
        }

        self.nonlinear_q = activation[self.hyper_params['nonlinear_q']]
        self.nonlinear_p = activation[self.hyper_params['nonlinear_p']]
        if self.type_px == 'bernoulli':
            output_f = activation['sigmoid']
        elif self.type_px == 'gaussian':
            output_f = activation['none']

        # Recognize model
        self.recognize_layers = [
            Layer(param_shape=(dim_x, n_hidden_recognize[0]),
                  function=self.identify,
                  nonbias=True),
            Layer(param_shape=(dim_y, n_hidden_recognize[0]),
                  function=self.identify)
        ]
        if len(n_hidden_recognize) > 1:
            self.recognize_layers += [
                Layer(param_shape=shape, function=self.nonlinear_q)
                for shape in zip(n_hidden_recognize[:-1],
                                 n_hidden_recognize[1:])
            ]
        self.recognize_mean_layer = Layer(
            param_shape=(n_hidden_recognize[-1], dim_z),
            function=self.identify)
        self.recognize_log_var_layer = Layer(
            param_shape=(n_hidden_recognize[-1], dim_z),
            function=self.identify,
            w_zero=True,
            b_zero=True)

        # Generate Model
        self.generate_layers = [
            Layer((dim_z, n_hidden_generate[0]),
                  function=self.identify,
                  nonbias=True),
            Layer((dim_y, n_hidden_generate[0]), function=self.identify),
        ]
        if len(n_hidden) > 1:
            self.generate_layers += [
                Layer(param_shape=shape, function=self.nonlinear_p)
                for shape in zip(n_hidden_generate[:-1], n_hidden_generate[1:])
            ]
        self.generate_mean_layer = Layer(
            param_shape=(n_hidden_generate[-1], dim_x),
            function=output_f)
        self.generate_log_var_layer = Layer(
            param_shape=(n_hidden_generate[-1], dim_x),
            function=self.identify,
            b_zero=True)

        # Add all parameters
        self.model_params_ = (
            [param for layer in self.recognize_layers for param in layer.params]
            + self.recognize_mean_layer.params
            + self.recognize_log_var_layer.params
            + [param for layer in self.generate_layers for param in layer.params]
            + self.generate_mean_layer.params)

        if self.type_px == 'gaussian':
            self.model_params_ += self.generate_log_var_layer.params