Example 1
 def test_result(self):
     iterable = range(14)
     batches = batched(iterable, 3)
     batches = list(batches)
     assert len(batches) == 5
     assert len(batches[0]) == 3
     assert len(batches[-1]) == 2
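
The batched helper exercised by this test is not shown on this page. Below is a minimal sketch that is consistent with the examples here, assuming only the behavior the tests check: a lazy generator that yields lists of at most the requested size, with a shorter final batch.

def batched(iterable, size):
    """Yield lists of at most `size` items from `iterable`, lazily."""
    batch = []
    for element in iterable:
        batch.append(element)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch

With this sketch, batched(range(14), 3) yields five batches of lengths 3, 3, 3, 3 and 2, matching the assertions above.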
Example 2
 def test_result(self):
     # pylint: disable=redefined-variable-type
     iterable = range(14)
     batches = batched(iterable, 3)
     batches = list(batches)
     assert len(batches) == 5
     assert len(batches[0]) == 3
     assert len(batches[-1]) == 2
Example 3
 def __call__(self):
     """Train the model and visualize progress."""
     print('Start training')
     repeats = repeated(self.problem.dataset.training, self.problem.epochs)
     batches = batched(repeats, self.problem.batch_size)
     if self.visual:
         self.window.start(functools.partial(self._train_visual, batches))
     else:
         self._train(batches)
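
The training entry point above chains two helpers: repeated streams the training set once per epoch, and batched cuts that stream into mini-batches. repeated is not defined on this page; a minimal sketch, assuming it simply replays a re-iterable dataset a fixed number of times:

def repeated(iterable, times):
    """Yield the items of `iterable` over again, for `times` passes in total."""
    for _ in range(times):
        yield from iterable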
Example 4
 def __call__(self, weights, examples):
     # Split the examples into one batch per worker.
     batch_size = int(math.ceil(len(examples) / self.workers))
     batches = list(batched(examples, batch_size))
     # Weight each partial gradient by the relative size of its batch;
     # only the last batch can be smaller than batch_size.
     sizes = [len(x) / batch_size for x in batches]
     sizes = [x / sum(sizes) for x in sizes]
     assert len(batches) <= self.workers
     assert sum(sizes) == 1
     # Compute the partial gradients in parallel and combine them as a weighted sum.
     compute = functools.partial(self.backprop, weights)
     gradients = self.pool.map(compute, batches)
     return sum(x * y for x, y in zip(gradients, sizes))
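
To make the weighting concrete, here is a small worked example; the numbers are illustrative only, not taken from the project. With 10 examples and 3 workers, the batch size is 4, the batches have lengths 4, 4 and 2, and the partial gradients end up weighted 0.4, 0.4 and 0.2.

import math

examples = list(range(10))
workers = 3
batch_size = int(math.ceil(len(examples) / workers))  # 4
lengths = [4, 4, 2]                                    # lengths of the resulting batches
sizes = [n / batch_size for n in lengths]              # [1.0, 1.0, 0.5]
sizes = [s / sum(sizes) for s in sizes]                # [0.4, 0.4, 0.2]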
Example 5
 def test_generator(self):
     iterable = MockGenerator([1, 2, 3])
     batches = batched(iterable, 3)
     assert iterable.evaluated == 0
     list(batches)
     assert iterable.evaluated == 3
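
MockGenerator is a test helper that is not part of the snippet. A minimal sketch, assuming it merely wraps a list and counts how many items have been pulled from it, which is all the test relies on:

class MockGenerator:

    def __init__(self, items):
        self.items = items
        self.evaluated = 0

    def __iter__(self):
        # Count each item as it is consumed by the caller.
        for item in self.items:
            self.evaluated += 1
            yield item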
Example 6
    # Define model and initialize weights
    network = Network([
        Layer(len(problem.dataset.training[0].data), Linear),
        Layer(700, Relu),
        Layer(500, Relu),
        Layer(300, Relu),
        Layer(len(problem.dataset.training[0].target), Sigmoid)
    ])
    weights = Matrices(network.shapes)
    weights.flat = np.random.normal(0, problem.weight_scale, len(weights.flat))

    # Classes needed during training
    backprop = ParallelBackprop(network, problem.cost)
    momentum = Momentum()
    decent = GradientDecent()
    decay = WeightDecay()
    plot = Plot()

    # Train the model
    repeats = repeated(problem.dataset.training, problem.training_rounds)
    batches = batched(repeats, problem.batch_size)
    for index, batch in enumerate(batches):
        gradient = backprop(weights, batch)
        gradient = momentum(gradient, problem.momentum)
        weights = decent(weights, gradient, problem.learning_rate)
        weights = decay(weights, problem.weight_decay)
        # Show progress
        plot(compute_costs(network, weights, problem.cost, batch))
        every(problem.evaluate_every // problem.batch_size, index, evaluate,
            index, network, weights, problem.dataset.testing)
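
The loop relies on the helpers every and compute_costs, which are not defined on this page. As an illustration only, every could be a small scheduling helper that fires a callback whenever the step index reaches a multiple of the given interval; the signature below is inferred from the call site and may differ from the project's actual helper.

def every(times, step, function, *args, **kwargs):
    # Invoke `function` every `times` steps (assumed semantics).
    if times and (step + 1) % times == 0:
        function(*args, **kwargs)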
Example 7
 def __call__(self, weights, examples):
     # Split the examples into one batch per worker, rounding up.
     batch_size = (len(examples) + self.workers - 1) // self.workers
     batches = list(batched(examples, batch_size))
     # Compute the partial gradients in parallel and scale their sum by the batch size.
     compute = functools.partial(self.backprop, weights)
     gradients = self.pool.map(compute, batches)
     return sum(gradients) / batch_size