mnist_ex2_solution.py
#!/usr/bin/env python
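"""Train a dropout-regularized MLP on MNIST with Blocks and Fuel.

Solution to the second exercise of the Blocks tutorial: a Tanh MLP with two
hidden layers is trained by SGD on a dropout-perturbed cost, while the
undistorted cost and misclassification rate are monitored on the test set.
"""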
import logging
from argparse import ArgumentParser

from theano import tensor

from blocks.algorithms import GradientDescent, Scale
from blocks.bricks import MLP, Tanh, Softmax
from blocks.bricks.cost import CategoricalCrossEntropy, MisclassificationRate
from blocks.extensions import FinishAfter, Timing, Printing
from blocks.extensions.monitoring import (DataStreamMonitoring,
                                          TrainingDataMonitoring)
from blocks.extensions.saveload import Checkpoint
from blocks.filter import VariableFilter
from blocks.graph import ComputationGraph, apply_dropout
from blocks.initialization import IsotropicGaussian, Constant
from blocks.main_loop import MainLoop
from blocks.model import Model
from blocks.monitoring import aggregation
from blocks.roles import INPUT
from fuel.datasets import MNIST
from fuel.schemes import SequentialScheme
from fuel.streams import DataStream
from fuel.transformers import Flatten

try:
    from blocks.extras.extensions.plot import Plot
    BLOCKS_EXTRAS_AVAILABLE = True
except ImportError:
    BLOCKS_EXTRAS_AVAILABLE = False


def main(save_to, num_epochs):
    mlp = MLP([Tanh(), Tanh(), Softmax()], [784, 100, 100, 10],
              weights_init=IsotropicGaussian(0.01),
              biases_init=Constant(0))
    mlp.initialize()
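
    # Symbolic inputs: one flattened image per row of `features`, integer
    # class labels in `targets`.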
    x = tensor.matrix('features')
    y = tensor.lmatrix('targets')
    probs = mlp.apply(tensor.flatten(x, outdim=2))
    cost = CategoricalCrossEntropy().apply(y.flatten(), probs)
    error_rate = MisclassificationRate().apply(y.flatten(), probs)
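
    # Build the computation graph, then apply dropout: 50% on the inputs to
    # the second and third linear layers, 10% on the image input itself. The
    # undistorted graph is kept for test-set monitoring.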
    cg = ComputationGraph([cost, error_rate])
    cost.name = 'final_cost'
    for_dropout = VariableFilter(
        roles=[INPUT],
        bricks=mlp.linear_transformations[1:])(cg.variables)
    dropout_graph = apply_dropout(cg, for_dropout, 0.5)
    dropout_graph = apply_dropout(dropout_graph, [x], 0.1)
    dropout_cost, dropout_error_rate = dropout_graph.outputs
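
    # Fuel dataset wrappers; the Flatten transformer below reshapes the
    # (1, 28, 28) images into 784-dimensional vectors.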
    mnist_train = MNIST(("train",))
    mnist_test = MNIST(("test",))
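
    # Plain minibatch SGD on the dropout-regularized cost.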
    algorithm = GradientDescent(
        cost=dropout_cost, parameters=cg.parameters,
        step_rule=Scale(learning_rate=0.1))
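
    # Extensions: timing, stopping after `num_epochs`, monitoring of the
    # clean cost/error on the test set and of the dropout cost on the
    # training set, checkpointing, and per-epoch printing.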
    extensions = [Timing(),
                  FinishAfter(after_n_epochs=num_epochs),
                  DataStreamMonitoring(
                      [cost, error_rate],
                      Flatten(
                          DataStream.default_stream(
                              mnist_test,
                              iteration_scheme=SequentialScheme(
                                  mnist_test.num_examples, 500)),
                          which_sources=('features',)),
                      prefix="test"),
                  TrainingDataMonitoring(
                      [dropout_cost, dropout_error_rate,
                       aggregation.mean(algorithm.total_gradient_norm)],
                      prefix="train",
                      after_epoch=True),
                  Checkpoint(save_to),
                  Printing()]

    if BLOCKS_EXTRAS_AVAILABLE:
        extensions.append(Plot(
            'MNIST example',
            channels=[
                ['test_final_cost',
                 'test_misclassificationrate_apply_error_rate'],
                ['train_total_gradient_norm']]))
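
    # Assemble and run the main loop over minibatches of 50 training images.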
    main_loop = MainLoop(
        algorithm,
        Flatten(
            DataStream.default_stream(
                mnist_train,
                iteration_scheme=SequentialScheme(
                    mnist_train.num_examples, 50)),
            which_sources=('features',)),
        model=Model(dropout_cost),
        extensions=extensions)

    main_loop.run()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    parser = ArgumentParser("An example of training an MLP on"
                            " the MNIST dataset.")
    parser.add_argument("--num-epochs", type=int, default=2,
                        help="Number of training epochs to do.")
    parser.add_argument("save_to", default="mnist.pkl", nargs="?",
                        help=("Destination to save the state of the training "
                              "process."))
    args = parser.parse_args()
    main(args.save_to, args.num_epochs)
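
# A sketch of how this script is typically run, assuming Blocks and Fuel are
# installed and the MNIST data has been prepared (for example with
# `fuel-download mnist` followed by `fuel-convert mnist`, with FUEL_DATA_PATH
# pointing at the result):
#
#     python mnist_ex2_solution.py --num-epochs 5 mnist.pkl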