# Test.py
#!/usr/bin/env python
# coding: utf-8

# Notebook-export script: a sequence of hand-run MNIST training experiments
# following Michael Nielsen's "Neural Networks and Deep Learning" (chapters
# 1 and 3). The "# In[n]:" markers are the original Jupyter cell boundaries;
# cells were executed interactively, so later cells rebind `net` and retrain
# from scratch.
# NOTE(review): `mnist_loader`, `Recognition`, and `network2` are
# project-local modules (Recognition is presumably the book's network.py,
# renamed) — confirm SGD's positional signature is
# SGD(data, epochs, mini_batch_size, eta, ...).

# In[3]:
import mnist_loader
# Load the MNIST data set split into training / validation / test portions.
training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
# load_data_wrapper() may return a generator (Python 3 port of the book's
# code); materialize it so it can be reused and sliced in the cells below.
training_data = list(training_data)

# In[2]:
import Recognition

# In[6]:
# Chapter 1: fully-connected 784-100-10 network (quadratic cost).
net = Recognition.Network([784, 100, 10])
# 30 epochs, mini-batch size 10, learning rate 0.001 — a deliberately tiny
# eta (presumably a slow-learning experiment; compare eta = 3.0 below).
net.SGD(training_data, 30, 10, 0.001, test_data=test_data)

# In[3]:
# Smaller network: 30 hidden neurons instead of 100.
net = Recognition.Network([784, 30, 10])

# In[4]:
# The book's standard hyperparameters: 30 epochs, batch size 10, eta = 3.0.
net.SGD(training_data, 30, 10, 3.0, test_data=test_data)

# In[4]:
import network2

# In[5]:
# Chapter 3: cross-entropy cost with L2 regularization (lmbda = 5.0),
# accuracy monitored on the validation set. Uses the default (scaled)
# weight initializer — the large-weight variant is left commented out.
net = network2.Network([784, 30, 10], cost=network2.CrossEntropyCost)
#net.large_weight_initializer()
net.SGD(training_data, 30, 10, 0.1, lmbda = 5.0,evaluation_data=validation_data,
monitor_evaluation_accuracy=True)

# In[6]:
# chapter 3 - Overfitting example - too many epochs of learning applied on small (1k samples) amount of data.
# Overfitting is treating noise as a signal.

# In[7]:
net = network2.Network([784, 30, 10], cost=network2.CrossEntropyCost)
net.large_weight_initializer()
# Train on only the first 1000 examples for 400 epochs: training cost keeps
# falling while evaluation accuracy plateaus — the overfitting signature.
net.SGD(training_data[:1000], 400, 10, 0.5, evaluation_data=test_data,
monitor_evaluation_accuracy=True,
monitor_training_cost=True)

# In[8]:
# chapter 3 - Regularization (weight decay) example 1 (only 1000 of training data and 30 hidden neurons)

# In[9]:
net = network2.Network([784, 30, 10], cost=network2.CrossEntropyCost)
net.large_weight_initializer()
# Same overfitting setup as the previous cell, but with L2 weight decay
# (lmbda = 0.1) and full monitoring of cost/accuracy on both data sets.
net.SGD(training_data[:1000], 400, 10, 0.5,
evaluation_data=test_data,
lmbda = 0.1, # this is a regularization parameter
monitor_evaluation_cost=True,
monitor_evaluation_accuracy=True,
monitor_training_cost=True,
monitor_training_accuracy=True)

# In[ ]: