mitosis.py (forked from znck/mitosis-detection)
#!/usr/bin/env python
from keras.layers import BatchNormalization
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.layers.core import Dense, Flatten, Activation
from keras.models import Sequential
from keras.optimizers import SGD, RMSprop, Adamax
from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.initializations import get_fans
import keras.backend as K
import numpy as np
from layers import MyConvolution2D


def custom_initialization(shape, name=None):
    # Draws weights from a normal distribution parameterized by the layer's
    # fan-in/fan-out, then rescales them by 1 / sqrt(fan_in / 2).
    fan_in, fan_out = get_fans(shape)
    loc = (fan_in + fan_out) / 2.
    scale = (fan_in + fan_out) / 2.
    return K.variable(np.random.normal(loc, scale, shape) * (1. / np.sqrt(fan_in / 2)), name=name)
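

# A minimal sketch, assuming Keras 1.x's behaviour of accepting a callable for
# a layer's `init` argument: this is how custom_initialization could be wired
# into a conv layer. The helper name `_custom_init_example` is illustrative and
# is never called; the models below pass init='he_normal' instead.
def _custom_init_example():
    # Conv layer whose weights would come from custom_initialization.
    return Convolution2D(16, 3, 3, init=custom_initialization)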


def model_1(lr=.001, rho=.9, epsilon=1.0e-6):
    # Five 16-filter conv blocks (2x2/3x3 kernels), each followed by 2x2 max
    # pooling and LeakyReLU, on 3x101x101 channels-first patches; then two
    # dense layers and a 2-way softmax, trained with Adamax.
    dnn = Sequential()
    dnn.add(BatchNormalization(input_shape=(3, 101, 101)))
    dnn.add(Convolution2D(16, 2, 2, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 3, 3, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 3, 3, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 2, 2, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 2, 2, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Flatten())
    dnn.add(Dense(100))
    dnn.add(Dense(2))
    dnn.add(Activation('softmax'))
    dnn.compile(loss='binary_crossentropy', optimizer=Adamax(lr=lr))
    return dnn


def model_2(lr=.001, rho=.9, epsilon=1.0e-6):
    # Shallower variant: four 16-filter conv blocks (4x4/3x3 kernels) with max
    # pooling and LeakyReLU, then the same dense/softmax head as model_1.
    dnn = Sequential()
    dnn.add(BatchNormalization(input_shape=(3, 101, 101)))
    dnn.add(Convolution2D(16, 4, 4, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 4, 4, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 4, 4, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Convolution2D(16, 3, 3, init='he_normal'))
    dnn.add(MaxPooling2D())
    dnn.add(LeakyReLU(alpha=.01))
    dnn.add(Flatten())
    dnn.add(Dense(100))
    dnn.add(Dense(2))
    dnn.add(Activation('softmax'))
    dnn.compile(loss='binary_crossentropy', optimizer=Adamax(lr=lr))
    return dnn


def model_base(lr=.001, rho=.9, epsilon=1.0e-6):
    # Baseline built from the custom MyConvolution2D layer: three conv blocks
    # (6x6, 5x5, 3x3 kernels) with LeakyReLU and max pooling, then three dense
    # layers with LeakyReLU and a final 2-way softmax.
    nn = Sequential()
    nn.add(BatchNormalization(input_shape=(3, 101, 101)))
    nn.add(MyConvolution2D(16, 6, 6, init='he_normal'))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(MaxPooling2D())
    nn.add(MyConvolution2D(16, 5, 5, init='he_normal'))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(MaxPooling2D())
    nn.add(MyConvolution2D(16, 3, 3, init='he_normal'))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(MaxPooling2D())
    nn.add(Flatten())
    nn.add(Dense(200))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(Dense(100))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(Dense(2))
    nn.add(LeakyReLU(alpha=.01))
    nn.add(Activation('softmax'))
    nn.compile(loss='binary_crossentropy', optimizer=Adamax(lr=lr), class_mode='binary')
    return nn
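

# A minimal smoke-test sketch, not part of the training pipeline: it assumes
# only that these models consume channels-first patches of shape (3, 101, 101)
# and emit two softmax class probabilities. The helper name `_smoke_test` and
# the random dummy patch are illustrative; real data loading and training live
# in runner.main().
def _smoke_test():
    dnn = model_base()
    # One dummy RGB patch: (n_samples, channels, height, width).
    patch = np.random.rand(1, 3, 101, 101).astype('float32')
    probs = dnn.predict(patch)
    print(probs.shape)  # expected: (1, 2)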


if __name__ == '__main__':
    from runner import main
    main()