Example #1: LeNet-5
def __init__(self):
    # Input: 32x32x1 grayscale. "SQUASHING" is LeNet-5's scaled tanh, A*tanh(S*a).
    # C1: 5x5 conv, 1 -> 6 maps, 32x32 -> 28x28
    self.C1 = Layer.ConvolutionalLayer([5, 5, 1, 6], pad="VALID", activation_function="SQUASHING")
    # S2: 2x2 average pooling, 28x28 -> 14x14
    self.S2 = Layer.PoolingLayer([2, 2, 6], mode="AVERAGE", activation_function="SQUASHING")
    # C3: 5x5 conv to 16 maps, sparsely wired to S2 via the combination map, 14x14 -> 10x10
    self.C3 = Layer.ConvolutionalCombinationLayer([5, 5, 16], get_combination_map(), pad="VALID", activation_function="SQUASHING")
    # S4: 2x2 average pooling, 10x10 -> 5x5
    self.S4 = Layer.PoolingLayer([2, 2, 16], mode="AVERAGE", activation_function="SQUASHING")
    # C5: 5x5 conv, 16 -> 120 maps, 5x5 -> 1x1 (effectively fully connected)
    self.C5 = Layer.ConvolutionalLayer([5, 5, 16, 120], pad="VALID", activation_function="SQUASHING")
    # F6: fully connected, 120 -> 84
    self.F6 = Layer.FullyConnectedLayer([120, 84], activation_function="SQUASHING")
    # Output: Euclidean RBF units built from the 7x12 character bitmaps
    self.RBF = Layer.RBFLayer(RBF_BITMAP.rbf_bitmap())
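For orientation, here is a minimal sketch of how these seven layers chain into a forward pass. The per-layer `forward` method and the flatten before F6 are assumptions about this Layer API, not confirmed by the source; the shape comments follow the classic 32x32 LeNet-5 input.

def forward(self, image):
    # NOTE: `forward` is a hypothetical per-layer method used for
    # illustration; the real Layer classes may expose a different call.
    a = self.C1.forward(image)           # 32x32x1 -> 28x28x6
    a = self.S2.forward(a)               # -> 14x14x6
    a = self.C3.forward(a)               # -> 10x10x16 via the combination map
    a = self.S4.forward(a)               # -> 5x5x16
    a = self.C5.forward(a)               # -> 1x1x120
    a = self.F6.forward(a.reshape(-1))   # flatten (assumed) -> 84
    return self.RBF.forward(a)           # 84 -> 10 class RBF distances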
Example #2: AlexNet

def __init__(self):
    # Input: 227x227x3 RGB (the paper quotes 224x224; 227 makes the arithmetic work).
    # C1: 11x11 conv, stride 4, 3 -> 96 maps -> 55x55x96
    self.C1 = Layer.ConvolutionalLayer([11, 11, 3, 96],
                                       pad="VALID",
                                       stride=4,
                                       activation_function="RELU",
                                       initializer="ALEXNET_bias0")
    # Local response normalization (the paper's k=2, n=5, alpha=1e-4, beta=0.75)
    self.N1 = Layer.LocalResponseNormalization(depth_radius=2,
                                               bias=2,
                                               alpha=1e-4,
                                               beta=0.75)
    # S1: 3x3 overlapping max pool, stride 2 -> 27x27x96
    self.S1 = Layer.PoolingLayer([3, 3, 96],
                                 stride=2,
                                 mode="MAX",
                                 activation_function="LINEAR")
    # C2: grouped 5x5 conv; each tower sees 48 channels and emits 128,
    # for 27x27x256 after concatenation
    self.C2_1 = Layer.ConvolutionalLayer([5, 5, 48, 128],
                                         pad="SAME",
                                         activation_function="RELU")
    self.C2_2 = Layer.ConvolutionalLayer([5, 5, 48, 128],
                                         pad="SAME",
                                         activation_function="RELU")
    self.N2 = Layer.LocalResponseNormalization(depth_radius=2,
                                               bias=2,
                                               alpha=1e-4,
                                               beta=0.75)
    # S2: 3x3 max pool, stride 2 -> 13x13x256
    self.S2 = Layer.PoolingLayer([3, 3, 256],
                                 stride=2,
                                 mode="MAX",
                                 activation_function="LINEAR")
    # C3: 3x3 conv over all 256 channels (the towers communicate here) -> 13x13x384
    self.C3 = Layer.ConvolutionalLayer([3, 3, 256, 384],
                                       pad="SAME",
                                       activation_function="RELU",
                                       initializer="ALEXNET_bias0")
    # C4: grouped 3x3 conv, 192 -> 192 per tower (384 total)
    self.C4_1 = Layer.ConvolutionalLayer([3, 3, 192, 192],
                                         pad="SAME",
                                         activation_function="RELU")
    self.C4_2 = Layer.ConvolutionalLayer([3, 3, 192, 192],
                                         pad="SAME",
                                         activation_function="RELU")
    # C5: grouped 3x3 conv, 192 -> 128 per tower (256 total)
    self.C5_1 = Layer.ConvolutionalLayer([3, 3, 192, 128],
                                         pad="SAME",
                                         activation_function="RELU")
    self.C5_2 = Layer.ConvolutionalLayer([3, 3, 192, 128],
                                         pad="SAME",
                                         activation_function="RELU")
    # S5: 3x3 max pool, stride 2 -> 6x6x256
    self.S5 = Layer.PoolingLayer([3, 3, 256],
                                 stride=2,
                                 mode="MAX",
                                 activation_function="LINEAR")
    # F6: flatten 6x6x256 = 9216 -> 4096, with dropout (keep probability 0.5)
    self.F6 = Layer.FullyConnectedLayer([9216, 4096],
                                        activation_function="RELU")
    self.D6 = Layer.DropOut(keep_prob=0.5)
    self.F7 = Layer.FullyConnectedLayer([4096, 4096],
                                        activation_function="RELU")
    self.D7 = Layer.DropOut(keep_prob=0.5)
    # F8: 4096 -> 1000 class scores, then softmax
    self.F8 = Layer.FullyConnectedLayer([4096, 1000])
    self.Output = Layer.Softmax()
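The paired branches (C2_*, C4_*, C5_*) mirror AlexNet's two-GPU towers: C1's 96 maps are split 48/48, each branch convolves its half, and the halves are concatenated back along the channel axis. The sketch below shows that wiring; the per-layer `forward` method and the NumPy split/concatenate are assumptions for illustration, not part of the documented Layer API.

import numpy as np

def forward(self, image):
    # NOTE: `forward` is a hypothetical per-layer method; channel
    # splits/concats use NumPy purely for illustration.
    a = self.S1.forward(self.N1.forward(self.C1.forward(image)))  # -> 27x27x96
    lo, hi = a[..., :48], a[..., 48:]                             # split the towers
    a = np.concatenate([self.C2_1.forward(lo),
                        self.C2_2.forward(hi)], axis=-1)          # -> 27x27x256
    a = self.S2.forward(self.N2.forward(a))                       # -> 13x13x256
    a = self.C3.forward(a)                                        # -> 13x13x384, full cross-connection
    lo, hi = a[..., :192], a[..., 192:]
    a = np.concatenate([self.C4_1.forward(lo),
                        self.C4_2.forward(hi)], axis=-1)          # -> 13x13x384
    lo, hi = a[..., :192], a[..., 192:]
    a = np.concatenate([self.C5_1.forward(lo),
                        self.C5_2.forward(hi)], axis=-1)          # -> 13x13x256
    a = self.S5.forward(a)                                        # -> 6x6x256
    a = self.D6.forward(self.F6.forward(a.reshape(-1)))           # 9216 -> 4096
    a = self.D7.forward(self.F7.forward(a))                       # -> 4096
    return self.Output.forward(self.F8.forward(a))                # -> 1000-way softmax

In the original paper the split exists only because training was distributed across two GPUs; C3 and the fully connected layers F6-F8 are the only points where the towers exchange information.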