Example 1
    def init(self, n_in, n_out):
        Network.init_common(self, n_in, n_out)

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        block0 = LayerBlockGaussianNoise()

        # Layer 1
        n_neurons_1 = 200
        block1 = LayerBlockFullyConnected(neuron_relu, self.n_in, n_neurons_1)

        # Layer 2
        n_neurons_2 = 200
        block2 = LayerBlockFullyConnected(neuron_relu, n_neurons_1,
                                          n_neurons_2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_neurons_2, self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers(
            [block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
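These listings show only the init bodies, not the enclosing class. A minimal usage sketch for the network above, assuming a concrete Network subclass (the class name MLPNetwork and the sizes 784/10 are our placeholders, not part of the listing):

    net = MLPNetwork()             # its __init__ just calls Network.__init__(self)
    net.init(n_in=784, n_out=10)   # builds GaussianNoise -> FC(200) -> FC(200) -> Softmax
    trainable = net.ls_params      # concatenated parameter lists of all layers

After init returns, ls_params holds every layer's parameters in order, which is what a trainer would feed to its gradient updates.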
Example 2
    def init(self, patch_width, n_centroids, n_out):
        Network.init_common(self, patch_width**2 + n_centroids, n_out)

        self.patch_width = patch_width
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width ** 2
        self.ls_layers.append(LayerDivideFeatures([0, interval, self.n_in]))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))
        block1 = LayerBlockIdentity()
        self.ls_layers.append(LayerOfBlocks([block0, block1]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockIdentity()
        self.ls_layers.append(LayerOfBlocks([block0, block1]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_width2 + n_centroids
        n_out2 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block2]))

        # Layer 5
        n_out3 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_out2, n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block2]))

        # Layer 6
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out3, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block3]))

        self.update_params()
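Every convolutional example on this page recomputes its feature-map sizes with the same valid-convolution-plus-pooling rule: a k-wide kernel over an n-wide input leaves n - k + 1 positions, and non-overlapping pooling then floor-divides by the pool width. A small standalone check of that arithmetic (the helper name and the sample sizes are ours; only the formula comes from the listings):

    def conv_pool_out(n, kernel, pool):
        # output width of a valid convolution followed by non-overlapping pooling
        return (n - kernel + 1) // pool

    # e.g. a 29x29 patch through two 5x5-conv / 2x2-pool stages:
    w1 = conv_pool_out(29, 5, 2)   # 12
    w2 = conv_pool_out(w1, 5, 2)   # 4
    assert (w1, w2) == (12, 4)

Floor division (//) matters here: under Python 3, plain / would hand the layer constructors float shapes whenever an intermediate size is odd.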
Example 3
    def init(self, patch_height, patch_width, n_out):
        Network.init_common(self, patch_height * patch_width, n_out)

        self.in_height = patch_height
        self.in_width = patch_width

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_height, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0,
                                                 kernel_width0),
                                      poolsize=(pool_size_height0,
                                                pool_size_width0))

        # Layer 1
        filter_map_height1 = (patch_height - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block1 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))

        # Layer 2
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_width2
        n_out2 = 500
        block2 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out2,
                                          n_out=self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers(
            [block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 4
    def init(self, patch_width, n_out):
        Network.init_common(self, 6 * patch_width**2, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width**2
        self.ls_layers.append(
            LayerDivideFeatures(range(0, 6 * interval + 1, interval)))

        # Layer 1
        n_out_1_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2,
                                          n_out_1_1)

        n_out_1_2 = 1000
        block1 = LayerBlockFullyConnected(neuron_relu, patch_width**2,
                                          n_out_1_2)

        self.ls_layers.append(
            LayerOfBlocks([block0, block0, block0, block1, block1, block1]))

        # Layer 2
        n_out_2_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)

        n_out_2_2 = 1000
        block3 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block4 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block5 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)

        self.ls_layers.append(
            LayerOfBlocks([block0, block1, block2, block3, block4, block5]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2_1 * 3 + n_out_2_2 * 3
        n_out2 = 4000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out2,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
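Note the block sharing in Layer 1 above: LayerOfBlocks([block0, block0, block0, block1, block1, block1]) reuses the same two block objects across the six feature slices, which presumably means the first three slices share one weight matrix and the last three another, whereas Layer 2 builds six distinct LayerBlockFullyConnected objects and thus six independent weight sets. A toy illustration of why object identity is what carries the sharing (plain Python, independent of the listing's classes):

    class Block:
        def __init__(self):
            self.params = ["W", "b"]          # stand-in for a weight matrix and bias

    shared = [Block()] * 3                    # one block in three slots -> 1 parameter set
    separate = [Block() for _ in range(3)]    # three blocks -> 3 parameter sets
    assert len({id(b) for b in shared}) == 1
    assert len({id(b) for b in separate}) == 3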
Example 5
    def init(self, patch_width, n_out):
        Network.init_common(self, patch_width**3, n_out)

        self.patch_width = patch_width

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        filter_map_0_shape = np.array([patch_width, patch_width, patch_width],
                                      dtype=int)
        filter_0_shape = np.array([2, 2, 2], dtype=int)
        pool_0_shape = np.array([2, 2, 2], dtype=int)
        n_kern0 = 20
        block0 = LayerBlockConvPool3D(neuron_relu,
                                      1,
                                      tuple(filter_map_0_shape),
                                      n_kern0,
                                      tuple(filter_0_shape),
                                      poolsize=tuple(pool_0_shape))

        # Layer 1
        filter_map_1_shape = (filter_map_0_shape - filter_0_shape + 1) // pool_0_shape
        filter_1_shape = np.array([2, 2, 2], dtype=int)
        pool_1_shape = np.array([2, 2, 2], dtype=int)
        n_kern1 = 50
        block1 = LayerBlockConvPool3D(neuron_relu,
                                      n_kern0,
                                      tuple(filter_map_1_shape),
                                      n_kern1,
                                      tuple(filter_1_shape),
                                      poolsize=tuple(pool_1_shape))

        # Layer 2
        filter_map_2_shape = (filter_map_1_shape - filter_1_shape + 1) // pool_1_shape
        n_in2 = n_kern1 * np.prod(filter_map_2_shape)
        n_out2 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out2,
                                          n_out=self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers(
            [block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
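Example 5 keeps its shape bookkeeping in small integer np.arrays so the subtraction and division apply element-wise across the three spatial dimensions at once. A standalone sketch of that bookkeeping (the concrete sizes are ours; the formula and dtype choice come from the listing):

    import numpy as np

    patch = np.array([12, 12, 12], dtype=int)
    flt = np.array([2, 2, 2], dtype=int)
    pool = np.array([2, 2, 2], dtype=int)

    fmap1 = (patch - flt + 1) // pool       # [5, 5, 5] after the first conv/pool
    fmap2 = (fmap1 - flt + 1) // pool       # [2, 2, 2] after the second
    n_flat = 50 * int(np.prod(fmap2))       # flattened input to the fully connected block
    assert n_flat == 400

Floor division keeps the arrays integer-typed, and tuple(...) then turns them into the plain tuples the block constructors expect.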
Example 6
    def init(self, patch_width, priors, n_out):
        Network.init_common(self, self.n_patches * patch_width**2, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width**2
        self.ls_layers.append(
            LayerDivideFeatures(range(0, self.n_in + 1, interval)))

        # Layer 1
        n_out_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1)
        self.ls_layers.append(LayerOfBlocks([block0] * 3))

        # Layer 2
        n_out_2 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        self.ls_layers.append(LayerOfBlocks([block0, block1, block2]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2 * self.n_patches
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out2,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        self.ls_layers.append(LayerOfBlocks([LayerBlockMultiplication(priors)
                                             ]))

        # Layer 7
        self.ls_layers.append(LayerOfBlocks([LayerBlockNormalization()]))

        self.update_params()
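Layers 6 and 7 above rescale the softmax output by per-class priors and renormalize. Assuming, from their names, that LayerBlockMultiplication multiplies element-wise by its argument and LayerBlockNormalization rescales to unit sum, the pair computes p(c|x) * prior(c) / sum_k p(k|x) * prior(k). A minimal numeric sketch of that post-processing (plain numpy; the values are ours):

    import numpy as np

    softmax_out = np.array([0.7, 0.2, 0.1])
    priors = np.array([0.1, 0.3, 0.6])

    weighted = softmax_out * priors         # the multiplication layer
    posterior = weighted / weighted.sum()   # the normalization layer
    assert abs(posterior.sum() - 1.0) < 1e-12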
Example 7
    def init(self, patch_width, n_out):
        Network.init_common(self, 6 * patch_width**2, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width ** 2
        self.ls_layers.append(LayerDivideFeatures(range(0, 6*interval+1, interval)))

        # Layer 1
        n_out_1_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1_1)

        n_out_1_2 = 1000
        block1 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1_2)

        self.ls_layers.append(LayerOfBlocks([block0, block0, block0, block1, block1, block1]))

        # Layer 2
        n_out_2_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)

        n_out_2_2 = 1000
        block3 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block4 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block5 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)

        self.ls_layers.append(LayerOfBlocks([block0, block1, block2, block3, block4, block5]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2_1 * 3 + n_out_2_2 * 3
        n_out2 = 4000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 8
    def init(self, patch_height, patch_width, n_out):
        Network.init_common(self, patch_height*patch_width, n_out)

        self.in_height = patch_height
        self.in_width = patch_width

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_height, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        # Layer 1
        filter_map_height1 = (patch_height - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        # Layer 2
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_width2
        n_out2 = 500
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 9
    def init(self, patch_width, priors, n_out):
        Network.init_common(self, self.n_patches * patch_width**2, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width ** 2
        self.ls_layers.append(LayerDivideFeatures(range(0, self.n_in + 1, interval)))

        # Layer 1
        n_out_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1)
        self.ls_layers.append(LayerOfBlocks([block0]*3))

        # Layer 2
        n_out_2 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1, n_out_2)
        self.ls_layers.append(LayerOfBlocks([block0, block1, block2]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2 * self.n_patches
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        self.ls_layers.append(LayerOfBlocks([LayerBlockMultiplication(priors)]))

        # Layer 7
        self.ls_layers.append(LayerOfBlocks([LayerBlockNormalization()]))

        self.update_params()
Example 10
    def init(self, patch_width, n_out):
        Network.init_common(self, patch_width**3, n_out)

        self.patch_width = patch_width

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        filter_map_0_shape = np.array([patch_width, patch_width, patch_width], dtype=int)
        filter_0_shape = np.array([2, 2, 2], dtype=int)
        pool_0_shape = np.array([2, 2, 2], dtype=int)
        n_kern0 = 20
        block0 = LayerBlockConvPool3D(neuron_relu,
                                      1, tuple(filter_map_0_shape),
                                      n_kern0, tuple(filter_0_shape),
                                      poolsize=tuple(pool_0_shape))

        # Layer 1
        filter_map_1_shape = (filter_map_0_shape - filter_0_shape + 1) // pool_0_shape
        filter_1_shape = np.array([2, 2, 2], dtype=int)
        pool_1_shape = np.array([2, 2, 2], dtype=int)
        n_kern1 = 50
        block1 = LayerBlockConvPool3D(neuron_relu,
                                      n_kern0, tuple(filter_map_1_shape),
                                      n_kern1, tuple(filter_1_shape),
                                      poolsize=tuple(pool_1_shape))

        # Layer 2
        filter_map_2_shape = (filter_map_1_shape - filter_1_shape + 1) // pool_1_shape
        n_in2 = n_kern1 * np.prod(filter_map_2_shape)
        n_out2 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 11
    def init(self, n_in, n_out):
        Network.init_common(self, n_in, n_out)

        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        block0 = LayerBlockGaussianNoise()

        # Layer 1
        n_neurons_1 = 200
        block1 = LayerBlockFullyConnected(neuron_relu, self.n_in, n_neurons_1)

        # Layer 2
        n_neurons_2 = 200
        block2 = LayerBlockFullyConnected(neuron_relu, n_neurons_1, n_neurons_2)

        # Layer 3
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_neurons_2, self.n_out)

        self.ls_layers = convert_blocks_into_feed_forward_layers([block0, block1, block2, block3])

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 12
    def __init__(self):
        Network.__init__(self)
Example 13
    def init(self, patch_width, patch_width_comp, patch_width_3d, n_centroids,
             n_out):
        Network.init_common(
            self, 3 * patch_width**2 + 3 * patch_width_comp**2 +
            patch_width_3d**3 + n_centroids, n_out)

        self.patch_width = patch_width
        self.patch_width_3d = patch_width_3d
        self.n_centroids = n_centroids

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0

        splits = [0] + [patch_width**2] * 3 + [patch_width_comp**2] * 3 + [
            self.patch_width_3d**3
        ] + [n_centroids]
        self.ls_layers.append(LayerDivideFeatures(np.cumsum(splits)))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0,
                                                 kernel_width0),
                                      poolsize=(pool_size_height0,
                                                pool_size_width0))

        block1 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(1, patch_width_comp, patch_width_comp),
            flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
            poolsize=(pool_size_height0, pool_size_width0))

        n_out_1_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, patch_width_3d**3,
                                            n_out_1_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(
            LayerOfBlocks([
                block0, block0, block0, block1, block1, block1, block_3d,
                block_centroids
            ]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block2 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))

        filter_map_height1_comp = (patch_width_comp - kernel_height0 + 1) // pool_size_height0
        filter_map_width1_comp = (patch_width_comp - kernel_width0 + 1) // pool_size_width0
        block3 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1_comp,
                      filter_map_width1_comp),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block4 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1_comp,
                      filter_map_width1_comp),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block5 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1_comp,
                      filter_map_width1_comp),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))

        n_out_2_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, n_out_1_3, n_out_2_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(
            LayerOfBlocks([
                block0, block1, block2, block3, block4, block5, block_3d,
                block_centroids
            ]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        filter_map_height2_comp = (filter_map_height1_comp - kernel_height1 + 1) // pool_size_height1
        filter_map_width2_comp = (filter_map_width1_comp - kernel_width1 + 1) // pool_size_width1
        n_in2 = 3 * n_kern1 * filter_map_height2 * filter_map_width2 + \
                3 * n_kern1 * filter_map_height2_comp * filter_map_width2_comp + n_out_2_3 + n_centroids
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        n_out3 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_out2,
                                          n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out3,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
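Instead of spelling out the boundaries with range, Example 13 builds the LayerDivideFeatures split points with np.cumsum over the per-input sizes, producing the same kind of running offsets. A quick check with small numbers (the sizes are ours):

    import numpy as np

    patch_width, patch_width_comp, patch_width_3d, n_centroids = 3, 2, 2, 4
    splits = [0] + [patch_width**2] * 3 + [patch_width_comp**2] * 3 \
             + [patch_width_3d**3] + [n_centroids]
    print(np.cumsum(splits).tolist())
    # [0, 9, 18, 27, 31, 35, 39, 47, 51]: three 3x3 patches, three 2x2
    # patches, one 2x2x2 volume, then the centroid features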
Example 14
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
        self.patch_width_3d = None
        self.n_centroids = None
Example 15
    def __init__(self):
        Network.__init__(self)
Example 16
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
        self.in_height = None
Example 17
    def init(self, patch_width, n_out):
        Network.init_common(self, 6 * patch_width * patch_width, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width**2
        self.ls_layers.append(
            LayerDivideFeatures(range(0, self.n_in + interval, interval)))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0,
                                                 kernel_width0),
                                      poolsize=(pool_size_height0,
                                                pool_size_width0))

        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0,
                                                 kernel_width0),
                                      poolsize=(pool_size_height0,
                                                pool_size_width0))

        self.ls_layers.append(
            LayerOfBlocks([block0, block0, block0, block1, block1, block1]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block2 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))

        block3 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block4 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block5 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))

        self.ls_layers.append(
            LayerOfBlocks([block0, block1, block2, block3, block4, block5]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = 6 * n_kern1 * filter_map_height2 * filter_map_width2
        n_out2 = 4000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out2,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 18
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
        self.in_height = None
Example 19
    def init(self, patch_width, n_out):
        Network.init_common(self, 6 * patch_width*patch_width, n_out)

        self.patch_width = patch_width

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width ** 2
        self.ls_layers.append(LayerDivideFeatures(range(0, self.n_in + interval, interval)))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        self.ls_layers.append(LayerOfBlocks([block0, block0, block0, block1, block1, block1]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block2 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        block3 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block4 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block5 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        self.ls_layers.append(LayerOfBlocks([block0, block1, block2, block3, block4, block5]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = 6 * n_kern1 * filter_map_height2 * filter_map_width2
        n_out2 = 4000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out2, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 20
    def init(self, patch_width, patch_width_3d, n_centroids, n_out):
        Network.init_common(
            self, 6 * patch_width**2 + patch_width_3d**3 + n_centroids, n_out)

        self.patch_width = patch_width
        self.patch_width_3d = patch_width_3d
        self.n_centroids = n_centroids

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width**2
        self.ls_layers.append(
            LayerDivideFeatures(
                list(range(0, 6 * interval + 1, interval)) +
                [6 * interval + self.patch_width_3d**3] + [self.n_in]))

        # Layer 1
        n_out_1_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2,
                                          n_out_1_1)

        n_out_1_2 = 1000
        block1 = LayerBlockFullyConnected(neuron_relu, patch_width**2,
                                          n_out_1_2)

        n_out_1_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, patch_width_3d**3,
                                            n_out_1_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(
            LayerOfBlocks([
                block0, block0, block0, block1, block1, block1, block_3d,
                block_centroids
            ]))

        # Layer 2
        n_out_2_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)

        n_out_2_2 = 1000
        block3 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block4 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block5 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)

        n_out_2_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, n_out_1_3, n_out_2_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(
            LayerOfBlocks([
                block0, block1, block2, block3, block4, block5, block_3d,
                block_centroids
            ]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2_1 * 3 + n_out_2_2 * 3 + n_out_2_3 + n_centroids
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        n_out3 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_out2,
                                          n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out3,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 21
    def init(self, patch_width, patch_width_3d, n_centroids, n_out):
        Network.init_common(self, 6 * patch_width**2 + patch_width_3d**3 + n_centroids, n_out)

        self.patch_width = patch_width
        self.patch_width_3d = patch_width_3d
        self.n_centroids = n_centroids

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width ** 2
        self.ls_layers.append(LayerDivideFeatures(list(range(0, 6*interval+1, interval))
                                                  + [6*interval + self.patch_width_3d**3]
                                                  + [self.n_in]))

        # Layer 1
        n_out_1_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1_1)

        n_out_1_2 = 1000
        block1 = LayerBlockFullyConnected(neuron_relu, patch_width**2, n_out_1_2)

        n_out_1_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, patch_width_3d**3, n_out_1_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(LayerOfBlocks([block0, block0, block0, block1, block1, block1, block_3d, block_centroids]))

        # Layer 2
        n_out_2_1 = 1000
        block0 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block1 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)
        block2 = LayerBlockFullyConnected(neuron_relu, n_out_1_1, n_out_2_1)

        n_out_2_2 = 1000
        block3 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block4 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)
        block5 = LayerBlockFullyConnected(neuron_relu, n_out_1_2, n_out_2_2)

        n_out_2_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, n_out_1_3, n_out_2_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(LayerOfBlocks([block0, block1, block2, block3, block4, block5, block_3d, block_centroids]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        n_in2 = n_out_2_1 * 3 + n_out_2_2 * 3 + n_out_2_3 + n_centroids
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        n_out3 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_out2, n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out3, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 22
    def init(self, patch_width, n_centroids, n_out):
        Network.init_common(self, patch_width**2 + n_centroids, n_out)

        self.patch_width = patch_width
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0
        interval = patch_width**2
        self.ls_layers.append(LayerDivideFeatures([0, interval, self.n_in]))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0,
                                                 kernel_width0),
                                      poolsize=(pool_size_height0,
                                                pool_size_width0))
        block1 = LayerBlockIdentity()
        self.ls_layers.append(LayerOfBlocks([block0, block1]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(
            neuron_relu,
            in_shape=(n_kern0, filter_map_height1, filter_map_width1),
            flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
            poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockIdentity()
        self.ls_layers.append(LayerOfBlocks([block0, block1]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        n_in2 = n_kern1 * filter_map_height2 * filter_map_width2 + n_centroids
        n_out2 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_in2,
                                          n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block2]))

        # Layer 5
        n_out3 = 1000
        block2 = LayerBlockFullyConnected(neuron_relu,
                                          n_in=n_out2,
                                          n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block2]))

        # Layer 6
        block3 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(),
                                          n_in=n_out3,
                                          n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block3]))

        self.update_params()
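Some examples end with self.update_params() while others collect the parameters by hand; judging from the listings the two are interchangeable, with update_params presumably performing the same accumulation internally. The manual form, copied from the other examples for reference:

    self.ls_params = []
    for l in self.ls_layers:
        self.ls_params += l.params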
Example 23
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
Example 24
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
Example 25
    def init(self, patch_width, patch_width_comp, patch_width_3d, n_centroids, n_out):
        Network.init_common(self, 3 * patch_width**2 + 3 * patch_width_comp**2 + patch_width_3d**3 + n_centroids, n_out)

        self.patch_width = patch_width
        self.patch_width_3d = patch_width_3d
        self.n_centroids = n_centroids

        self.ls_layers = []
        neuron_relu = neuron_type.NeuronRELU()

        # Layer 0

        splits = [0] + [patch_width**2]*3 + [patch_width_comp**2]*3 + [self.patch_width_3d**3] + [n_centroids]
        self.ls_layers.append(LayerDivideFeatures(np.cumsum(splits)))

        # Layer 1
        kernel_height0 = 5
        kernel_width0 = 5
        pool_size_height0 = 2
        pool_size_width0 = 2
        n_kern0 = 20
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width, patch_width),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(1, patch_width_comp, patch_width_comp),
                                      flt_shape=(n_kern0, 1, kernel_height0, kernel_width0),
                                      poolsize=(pool_size_height0, pool_size_width0))

        n_out_1_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, patch_width_3d**3, n_out_1_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(LayerOfBlocks([block0, block0, block0, block1, block1, block1, block_3d, block_centroids]))

        # Layer 2
        filter_map_height1 = (patch_width - kernel_height0 + 1) // pool_size_height0
        filter_map_width1 = (patch_width - kernel_width0 + 1) // pool_size_width0
        kernel_height1 = 5
        kernel_width1 = 5
        pool_size_height1 = 2
        pool_size_width1 = 2
        n_kern1 = 50
        block0 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block1 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block2 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1, filter_map_width1),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        filter_map_height1_comp = (patch_width_comp - kernel_height0 + 1) // pool_size_height0
        filter_map_width1_comp = (patch_width_comp - kernel_width0 + 1) // pool_size_width0
        block3 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1_comp, filter_map_width1_comp),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block4 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1_comp, filter_map_width1_comp),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))
        block5 = LayerBlockConvPool2D(neuron_relu,
                                      in_shape=(n_kern0, filter_map_height1_comp, filter_map_width1_comp),
                                      flt_shape=(n_kern1, n_kern0, kernel_height1, kernel_width1),
                                      poolsize=(pool_size_height1, pool_size_width1))

        n_out_2_3 = 1000
        block_3d = LayerBlockFullyConnected(neuron_relu, n_out_1_3, n_out_2_3)

        block_centroids = LayerBlockIdentity()

        self.ls_layers.append(LayerOfBlocks([block0, block1, block2, block3, block4, block5, block_3d, block_centroids]))

        # Layer 3
        self.ls_layers.append(LayerMergeFeatures())

        # Layer 4
        filter_map_height2 = (filter_map_height1 - kernel_height1 + 1) // pool_size_height1
        filter_map_width2 = (filter_map_width1 - kernel_width1 + 1) // pool_size_width1
        filter_map_height2_comp = (filter_map_height1_comp - kernel_height1 + 1) // pool_size_height1
        filter_map_width2_comp = (filter_map_width1_comp - kernel_width1 + 1) // pool_size_width1
        n_in2 = 3 * n_kern1 * filter_map_height2 * filter_map_width2 + \
                3 * n_kern1 * filter_map_height2_comp * filter_map_width2_comp + n_out_2_3 + n_centroids
        n_out2 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_in2, n_out=n_out2)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 5
        n_out3 = 3000
        block0 = LayerBlockFullyConnected(neuron_relu, n_in=n_out2, n_out=n_out3)
        self.ls_layers.append(LayerOfBlocks([block0]))

        # Layer 6
        block0 = LayerBlockFullyConnected(neuron_type.NeuronSoftmax(), n_in=n_out3, n_out=self.n_out)
        self.ls_layers.append(LayerOfBlocks([block0]))

        self.ls_params = []
        for l in self.ls_layers:
            self.ls_params += l.params
Example 26
    def __init__(self):
        Network.__init__(self)
        self.patch_width = None
        self.patch_width_3d = None
        self.n_centroids = None