Example #1
    def layer_op(self, input_tensor, is_training=None, keep_prob=None):
        # channels sit on the last axis of the input tensor
        channel_dim = len(input_tensor.get_shape()) - 1
        stack = [input_tensor]
        # start with a mask that keeps every input channel
        input_mask = tf.ones([input_tensor.get_shape().as_list()[-1]]) > 0
        for idx, d in enumerate(self.dilation_rates):
            if idx == len(self.dilation_rates) - 1:
                keep_prob = None  # no dropout on the last layer of the stack
            if self.use_bdo:
                # batch-wise dropout: the layer also returns a mask of the kept channels
                conv = ChannelSparseConvolutionalLayer(
                    self.n_dense_channels,
                    kernel_size=self.kernel_size,
                    **self.kwargs)
                conv, new_input_mask = conv(tf.concat(stack, channel_dim),
                                            input_mask=input_mask,
                                            is_training=is_training,
                                            keep_prob=keep_prob)
                input_mask = tf.concat([input_mask, new_input_mask], 0)
            else:
                conv = ConvolutionalLayer(self.n_dense_channels,
                                          kernel_size=self.kernel_size,
                                          **self.kwargs)
                conv = conv(tf.concat(stack, channel_dim),
                            is_training=is_training,
                            keep_prob=keep_prob)
            # dense connectivity: every layer sees all previous feature maps
            stack.append(conv)
        return stack
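The method above builds a dense feature stack: each layer's output is appended to stack, and the next layer receives the concatenation of the input and all previous outputs along the channel axis. Below is a minimal sketch of that wiring, assuming TensorFlow 1.x and using plain dilated convolutions in place of the NiftyNet layers; the channel counts and dilation rates are made-up values for illustration, not the block's defaults.

import tensorflow as tf

input_tensor = tf.random_normal(shape=[2, 8, 8, 4])
channel_dim = len(input_tensor.get_shape()) - 1   # channels on the last axis
stack = [input_tensor]
for dilation in (1, 2, 4):                        # stands in for self.dilation_rates
    # every layer sees the input plus all previous outputs
    features = tf.concat(stack, channel_dim)
    print('input channels to this layer:', features.shape[-1])   # 4, then 8, then 12
    conv = tf.layers.conv2d(features, filters=4, kernel_size=3,
                            dilation_rate=dilation, padding='same')
    stack.append(conv)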
Example #2
    def test_masks(self):
        x = tf.random_normal(shape=[2, 4, 5, 4])
        conv1 = ChannelSparseConvolutionalLayer(10)
        conv2 = ChannelSparseConvolutionalLayer(10)
        conv3 = ChannelSparseConvolutionalLayer(10)
        conv4 = ChannelSparseConvolutionalLayer(10)
        x1, mask1 = conv1(x, None, True, 1.)
        x2, mask2 = conv2(x1, mask1, True, .5)
        x3, mask3 = conv3(x2, mask2, True, .2)
        x4, mask4 = conv4(x3, mask3, True, 1.)

        with self.test_session() as sess:
            sess.run(tf.global_variables_initializer())
            out1, out2, out3, out4 = sess.run([mask1, mask2, mask3, mask4])
        # each layer emits 10 channels; keep_prob * 10 of them are kept: 10, 5, 2, 10
        self.assertAllClose([10, 5, 2, 10], [np.sum(out1),
                                             np.sum(out2),
                                             np.sum(out3),
                                             np.sum(out4)])
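The expected mask sums track keep_prob: each layer has 10 output channels and the test expects exactly keep_prob * 10 of them to survive. A rough sketch of a per-channel keep mask with that property, assuming TensorFlow 1.x; the actual sampling inside ChannelSparseConvolutionalLayer may differ.

import tensorflow as tf

n_channels, keep_prob = 10, 0.5
n_kept = int(n_channels * keep_prob)          # deterministic count, as the test expects
# shuffle a vector containing exactly n_kept ones to pick the surviving channels
mask = tf.random_shuffle(
    tf.concat([tf.ones([n_kept]), tf.zeros([n_channels - n_kept])], 0)) > 0

with tf.Session() as sess:
    print(sess.run(tf.reduce_sum(tf.cast(mask, tf.float32))))   # 5.0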
Example #3
    def test_2d_shape(self):
        x = tf.random_normal(shape=[2, 4, 5, 4])
        conv1 = ChannelSparseConvolutionalLayer(4)
        conv2 = ChannelSparseConvolutionalLayer(8, kernel_size=[1, 1, 3])
        conv3 = ChannelSparseConvolutionalLayer(4, acti_func='relu')
        conv4 = ChannelSparseConvolutionalLayer(8, with_bn=False)
        conv5 = ChannelSparseConvolutionalLayer(4, with_bias=True)
        x1, mask1 = conv1(x, None, True, 1.)
        x2, mask2 = conv2(x1, mask1, True, 1.)
        x3, mask3 = conv3(x2, mask2, True, .5)
        x4, mask4 = conv4(x3, mask3, True, .75)
        x5, mask5 = conv5(x4, mask4, True, 1.)

        with self.test_session(config=get_config()) as sess:
            sess.run(tf.global_variables_initializer())
            out1, out2, out3, out4, out5 = sess.run([x1, x2, x3, x4, x5])
        # channel-sparse layers emit only the kept channels:
        # conv3 keeps .5 * 4 = 2, conv4 keeps .75 * 8 = 6
        self.assertAllClose([2, 4, 5, 4], out1.shape)
        self.assertAllClose([2, 4, 5, 8], out2.shape)
        self.assertAllClose([2, 4, 5, 2], out3.shape)
        self.assertAllClose([2, 4, 5, 6], out4.shape)
        self.assertAllClose([2, 4, 5, 4], out5.shape)
Example #4
    def create_block(self):
        dfs_block = []
        for _ in self.dilation_rates:
            if self.use_bdo:
                conv = ChannelSparseConvolutionalLayer(
                    self.n_dense_channels,
                    kernel_size=self.kernel_size,
                    **self.kwargs)
            else:
                conv = ConvolutionalLayer(self.n_dense_channels,
                                          kernel_size=self.kernel_size,
                                          **self.kwargs)

            dfs_block.append(conv)

        return dfs_block
Example #5
    def create_block(self):
        net_conv_layers = []

        for _ in self.dilation_rates:
            if self.use_bdo:
                conv = ChannelSparseConvolutionalLayer(
                    self.n_dense_channels,
                    kernel_size=self.kernel_size,
                    **self.kwargs)
            else:
                conv = ConvolutionalLayer(self.n_dense_channels,
                                          kernel_size=self.kernel_size,
                                          **self.kwargs)
            net_conv_layers.append(conv)

        return DenseFSBlockDesc(conv_layers=net_conv_layers)
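DenseFSBlockDesc itself is not shown above. The sketch below assumes it is a simple namedtuple-style container and replays the dense-feature-stack wiring from Example #1 over the packaged layers; apply_block and its arguments are hypothetical names, and only the non-bdo (ConvolutionalLayer) case is covered.

from collections import namedtuple

import tensorflow as tf

# assumed container; the real DenseFSBlockDesc may carry more fields
DenseFSBlockDesc = namedtuple('DenseFSBlockDesc', ['conv_layers'])

def apply_block(block, input_tensor, is_training=True, keep_prob=None):
    # same wiring as Example #1: each layer consumes the concatenation of
    # the input and every previous layer's output along the channel axis
    channel_dim = len(input_tensor.get_shape()) - 1
    stack = [input_tensor]
    for conv in block.conv_layers:
        stack.append(conv(tf.concat(stack, channel_dim),
                          is_training=is_training,
                          keep_prob=keep_prob))
    return stack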