Example #1
from singa import layer  # SINGA's layer module

def Block(net, name, nb_filters, stride):
    """Append a residual block to ``net`` and merge its two branches."""
    # Split the incoming tensor into two branches.
    split = net.add(layer.Split(name + "-split", 2))
    if stride > 1:
        # Shortcut branch: 1x1 conv + batch norm to match the downsampled main branch.
        net.add(layer.Conv2D(name + "-br2-conv", nb_filters, 1, stride, pad=0),
                split)
        br2bn = net.add(layer.BatchNormalization(name + "-br2-bn"))
    # Main branch: two 3x3 convolutions with batch norm and ReLU.
    net.add(layer.Conv2D(name + "-br1-conv1", nb_filters, 3, stride, pad=1),
            split)
    net.add(layer.BatchNormalization(name + "-br1-bn1"))
    net.add(layer.Activation(name + "-br1-relu"))
    net.add(layer.Conv2D(name + "-br1-conv2", nb_filters, 3, 1, pad=1))
    br1bn2 = net.add(layer.BatchNormalization(name + "-br1-bn2"))
    # Merge sums the two branches; with stride 1 the shortcut is the raw split output.
    if stride > 1:
        net.add(layer.Merge(name + "-merge"), [br1bn2, br2bn])
    else:
        net.add(layer.Merge(name + "-merge"), [br1bn2, split])
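Block only appends layers; a caller supplies the FeedForwardNet. Below is a minimal sketch of such a caller, assuming the SINGA v1 Python API (singa.net, singa.layer, singa.loss, singa.metric); the layer names, filter counts, and the pooling/dense tail are illustrative and not taken from the example above.

from singa import layer, loss, metric
from singa import net as ffnet

def create_toy_net():
    # Stem: a single conv + BN + ReLU on 3x32x32 inputs (illustrative sizes).
    net = ffnet.FeedForwardNet(loss.SoftmaxCrossEntropy(), metric.Accuracy())
    net.add(layer.Conv2D('conv1', 16, 3, 1, pad=1, input_sample_shape=(3, 32, 32)))
    net.add(layer.BatchNormalization('bn1'))
    net.add(layer.Activation('relu1'))
    # Residual blocks built with Block(); stride 2 triggers the projection shortcut.
    Block(net, 'block2', 16, 1)
    Block(net, 'block3', 32, 2)
    # Global average pooling over the remaining 16x16 feature map, then a classifier.
    net.add(layer.AvgPooling2D('pool', 16, 16, border_mode='valid'))
    net.add(layer.Flatten('flat'))
    net.add(layer.Dense('dense', 10))
    return net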
Example #2
    # Method of a unittest.TestCase; assumed imports:
    #   from singa import net, layer, loss, tensor
    #   import numpy as np
    def test_mult_inputs(self):
        # Two ReLU layers feed a Merge layer, which sums its inputs element-wise.
        ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
        s1 = ffn.add(layer.Activation('relu1', input_sample_shape=(2, )), [])
        s2 = ffn.add(layer.Activation('relu2', input_sample_shape=(2, )), [])
        ffn.add(layer.Merge('merge', input_sample_shape=(2, )), [s1, s2])
        # Constant inputs 1.1 and 0.9 pass through ReLU unchanged, so every
        # merged entry is 2.0.
        x1 = tensor.Tensor((2, 2))
        x1.set_value(1.1)
        x2 = tensor.Tensor((2, 2))
        x2.set_value(0.9)
        out = ffn.forward(False, {'relu1': x1, 'relu2': x2})
        out = tensor.to_numpy(out)
        self.assertAlmostEqual(np.average(out), 2)
Example #3
    # Method of a unittest.TestCase; assumes `from singa import layer` and a
    # `check_shape` helper defined on the test class.
    def test_merge(self):
        # Merge leaves the per-sample shape unchanged: output shape == input shape.
        in_sample_shape = (3, 224, 224)
        merge = layer.Merge('merge', input_sample_shape=in_sample_shape)
        out_sample_shape = merge.get_output_sample_shape()
        self.check_shape(out_sample_shape, in_sample_shape)