Example 1
 def test_split(self):
     """A Split layer with num_output=3 must report the input sample
     shape replicated three times as its output sample shapes."""
     shape = (3, 224, 224)
     split_layer = layer.Split('split',
                               num_output=3,
                               input_sample_shape=shape)
     out_shapes = split_layer.get_output_sample_shape()
     # each of the three outputs mirrors the input shape
     self.check_shape(out_shapes, [shape, shape, shape])
Example 2
 def test_mult_outputs(self):
     """Build a two-input merge/split graph and check one split branch.

     Two activation inputs (1.1 and 0.9) are merged (summed) and then
     split; the forwarded value on the 'split1' branch should average
     to 2.
     """
     ffn = net.FeedForwardNet(loss.SoftmaxCrossEntropy())
     relu_a = ffn.add(layer.Activation('relu1', input_sample_shape=(2, )), [])
     relu_b = ffn.add(layer.Activation('relu2', input_sample_shape=(2, )), [])
     ffn.add(layer.Merge('merge', input_sample_shape=(2, )), [relu_a, relu_b])
     fork = ffn.add(layer.Split('split', 2))
     # both dummy layers consume the same split output
     ffn.add(layer.Dummy('split1'), fork)
     ffn.add(layer.Dummy('split2'), fork)
     first = tensor.Tensor((2, 2))
     first.set_value(1.1)
     second = tensor.Tensor((2, 2))
     second.set_value(0.9)
     outputs = ffn.forward(False, {'relu1': first, 'relu2': second})
     branch = tensor.to_numpy(outputs['split1'])
     # 1.1 + 0.9 == 2.0 everywhere, so the mean is 2
     self.assertAlmostEqual(np.average(branch), 2)
Example 3
def Block(net, name, nb_filters, stride):
    """Append a residual block to *net*.

    The block splits the incoming feature map into a residual branch
    (conv-bn-relu-conv-bn) and a shortcut. When stride > 1 the shortcut
    is a 1x1 strided conv + batch-norm projection; otherwise it is the
    identity (the split output itself). The two branches are merged at
    the end. Layer names are all prefixed with *name*.
    """
    needs_projection = stride > 1
    fork = net.add(layer.Split(name + "-split", 2))
    shortcut = fork  # identity shortcut by default
    if needs_projection:
        # 1x1 strided conv matches the residual branch's spatial size
        net.add(layer.Conv2D(name + "-br2-conv", nb_filters, 1, stride, pad=0),
                fork)
        shortcut = net.add(layer.BatchNormalization(name + "-br2-bn"))
    # residual branch: conv -> bn -> relu -> conv -> bn
    net.add(layer.Conv2D(name + "-br1-conv1", nb_filters, 3, stride, pad=1),
            fork)
    net.add(layer.BatchNormalization(name + "-br1-bn1"))
    net.add(layer.Activation(name + "-br1-relu"))
    net.add(layer.Conv2D(name + "-br1-conv2", nb_filters, 3, 1, pad=1))
    residual = net.add(layer.BatchNormalization(name + "-br1-bn2"))
    net.add(layer.Merge(name + "-merge"), [residual, shortcut])