Example 1
class EuclideanLossLayer(LossLayer):
    def __init__(self):
        LossLayer.__init__(self)
        self.diff_ = Blob(numpy.float64, [6])

    def Reshape(self, bottom, top):
        LossLayer.Reshape(self, bottom, top)
        self.diff_.ReshapeLike(bottom[0])

    def type(self):
        return 'EuclideanLoss'

    def AllowForceBackward(self, bottom_index):
        return True

    def Forward_cpu(self, bottom, top):
        self.diff_.set_data(bottom[0].data() - bottom[1].data())
        dot = numpy.dot(self.diff_.data(), self.diff_.data())
        loss = dot / bottom[0].shape()[0] / 2
        top[0].set_data(loss)

    def Backward_cpu(self, top, propagate_down, bottom):
        print(top[0].diff())
        print(bottom[0].shape()[0])
        print(self.diff_.data())
        bottom[0].set_diff(top[0].diff() / bottom[0].shape()[0] *
                           self.diff_.data())
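For reference, the math this layer implements restates easily in bare numpy: over a batch of N rows, the loss is sum((x0 - x1)**2) / (2 * N), and the gradient w.r.t. the first bottom is (top diff) * (x0 - x1) / N. A minimal stand-alone check (all names below are local to the sketch, not part of the Blob/Layer API):

import numpy

x0 = numpy.array([[1.0, 2.0, 3.0, 4.0, 5.0, 6.0]])
x1 = numpy.array([[1.0, 0.0, 0.0, 0.0, 0.0, 0.0]])

diff = x0 - x1
n = x0.shape[0]

loss = (diff * diff).sum() / n / 2    # forward: 90 / 1 / 2 = 45.0
grad_x0 = 1.0 / n * diff              # backward, assuming top diff = 1.0

print(loss)      # 45.0
print(grad_x0)   # [[0. 2. 3. 4. 5. 6.]]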
Example 2
    def test_MaxPoolingLayer(self):
        top0 = Blob()

        bot0 = Blob()
        bot0.Reshape((1,1,4,4))
        bot0.set_data([5,3,1,2,1,2,3,2,4,2,2,5,3,6,1,1])
        bot0.Reshape((1,1,4,4))

        expect_top0 = Blob()
        expect_top0.Reshape((1,1,2,2))
        expect_top0.set_data([5,3,6,5])
        expect_top0.Reshape((1,1,2,2))

        expect_bot0 = Blob()
        expect_bot0.Reshape((1,1,4,4))
        expect_bot0.set_diff([1,0,0,0,0,0,0.8,0,0,0,0,0.6,0,0.4,0,0])
        expect_bot0.Reshape((1,1,4,4))

        layer = MaxPoolingLayer(2,2,2)
        layer.Setup([bot0], [top0])

        layer.Forward([bot0], [top0])
        np.testing.assert_array_almost_equal( top0.data(), expect_top0.data() )

        top0.set_diff([1.0,0.8,0.4,0.6])
        top0.Reshape((1,1,2,2))
       
        layer.Backward([top0], [], [bot0])
        np.testing.assert_array_almost_equal( bot0.diff(), expect_bot0.diff() )
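A minimal numpy sketch of what this test exercises, assuming the usual max-pool rules (forward takes each window's max; backward routes each upstream diff to the argmax position). The variables below are local to the sketch:

import numpy

x = numpy.array([5,3,1,2,1,2,3,2,4,2,2,5,3,6,1,1], float).reshape(4, 4)
g = numpy.array([[1.0, 0.8], [0.4, 0.6]])   # upstream diff, one per output

out = numpy.zeros((2, 2))
dx = numpy.zeros_like(x)
for i in range(2):
    for j in range(2):
        win = x[2*i:2*i+2, 2*j:2*j+2]       # 2x2 window, stride 2
        out[i, j] = win.max()
        r, c = numpy.unravel_index(win.argmax(), win.shape)
        dx[2*i + r, 2*j + c] = g[i, j]      # gradient only at the max

print(out)   # [[5. 3.] [6. 5.]]
print(dx)    # matches expect_bot0's diff above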
Example 3
    def test_ReLULayer(self):
        # top0.data = ReLU(bot0.data)
        # bot0.diff = ReLUGrad(top0.diff) 
        bot0 = Blob()
        bot0.Reshape([2,6])
        bot0.set_data([-1.0,2.0,-3.0,4.0,-5.0,6.0,-1.0,2.0,-3.0,4.0,-5.0,6.0])
        bot0.Reshape([2,6])

        top0 = Blob()
        top0.Reshape([2,6])
        top0.set_diff([1.0,-2.0,3.0,-4.0,5.0,-6.0,1.0,-2.0,3.0,-4.0,5.0,-6.0])
        top0.Reshape([2,6])

        expect_bot0 = Blob()
        expect_bot0.Reshape([2,6])
        expect_bot0.set_diff([0.0,-2.0,0.0,-4.0,0.0,-6.0,0.0,-2.0,0.0,-4.0,0.0,-6.0])
        expect_bot0.Reshape([2,6])

        expect_top0 = Blob()
        expect_top0.Reshape([2,6])
        expect_top0.set_data([0.0,2.0,0.0,4.0,0.0,6.0,0.0,2.0,0.0,4.0,0.0,6.0])
        expect_top0.Reshape([2,6])

        layer = ReLULayer()

        layer.Setup([bot0], [top0])
        layer.Forward([bot0], [top0])
        np.testing.assert_array_equal( expect_top0.data(), top0.data() )

        layer.Backward([top0], [], [bot0])
        np.testing.assert_array_equal( expect_bot0.diff(), bot0.diff() )
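The expected arrays follow directly from the ReLU rule: forward is max(x, 0), and backward zeroes the upstream diff wherever the input was non-positive. A short numpy check (local variables only):

import numpy as np

x = np.array([-1.0, 2.0, -3.0, 4.0, -5.0, 6.0])
g = np.array([1.0, -2.0, 3.0, -4.0, 5.0, -6.0])

print(np.maximum(x, 0))   # forward:  [0. 2. 0. 4. 0. 6.]
print(g * (x > 0))        # backward: [0. -2. 0. -4. 0. -6.]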
Example 4
 def test_Reshape(self):
     blob = Blob()
     blob.set_data(numpy.array(range(30), float))
     blob.set_diff(numpy.array(range(30), float))
     blob.Reshape((5, 3))
     blob.Reshape((5, 6))
     blob.Reshape((2, 15))
Example 5
    def test_SoftMaxLayer(self):
        bot0 = Blob()
        bot0.Reshape((3,))
        bot0.set_data([1,2,3])
        bot0.Reshape((3,))

        top0 = Blob()
        top0.Reshape((3,))

        expect_top0 = Blob()
        expect_top0.Reshape((3,))
        expect_top0.set_data([0.09003057, 0.24472847, 0.66524096])
        expect_top0.Reshape((3,))

        expect_bot0 = Blob()
        expect_bot0.Reshape((3,))
        expect_bot0.set_diff([0.0, 0.0, 0.0])
        expect_bot0.Reshape((3,))

        layer = SoftMaxLayer()
        layer.Setup([bot0], [top0])
        layer.Forward([bot0], [top0])

        np.testing.assert_array_almost_equal( top0.data(), expect_top0.data() )

        top0.set_diff(np.ones_like(top0.data()))
        layer.Backward([top0], [], [bot0])
        
        np.testing.assert_array_almost_equal( bot0.diff(), expect_bot0.diff() )
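The all-zero expected bottom diff is not an accident. The softmax backward pass is the Jacobian-vector product s * (g - dot(s, g)); with g equal to all ones, dot(s, g) is exactly 1 because the softmax outputs sum to 1, so the gradient vanishes. A quick numpy restatement (local variables only):

import numpy as np

x = np.array([1.0, 2.0, 3.0])
s = np.exp(x - x.max())
s /= s.sum()                      # numerically stable softmax

g = np.ones_like(s)               # the test's upstream diff
dx = s * (g - np.dot(s, g))       # Jacobian-vector product

print(s)    # [0.09003057 0.24472847 0.66524096]
print(dx)   # [0. 0. 0.]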
Example 6
 def test_ReshapeLike(self):
     blob = Blob()
     other1 = Blob()
     other2 = Blob()
     blob.set_data(numpy.array(range(30), float))
     blob.set_diff(numpy.array(range(30), float))
     blob.ReshapeLike(other1)
     blob.ReshapeLike(other2)
     blob.ReshapeLike(other1)
Example 7
    def test_InnerProductLayer(self):
        bot0 = Blob()
        bot0.Reshape((2,3))
        bot0.set_data([1,2,3,4,5,6])
        bot0.Reshape((2,3))

        top0 = Blob()

        expect_b = Blob()
        expect_b.Reshape((1,2))
        expect_b.set_diff([2,2])
        expect_b.Reshape((1,2))

        expect_W = Blob()
        expect_W.Reshape((3,2))
        expect_W.set_diff([5,5,7,7,9,9])
        expect_W.Reshape((3,2))

        expect_bot0 = Blob()
        expect_bot0.Reshape((2,3))
        expect_bot0.set_diff([3,7,11,3,7,11])
        expect_bot0.Reshape((2,3))

        layer = InnerProductLayer(3,2)
        layer.Setup([bot0], [top0])

        layer.W.set_data([1,2,3,4,5,6])
        layer.W.Reshape((3,2))

        expect_top0 = Blob()
        expect_top0.Reshape((2,2))
        expect_top0.set_data([22,28,49,64])
        expect_top0.Reshape((2,2))

        layer.Forward([bot0], [top0])
        np.testing.assert_array_almost_equal( top0.data(), expect_top0.data() )

        top0.set_diff(np.ones_like(top0.data()))
        layer.Backward([top0], [], [bot0])

        np.testing.assert_array_almost_equal( layer.b.diff(), expect_b.diff() )
        np.testing.assert_array_almost_equal( layer.W.diff(), expect_W.diff() )
        np.testing.assert_array_almost_equal( bot0.diff(), expect_bot0.diff() )
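All three expected gradients come from the standard dense-layer rules, dW = x.T @ g, db = g.sum(axis=0), dx = g @ W.T, applied to this test's numbers (assuming the bias starts at zero, which the expected forward values imply). A bare-numpy check:

import numpy as np

x = np.arange(1.0, 7.0).reshape(2, 3)   # bot0
W = np.arange(1.0, 7.0).reshape(3, 2)   # layer.W
g = np.ones((2, 2))                     # top0 diff

print(x @ W)           # forward: [[22. 28.] [49. 64.]]
print(x.T @ g)         # dW: [[5. 5.] [7. 7.] [9. 9.]]
print(g.sum(axis=0))   # db: [2. 2.]
print(g @ W.T)         # dx: [[3. 7. 11.] [3. 7. 11.]]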
Example 8
    def test_ConvolutionLayer(self):
        bot0 = Blob()
        bot0.Reshape((1,1,4,4))
        bot0.set_data([1,1,0,1,1,0,0,1,0,1,1,0,1,1,1,1])
        bot0.Reshape((1,1,4,4))

        top0 = Blob()

        expect_top0 = Blob()
        expect_top0.Reshape((1,1,3,3))
        expect_top0.set_data([2,1,1,2,2,1,2,3,3])
        expect_top0.Reshape((1,1,3,3))

        expect_W = Blob()
        expect_W.Reshape((1,1,2,2))
        expect_W.set_diff([5,5,6,6])
        expect_W.Reshape((1,1,2,2))

        expect_bot0 = Blob()
        expect_bot0.Reshape((1,1,4,4))
        expect_bot0.set_diff([1,1,1,0,2,3,3,1,2,3,3,1,1,2,2,1])
        expect_bot0.Reshape((1,1,4,4))

        expect_b = Blob()
        expect_b.Reshape((1,))
        expect_b.set_diff([9])
        expect_b.Reshape((1,))
     
        layer = ConvolutionLayer(2,2,1,0,1)
        layer.Setup([bot0], [top0])
        layer.W.set_data([1,0,1,1])
        layer.W.Reshape((1,1,2,2))
        layer.Forward([bot0], [top0])

        np.testing.assert_array_almost_equal( top0.data(), expect_top0.data() )

        top0.set_diff(np.ones_like(top0.data()))
       
        layer.Backward([top0], [], [bot0])

        np.testing.assert_array_almost_equal( layer.W.diff(), expect_W.diff() )
        np.testing.assert_array_almost_equal( layer.b.diff(), expect_b.diff() )
        np.testing.assert_array_almost_equal( bot0.diff(), expect_bot0.diff() )
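The expected numbers can be reproduced as plain 2-D cross-correlations; scipy is used here only for the check and is not a dependency of the layer itself. With an all-ones upstream diff, dW is the valid correlation of the input with the 3x3 ones, and db is simply the sum of the upstream diff:

import numpy as np
from scipy.signal import correlate2d

x = np.array([1,1,0,1,1,0,0,1,0,1,1,0,1,1,1,1], float).reshape(4, 4)
w = np.array([[1.0, 0.0], [1.0, 1.0]])   # layer.W after set_data

print(correlate2d(x, w, mode='valid'))
# [[2. 1. 1.]
#  [2. 2. 1.]
#  [2. 3. 3.]]

g = np.ones((3, 3))                      # top0 diff
print(correlate2d(x, g, mode='valid'))   # dW: [[5. 5.] [6. 6.]]
print(g.sum())                           # db: 9.0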
Example 9
class DropoutLayer(NeuronLayer):
    def __init__(self, threshold):
        NeuronLayer.__init__(self)
        self.threshold_ = threshold
        self.rand_blob_ = Blob()
        if 1.0 == threshold:
            self.scale_ = 1.0
        else:
            self.scale_ = 1.0 / (1.0 - threshold)

    def type(self):
        return 'Dropout'

    def LayerSetup(self, bottom, top):
        self.rand_blob_.ReshapeLike(bottom[0])
        # keep each unit with probability (1 - threshold); together with
        # scale_ = 1 / (1 - threshold) this keeps the expected output equal
        # to the input (the original p=threshold inverted the mask semantics)
        self.rand_blob_.set_data(
            numpy.random.binomial(n=1,
                                  p=1.0 - self.threshold_,
                                  size=bottom[0].data().shape))
        self.rand_blob_.ReshapeLike(bottom[0])

    def Reshape(self, bottom, top):
        NeuronLayer.Reshape(self, bottom, top)
        self.rand_blob_.ReshapeLike(bottom[0])

    def Forward_cpu(self, bottom, top):
        self.rand_blob_.ReshapeLike(bottom[0])
        # keep probability is (1 - threshold), matching scale_ above
        self.rand_blob_.set_data(
            numpy.random.binomial(n=1,
                                  p=1.0 - self.threshold_,
                                  size=bottom[0].data().shape))
        self.rand_blob_.ReshapeLike(bottom[0])
        top[0].set_data(
            numpy.multiply(bottom[0].data(),
                           self.rand_blob_.data() * self.scale_))

    def Backward_cpu(self, top, propagate_down, bottom):
        bottom[0].set_diff(
            numpy.multiply(top[0].diff(),
                           self.rand_blob_.data() * self.scale_))
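A small sanity check of the inverted-dropout scaling (reading threshold_ as the drop probability, which the 1 / (1 - threshold) scale implies): averaged over many random masks, the scaled output should approach the input.

import numpy

numpy.random.seed(0)
x = numpy.ones(8)
threshold = 0.5
scale = 1.0 / (1.0 - threshold)

trials = 10000
acc = numpy.zeros_like(x)
for _ in range(trials):
    mask = numpy.random.binomial(n=1, p=1.0 - threshold, size=x.shape)
    acc += x * mask * scale

print(acc / trials)   # each entry close to 1.0, since E[mask] * scale = 1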
Example 10
    def test_SoftmaxLossLayer(self):
        bot0 = Blob()
        bot0.Reshape([2,6])
        bot0.set_data([1.0,2.0,3.0,4.0,5.0,6.0,1.0,2.0,3.0,4.0,5.0,6.0])
        bot0.Reshape([2,6])

        bot1 = Blob()
        bot1.Reshape([2,6])
        bot1.set_data([1.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0])
        bot1.Reshape([2,6])

        top0 = Blob()
        top1 = Blob()

        expect_bot0 = Blob()
        expect_bot0.Reshape([2,6])
        expect_bot0.set_diff([-0.49786511, 0.00580323, 0.01577482, 0.0428804,
                              0.116561, 0.31684566, -0.49786511, 0.00580323,
                              0.01577482, 0.0428804, 0.116561, 0.31684566])
        expect_bot0.Reshape([2,6])

        expect_top1 = Blob()
        expect_top1.Reshape([2,6])
        expect_top1.set_data([0.00426978, 0.01160646, 0.03154963, 0.08576079,
                              0.23312201, 0.63369132, 0.00426978, 0.01160646,
                              0.03154963, 0.08576079, 0.23312201, 0.63369132])
        expect_top1.Reshape([2,6])

        layer = SoftmaxLossLayer()

        layer.Setup([bot0, bot1], [top0,top1])
        layer.Forward([bot0, bot1], [top0,top1])

        np.testing.assert_array_almost_equal( top1.data(), expect_top1.data() )

        top0.set_diff(1.0)
        layer.Backward([top0,top1], [], [bot0, bot1])

        np.testing.assert_array_almost_equal( bot0.diff(), expect_bot0.diff() )
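Both expected arrays are the classic softmax-cross-entropy quantities: top1 holds softmax(x) per row, and the bottom diff is (softmax(x) - labels) / batch_size with a batch size of 2. A per-row numpy check (local variables only):

import numpy as np

x = np.arange(1.0, 7.0)                     # one row of bot0
labels = np.array([1.0, 0, 0, 0, 0, 0])     # one row of bot1

s = np.exp(x - x.max())
s /= s.sum()

print(s)                  # row of expect_top1: [0.00426978 ... 0.63369132]
print((s - labels) / 2)   # row of expect_bot0.diff: [-0.49786511 ...]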
Example 11
class InnerProductLayer(Layer):
    def __init__(self, K, N):
        Layer.__init__(self)

        # number of input neurons (K)
        self.K_ = K

        # number of output neurons (N)
        self.N_ = N

        self.bias_term_ = None
        self.bias_multiplier_ = None
        self.transpose_ = False

        self.W = Blob()
        self.b = Blob()
        self.blobs_.append(self.W)
        self.blobs_.append(self.b)

    def LayerSetup(self, bottom, top):
        self.M_ = bottom[0].data().shape[0]

        W_shape = (self.K_, self.N_)
        b_shape = (1, self.N_)

        self.W.Reshape(W_shape)
        self.b.Reshape(b_shape)

        # Xavier-style fan statistics (note: the uniform Xavier draw below is
        # commented out; a fixed-sigma Gaussian is used instead)
        fan_in = self.W.count() / self.W.shape()[0]
        fan_out = self.W.count() / self.W.shape()[1]

        n = (fan_in + fan_out) / 2

        scale = numpy.sqrt(3.0 / n)
        #self.W.set_data(numpy.random.uniform(-scale, scale, self.W.count()) )
        self.W.set_data(
            numpy.random.normal(loc=0.0, scale=0.1, size=self.W.count()))
        self.W.Reshape(W_shape)

    def Reshape(self, bottom, top):
        top_shape = (self.M_, self.N_)
        top[0].Reshape(top_shape)

    def type(self):
        return 'InnerProduct'

    def ExactNumBottomBlobs(self):
        return 1

    def ExactNumTopBlobs(self):
        return 1

    def Forward_cpu(self, bottom, top):
        top[0].set_data(
            numpy.matmul(bottom[0].data(), self.W.data()) + self.b.data())

    def Backward_cpu(self, top, propagate_down, bottom):
        self.W.set_diff(
            self.W.diff() +
            numpy.matmul(bottom[0].data().transpose(), top[0].diff()))
        self.b.set_diff(self.b.diff() + numpy.sum(top[0].diff(), axis=0))
        bottom[0].set_diff(
            numpy.matmul(top[0].diff(),
                         self.W.data().transpose()))
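One way to validate Backward_cpu is a finite-difference check on a tiny problem. The sketch below (plain numpy, nothing from the layer API) compares the analytic input gradient for L = sum(x @ W + b) against central differences:

import numpy as np

x = np.arange(1.0, 7.0).reshape(2, 3)
W = np.arange(1.0, 7.0).reshape(3, 2)
b = np.zeros(2)

analytic = np.ones((2, 2)) @ W.T          # dx per Backward_cpu's rule

eps = 1e-6
numeric = np.zeros_like(x)
for idx in np.ndindex(*x.shape):
    xp = x.copy(); xp[idx] += eps
    xm = x.copy(); xm[idx] -= eps
    numeric[idx] = ((xp @ W + b).sum() - (xm @ W + b).sum()) / (2 * eps)

print(np.allclose(analytic, numeric))     # True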
Example 12
    def Forward_cpu(self, bottom, top):
        # (mask generation omitted in this excerpt; see the full layer above)
        top[0].set_data(
            numpy.multiply(bottom[0].data(),
                           self.rand_blob_.data() * self.scale_))

    def Backward_cpu(self, top, propagate_down, bottom):
        bottom[0].set_diff(
            numpy.multiply(top[0].diff(),
                           self.rand_blob_.data() * self.scale_))


if __name__ == '__main__':
    layer = DropoutLayer(0.5)
    bottom = Blob()
    top = Blob()

    bottom.Reshape((4, 4))
    bottom.set_data(numpy.arange(16) * 1.0)
    bottom.Reshape((4, 4))

    layer.Setup([bottom], [top])

    layer.Forward([bottom], [top])

    top.set_diff(numpy.arange(16) * 1.0)
    top.Reshape((4, 4))
    layer.Backward([top], [], [bottom])

    print(bottom.data())
    print(layer.rand_blob_.data())
    print(layer.scale_)
    print(top.data())
    print(bottom.diff())
Example 13
        if self.EqualNumBottomTopBlobs():
            if len(top) != len(bottom):
                print(self.type() + " Layer produces one top blob as output "
                      "for each bottom blob input.")

    def SetLossWeights(self, top):
        pass


if __name__ == '__main__':
    bottom = Blob(numpy.float64, (2, 3))
    top = Blob(numpy.float64, (2, 3))
    top2 = Blob(numpy.float64, (2, 3))

    bottom.set_data([-2, -1, 0, 1, 2, 3])
    bottom.Reshape((2, 3))

    top2.set_diff([-2, -1, 0, 1, 2, 5])
    top2.Reshape((2, 3))

    layer1 = ReLULayer()
    layer1.Setup([bottom], [top])
    layer1.Forward([bottom], [top])

    layer2 = ExpLayer()
    layer2.Setup([top], [top2])
    layer2.Forward([top], [top2])

    layer2.Backward([top2], [], [top])
    layer1.Backward([top], [], [bottom])
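Note that the Backward calls run in reverse layer order, which is just the chain rule. The same composition in bare numpy, assuming ExpLayer computes an elementwise exp (local variables only):

import numpy as np

x = np.array([-2.0, -1.0, 0.0, 1.0, 2.0, 3.0])
g2 = np.array([-2.0, -1.0, 0.0, 1.0, 2.0, 5.0])   # top2 diff

h = np.maximum(x, 0)    # layer1 forward (ReLU)
y = np.exp(h)           # layer2 forward (Exp)

g1 = g2 * y             # layer2 backward: d(exp(h))/dh = exp(h)
gx = g1 * (x > 0)       # layer1 backward: ReLU mask

print(y)
print(gx)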
Example 14
 def test_asum_data(self):
     blob = Blob()
     blob.set_data(numpy.array(range(30), float))
     res = blob.asum_data()
     self.assertEqual(res, 435.0)
Example 15
 def test_sumsq_data(self):
     blob = Blob()
     blob.set_data(numpy.array(range(4), float))
     res = blob.sumsq_data()
     self.assertEqual(res, 14.0)
Example 16
 def test_data_at(self):
     blob = Blob()
     blob.set_data(numpy.array(range(4)).reshape(2, 2))
     res = blob.data_at((1, 1))
     self.assertEqual(res, 3)
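If Blob follows the usual Caffe naming, each of these three helpers reduces to a one-line numpy expression: asum_data is the L1 norm of the data, sumsq_data the sum of squares, and data_at a plain index lookup. A quick restatement of the three tests:

import numpy

a = numpy.array(range(30), float)
print(numpy.abs(a).sum())        # asum_data: 435.0

b = numpy.array(range(4), float)
print(numpy.square(b).sum())     # sumsq_data: 14.0

c = numpy.arange(4).reshape(2, 2)
print(c[1, 1])                   # data_at((1, 1)): 3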
Example 17
    def Forward_cpu(self, bottom, top):
        self.diff_.set_data(bottom[0].data() - bottom[1].data())
        dot = numpy.dot(self.diff_.data(), self.diff_.data())
        loss = dot / bottom[0].shape()[0] / 2
        top[0].set_data(loss)

    def Backward_cpu(self, top, propagate_down, bottom):
        print(top[0].diff())
        print(bottom[0].shape()[0])
        print(self.diff_.data())
        bottom[0].set_diff(top[0].diff() / bottom[0].shape()[0] *
                           self.diff_.data())


if __name__ == '__main__':
    bottom_0 = Blob(numpy.float64, [6])
    bottom_1 = Blob(numpy.float64, [6])

    bottom_0.set_data([1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
    bottom_1.set_data([1.0, 0.0, 0.0, 0.0, 0.0, 0.0])

    bottom_0.Reshape([1, 6])
    bottom_1.Reshape([1, 6])

    top = Blob(numpy.float64, 0)
    top.set_diff(10.0)

    layer = EuclideanLossLayer()
    layer.Setup([bottom_0, bottom_1], [top])
    layer.Forward([bottom_0, bottom_1], [top])
    layer.Backward([top], [], [bottom_0, bottom_1])
    print(top.data())
    print(bottom_0.diff())
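The script's arithmetic is easy to verify by hand: diff = (0, 2, 3, 4, 5, 6), so the printed loss is 90 / 1 / 2 = 45.0 and the printed bottom diff is 10 * diff (the top diff of 10 scaled through the batch size of 1). A bare-numpy restatement:

import numpy

d = numpy.array([0.0, 2.0, 3.0, 4.0, 5.0, 6.0])
print(numpy.dot(d, d) / 1 / 2)   # top.data(): 45.0
print(10.0 / 1 * d)              # bottom_0.diff(): [0. 20. 30. 40. 50. 60.]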