Example 1
def test_ReshapeLike(self):
    # Build three blobs; the source blob gets 30 elements of data and diff.
    blob = Blob()
    other1 = Blob()
    other2 = Blob()
    blob.set_data(numpy.array(range(30), float))
    blob.set_diff(numpy.array(range(30), float))
    # Reshaping to match each (empty) blob in turn must not raise.
    blob.ReshapeLike(other1)
    blob.ReshapeLike(other2)
    blob.ReshapeLike(other1)
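
All of these examples exercise `Blob.ReshapeLike` from a pure-Python port of Caffe's blob API. For context, here is a minimal sketch of a blob whose `ReshapeLike` adopts another blob's shape; this is an assumption about the interface implied by the examples, not the library's actual implementation:

import numpy

class Blob(object):
    """Minimal blob sketch: data and diff arrays plus a shape (hypothetical)."""

    def __init__(self, dtype=numpy.float64, shape=None):
        self.shape_ = list(shape) if shape is not None else [0]
        self.data_ = numpy.zeros(self.shape_, dtype)
        self.diff_ = numpy.zeros(self.shape_, dtype)

    def Reshape(self, shape):
        # Reallocate data and diff to the new shape (zero-filled in this sketch).
        self.shape_ = list(shape)
        self.data_ = numpy.zeros(self.shape_, self.data_.dtype)
        self.diff_ = numpy.zeros(self.shape_, self.diff_.dtype)

    def ReshapeLike(self, other):
        # Adopt the other blob's shape; this is what every example here relies on.
        self.Reshape(other.shape())

    def shape(self):
        return self.shape_

    def data(self):
        return self.data_

    def diff(self):
        return self.diff_

    def set_data(self, arr):
        self.data_ = numpy.asarray(arr)

    def set_diff(self, arr):
        self.diff_ = numpy.asarray(arr)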
Example 2
def AddTrainNet(self, net):
    Solver.AddTrainNet(self, net)
    params = self.net_.learnable_params()
    # Allocate one pair of state blobs (s_, r_) per learnable parameter,
    # each shaped like the parameter it tracks.
    for i in range(len(params)):
        s = Blob()
        r = Blob()
        s.ReshapeLike(params[i])
        r.ReshapeLike(params[i])
        self.s_.append(s)
        self.r_.append(r)
Example 3
def AddTrainNet(self, net):
    Solver.AddTrainNet(self, net)
    params = self.net_.learnable_params()
    # One history/update/temp blob per learnable parameter, each shaped
    # like the parameter it tracks.
    for i in range(len(params)):
        h = Blob()
        u = Blob()
        t = Blob()
        h.ReshapeLike(params[i])
        u.ReshapeLike(params[i])
        t.ReshapeLike(params[i])
        self.history_.append(h)
        self.update_.append(u)
        self.temp_.append(t)

    # This solver keeps a second history slot per parameter (as Caffe's
    # AdaDelta does). Extending history_ with itself would alias the
    # existing blobs, so append fresh blobs instead.
    for i in range(len(params)):
        h2 = Blob()
        h2.ReshapeLike(params[i])
        self.history_.append(h2)
Example 4
def AddTrainNet(self, net):
    Solver.AddTrainNet(self, net)
    params = self.net_.learnable_params()
    # One history blob per learnable parameter, shaped like the parameter,
    # to accumulate the solver's per-parameter state (e.g. momentum).
    for i in range(len(params)):
        blob = Blob()
        blob.ReshapeLike(params[i])
        self.history_.append(blob)
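
The history blobs allocated in these AddTrainNet variants typically carry per-parameter solver state (momentum, squared-gradient accumulators, and the like). As a sketch of how that state might be consumed, here is a hypothetical momentum-SGD update step; ComputeUpdateValue, rate, and momentum are illustrative names, not confirmed by these examples:

def ComputeUpdateValue(self, param_id, rate, momentum):
    # Hypothetical momentum update: v <- momentum * v + rate * grad.
    param = self.net_.learnable_params()[param_id]
    history = self.history_[param_id]
    v = momentum * history.data() + rate * param.diff()
    history.set_data(v)
    # Apply the update: the parameter moves against the gradient.
    param.set_data(param.data() - v)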
Example 5
class EuclideanLossLayer(LossLayer):
    def __init__(self):
        LossLayer.__init__(self)
        self.diff_ = Blob(numpy.float64, [6])

    def Reshape(self, bottom, top):
        LossLayer.Reshape(self, bottom, top)
        # diff_ caches bottom[0] - bottom[1], so it must match bottom[0].
        self.diff_.ReshapeLike(bottom[0])

    def type(self):
        return 'EuclideanLoss'

    def AllowForceBackward(self, bottom_index):
        return True

    def Forward_cpu(self, bottom, top):
        # loss = ||bottom[0] - bottom[1]||^2 / (2 * N), treating the data
        # as a flat vector.
        self.diff_.set_data(bottom[0].data() - bottom[1].data())
        dot = numpy.dot(self.diff_.data(), self.diff_.data())
        loss = dot / bottom[0].shape()[0] / 2
        top[0].set_data(loss)

    def Backward_cpu(self, top, propagate_down, bottom):
        print(top[0].diff())
        print(bottom[0].shape()[0])
        print(self.diff_.data())
        # d loss / d bottom[0] = top diff * (bottom[0] - bottom[1]) / N
        bottom[0].set_diff(top[0].diff() / bottom[0].shape()[0] *
                           self.diff_.data())
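
Backward_cpu follows from differentiating the loss: with d = bottom[0] - bottom[1] and batch size N, loss = d·d / (2N), so the gradient with respect to bottom[0] is d / N, scaled by the incoming top[0].diff(). A standalone NumPy check of that gradient against a central finite difference (plain arrays, not the Blob API):

import numpy

a = numpy.arange(6, dtype=numpy.float64)
b = numpy.ones(6)
N = a.shape[0]

def loss(x):
    d = x - b
    return numpy.dot(d, d) / N / 2

# Analytic gradient: (a - b) / N.
grad = (a - b) / N

# Central finite-difference check on one coordinate.
eps = 1e-6
probe = numpy.zeros_like(a)
probe[2] = eps
numeric = (loss(a + probe) - loss(a - probe)) / (2 * eps)
assert abs(numeric - grad[2]) < 1e-8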
Example 6
class DropoutLayer(NeuronLayer):
    def __init__(self, threshold):
        NeuronLayer.__init__(self)
        # threshold_ is the dropout ratio: the probability of zeroing a unit.
        self.threshold_ = threshold
        self.rand_blob_ = Blob()
        if threshold == 1.0:
            self.scale_ = 1.0
        else:
            # Inverted dropout: kept units are scaled by 1 / keep probability.
            self.scale_ = 1.0 / (1.0 - threshold)

    def type(self):
        return 'Dropout'

    def LayerSetup(self, bottom, top):
        self.rand_blob_.ReshapeLike(bottom[0])
        # Mask entries are 1 with the keep probability 1 - threshold_.
        self.rand_blob_.set_data(
            numpy.random.binomial(n=1,
                                  p=1.0 - self.threshold_,
                                  size=bottom[0].data().shape))

    def Reshape(self, bottom, top):
        NeuronLayer.Reshape(self, bottom, top)
        self.rand_blob_.ReshapeLike(bottom[0])

    def Forward_cpu(self, bottom, top):
        # Draw a fresh Bernoulli mask each forward pass, shaped like the
        # input, then apply it with the inverted-dropout scale.
        self.rand_blob_.ReshapeLike(bottom[0])
        self.rand_blob_.set_data(
            numpy.random.binomial(n=1,
                                  p=1.0 - self.threshold_,
                                  size=bottom[0].data().shape))
        top[0].set_data(
            numpy.multiply(bottom[0].data(),
                           self.rand_blob_.data() * self.scale_))

    def Backward_cpu(self, top, propagate_down, bottom):
        # Gradient flows only through the units kept in the forward pass.
        bottom[0].set_diff(
            numpy.multiply(top[0].diff(),
                           self.rand_blob_.data() * self.scale_))
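
Since scale_ = 1 / (1 - threshold_), this layer implements inverted dropout: when each unit is kept with probability 1 - threshold_ and scaled by 1 / (1 - threshold_), the expected output equals the input, so no rescaling is needed at test time. A standalone NumPy check of that expectation (the drop-ratio reading of threshold_ is an assumption carried over from Caffe's dropout semantics):

import numpy

threshold = 0.4                      # drop ratio
keep = 1.0 - threshold
scale = 1.0 / keep

rng = numpy.random.default_rng(0)
x = numpy.ones(100000)
mask = rng.binomial(n=1, p=keep, size=x.shape)

# Inverted dropout preserves the expectation: the mean stays close to 1.
out = x * mask * scale
assert abs(out.mean() - 1.0) < 0.02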