Code example #1
File: logistic.py — Project: CheMcCandless/udl
    def get_net(self, data_layer):
        """Build a logistic-regression network: data -> fc1 -> softmax.

        Stacks a single fully connected layer on top of the given data
        layer, with a SoftmaxWithLoss layer for the TRAIN phase and a
        plain Softmax output layer for the TEST phase.

        :param data_layer: pre-built data/input layer added first to the net.
        :return: the network serialized to its string form (via Network.str()).
        """
        net = Network()
        net.add(data_layer)
        net.name = "LogisticRegressionNet"

        # num_output=2 — presumably a two-class classifier; confirm with caller.
        fc1 = InnerProduct(name="fc1", top="fc1", bottom="data", num_output=2)
        # Train-phase loss over fc1 activations vs. the labels.
        train_loss = Loss(name="loss", type=LossType.SoftmaxWithLoss,
                          bottom=["fc1", "label"], include=Phase.TRAIN)
        # Test-phase softmax probabilities exposed as "output".
        test_output = Loss(name="output", type=LossType.SOFTMAX, top="output",
                           bottom="fc1", include=Phase.TEST)

        net.add(fc1)
        net.add(train_loss)
        net.add(test_output)

        # str(x) is the idiomatic spelling of the original x.__str__() chain.
        return str(net.str())
Code example #2
    def get_net(self, data_layer):
        """Assemble the logistic-regression net and return its string form.

        One fully connected layer ("fc1") feeds a train-phase
        SoftmaxWithLoss layer and a test-phase Softmax output layer.

        :param data_layer: pre-built data/input layer added first to the net.
        :return: string representation produced by the network.
        """
        network = Network()
        network.add(data_layer)
        network.name = "LogisticRegressionNet"

        fc = InnerProduct(name="fc1", top="fc1", bottom="data", num_output=2)
        train_loss = Loss(name="loss",
                          type=LossType.SoftmaxWithLoss,
                          bottom=["fc1", "label"],
                          include=Phase.TRAIN)
        test_output = Loss(name="output",
                           type=LossType.SOFTMAX,
                           top="output",
                           bottom="fc1",
                           include=Phase.TEST)

        # Register the layers in dependency order.
        for layer in (fc, train_loss, test_output):
            network.add(layer)

        return network.str().__str__()
Code example #3
    def get_net(self, data_layer):
        """Build a LeNet-style network: two conv/pool stages, then ip1 ->
        relu1 -> ip2, with a train-phase SoftmaxWithLoss and a test-phase
        Softmax output layer.

        :param data_layer: pre-built data/input layer added first to the net.
        :return: the network serialized to its string form (via Network.str()).
        """
        net = Network()
        net.add(data_layer)
        net.name = "LeNet"

        conv_bottoms = ['data', 'pool0']
        conv_outputs = [20, 50]

        # FIX: original bound this to `max`, shadowing the builtin.
        max_pool = pb2.PoolingParameter.MAX
        xavier = pb2.FillerParameter(type='xavier')
        const = pb2.FillerParameter(type='constant')

        # Two conv+pool stages: conv0/pool0 on 'data', conv1/pool1 on 'pool0'.
        for i in range(2):
            conv_name = 'conv%d' % i
            pool_name = 'pool%d' % i
            conv = Convolution(name=conv_name, bottom=conv_bottoms[i],
                               num_output=conv_outputs[i], kernel_size=5,
                               stride=1, weight_filler=xavier,
                               bias_filler=const)
            pool = Pooling(name=pool_name, bottom=conv_name, kernel_size=2,
                           stride=2, pool=max_pool)
            net.add(conv)
            net.add(pool)

        ip1 = InnerProduct(name="ip1", bottom="pool1", num_output=500,
                           weight_filler=xavier, bias_filler=const)
        # FIX: the original named this layer with the loop variable `r`
        # leaked out of the for-loop above (whose final value was "relu1");
        # name it explicitly so the behavior no longer depends on loop leakage.
        relu1 = Relu(name="relu1", bottom="ip1", top="ip1")
        ip2 = InnerProduct(name="ip2", bottom="ip1", num_output=10,
                           weight_filler=xavier, bias_filler=const)

        train_loss = Loss(name="loss", type=LossType.SoftmaxWithLoss,
                          bottom=["ip2", "label"], include=Phase.TRAIN)
        test_output = Loss(name="output", type=LossType.SOFTMAX, top="output",
                           bottom="ip2", include=Phase.TEST)

        for layer in (ip1, relu1, ip2, train_loss, test_output):
            net.add(layer)

        # str(x) is the idiomatic spelling of the original x.__str__() chain.
        return str(net.str())