Example #1
0
    def __init__(self):
        """Build the discriminator: four strided conv stages that map an
        input image down to a single sigmoid score."""
        super(Discriminator, self).__init__()
        # Original comment said the input is 1*28*28 MNIST, but the stage
        # sizes noted below (-> 64*16*16) imply a 32x32 input --
        # NOTE(review): confirm against the data pipeline.
        # Stage 1: nc -> ndf channels, spatial size halved.
        self.conv1 = ConvLayer(nc, ndf, 4, 4, zero_padding=1, stride=2,
                               method='SAME', bias_required=False)
        self.lrelu1 = Activators.LeakyReLU(0.2)

        # Stage 2: 64*16*16 -> 128*8*8 (ndf -> ndf*2, halved again).
        self.conv2 = ConvLayer(ndf, ndf * 2, 4, 4, zero_padding=1, stride=2,
                               method='SAME', bias_required=False)
        self.bn1 = BatchNorm(ndf * 2)
        self.lrelu2 = Activators.LeakyReLU(0.2)

        # Stage 3: 128*8*8 -> 256*4*4 (ndf*2 -> ndf*4).
        self.conv3 = ConvLayer(ndf * 2, ndf * 4, 4, 4, zero_padding=1, stride=2,
                               method='SAME', bias_required=False)
        self.bn2 = BatchNorm(ndf * 4)
        self.lrelu3 = Activators.LeakyReLU(0.2)

        # Head: a 4x4 VALID conv collapses 256*4*4 to a 1*1 score,
        # squashed by a sigmoid fused with the cross-entropy loss.
        self.conv4 = ConvLayer(ndf * 4, 1, 4, 4, zero_padding=0, stride=1,
                               method='VALID', bias_required=False)
        self.sigmoid = Activators.Sigmoid_CE()
 def __init__(self):
     """Build the generator: one VALID deconv followed by three stride-2
     SAME deconvs, projecting a latent vector up to an image."""
     super(Generator, self).__init__()
     # Input is a latent vector Z of length nz.
     # Stage 1: 100*1 -> 256*4*4 (nz -> ngf*4 feature maps at 4x4).
     self.deconv1 = Deconv(nz, ngf * 4, 4, zero_padding=0, stride=1,
                           method='VALID', bias_required=False)
     self.bn1 = BatchNorm(ngf * 4)
     self.relu1 = Activators.ReLU()
     # Stage 2: 256*4*4 -> 128*8*8 (spatial size doubled).
     self.deconv2 = Deconv(ngf * 4, ngf * 2, 4, zero_padding=1, stride=2,
                           method='SAME', bias_required=False)
     self.bn2 = BatchNorm(ngf * 2)
     self.relu2 = Activators.ReLU()
     # Stage 3: 128*8*8 -> 64*16*16.
     self.deconv3 = Deconv(ngf * 2, ngf, 4, zero_padding=1, stride=2,
                           method='SAME', bias_required=False)
     self.bn3 = BatchNorm(ngf)
     self.relu3 = Activators.ReLU()
     # Output stage: 64*16*16 -> 1*32*32, squashed to [-1, 1] by tanh.
     self.deconv4 = Deconv(ngf, nc, 4, zero_padding=1, stride=2,
                           method='SAME', bias_required=False)
     self.tanh = Activators.Tanh()
Example #3
0
def bn_test(test_num=10):
    """Benchmark secret-shared batch norm against the plain implementation.

    Builds one random (1, C, H, W) activation, splits it into two additive
    shares (share1 + share2 == x), configures BatchNorm and BatchNorm_sec
    with identical gamma/beta, runs a reference forward pass of each, then
    times ``test_num`` secret-shared forward passes and prints the average
    latency in milliseconds.

    Args:
        test_num: number of timed forward passes to average over
            (default 10, matching the original hard-coded count).
    """
    width = 8
    height = 8
    channel = 256

    # x_numpy is the real input; x_numpy_1 / x_numpy_2 are additive shares.
    x_numpy = np.random.randn(1, channel, height, width).astype(np.float64)
    x_numpy_1 = np.random.randn(1, channel, height, width).astype(np.float64)
    x_numpy_2 = x_numpy - x_numpy_1

    # Affine parameters: gamma ~ N(1.0, 0.02), beta = 0.
    w_numpy = np.random.normal(1.0, 0.02, size=(channel)).astype(np.float64)
    b_numpy = np.zeros(channel).astype(np.float64)

    bn = BatchNorm(channel)
    bn_sec = BatchNorm_sec(channel)
    # Give both implementations identical parameters so outputs are comparable.
    bn.set_gamma(Parameter(w_numpy, requires_grad=True))
    bn.set_beta(Parameter(b_numpy, requires_grad=True))
    bn_sec.set_gamma(Parameter(w_numpy, requires_grad=True))
    bn_sec.set_beta(Parameter(b_numpy, requires_grad=True))

    # Reference (plain) forward, and one warm-up secret-shared forward so the
    # timed loop below does not include first-call setup costs.
    bn_out = bn.forward(x_numpy)
    bn_out_sec_1, bn_out_sec_2 = bn_sec.forward(x_numpy_1, x_numpy_2)
    # Reconstruction error of the shared result vs. the plain result:
    # print('error sum: ', bn_out - (bn_out_sec_1 + bn_out_sec_2))

    time_avg = 0.0
    for _ in range(test_num):
        # perf_counter() is monotonic and high-resolution, unlike time.time(),
        # so it is the right clock for micro-benchmarks.
        start_time_sec = time.perf_counter()
        bn_out_sec_1, bn_out_sec_2 = bn_sec.forward(x_numpy_1, x_numpy_2)
        end_time_sec = time.perf_counter()
        time_avg += (end_time_sec - start_time_sec) * 1000  # milliseconds
    print('time avg sec: \n', time_avg / test_num)