Example #1
class Discriminator(Module):
    def __init__(self):
        super(Discriminator, self).__init__()
        # Input: MNIST (1*28*28, resized to 1*32*32 so the shapes below hold)
        # 1*32*32 -> 64*16*16
        self.conv1 = ConvLayer(nc,
                               ndf,
                               4,
                               4,
                               zero_padding=1,
                               stride=2,
                               method='SAME',
                               bias_required=False)
        self.lrelu1 = Activators.LeakyReLU(0.2)

        # 64*16*16 -> 128*8*8
        self.conv2 = ConvLayer(ndf,
                               ndf * 2,
                               4,
                               4,
                               zero_padding=1,
                               stride=2,
                               method='SAME',
                               bias_required=False)
        self.bn1 = BatchNorm(ndf * 2)
        self.lrelu2 = Activators.LeakyReLU(0.2)

        # 128*8*8 -> 256*4*4
        self.conv3 = ConvLayer(ndf * 2,
                               ndf * 4,
                               4,
                               4,
                               zero_padding=1,
                               stride=2,
                               method='SAME',
                               bias_required=False)
        self.bn2 = BatchNorm(ndf * 4)
        self.lrelu3 = Activators.LeakyReLU(0.2)

        # 256*4*4 -> 1*1
        self.conv4 = ConvLayer(ndf * 4,
                               1,
                               4,
                               4,
                               zero_padding=0,
                               stride=1,
                               method='VALID',
                               bias_required=False)
        self.sigmoid = Activators.Sigmoid_CE()
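
The shape comments above can be checked with the standard convolution output-size formula: out = floor((in + 2*pad - kernel) / stride) + 1. A minimal sketch (conv_out is a hypothetical helper, not part of the ConvLayer API):

def conv_out(size, kernel, stride, pad):
    # standard convolution output-size formula
    return (size + 2 * pad - kernel) // stride + 1

s = 32  # input assumed resized to 32*32
for kernel, stride, pad in [(4, 2, 1), (4, 2, 1), (4, 2, 1), (4, 1, 0)]:
    s = conv_out(s, kernel, stride, pad)
    print(s)  # 16, 8, 4, 1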
Example #2
class Discriminator(Module):
    def __init__(self):
        super(Discriminator, self).__init__()
        # Input: MNIST (1*28*28, resized to 1*32*32 so the shapes below hold)
        # 1*32*32 -> 64*16*16
        self.conv1 = ConvLayer(nc, ndf, 4, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.lrelu1 = Activators.LeakyReLU(0.2)

        # 64*16*16 -> 128*8*8
        self.conv2 = ConvLayer(ndf, ndf*2, 4, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn1 = BatchNorm(ndf*2)
        self.lrelu2 = Activators.LeakyReLU(0.2)

        # 128*8*8 -> 256*4*4
        self.conv3 = ConvLayer(ndf*2, ndf*4, 4, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn2 = BatchNorm(ndf*4)
        self.lrelu3 = Activators.LeakyReLU(0.2)

        # 256*4*4 -> 1*1
        self.conv4 = ConvLayer(ndf*4, 1, 4, 4, zero_padding=0, stride=1, method='VALID', bias_required=False)
        self.sigmoid = Activators.Sigmoid_CE()

    def forward(self, x_input):
        l1 = self.lrelu1.forward(self.conv1.forward(x_input))

        l2 = self.lrelu2.forward(self.bn1.forward(self.conv2.forward(l1)))

        l3 = self.lrelu3.forward(self.bn2.forward(self.conv3.forward(l2)))
        
        l4 = self.conv4.forward(l3)
        # print('D l1 shape: ',l1.shape)
        # print('D l2 shape: ',l2.shape)
        # print('D l3 shape: ',l3.shape)
        # print('D l4 shape: ',l4.shape)
        output_sigmoid = self.sigmoid.forward(l4)
        return output_sigmoid
    
    def backward(self, dy):
        # print('dy.shape: ', dy.shape)
        dy_sigmoid = self.sigmoid.gradient(dy)
        # print('dy_sigmoid.shape: ', dy_sigmoid.shape)
        dy_l4 = self.conv4.gradient(dy_sigmoid)
        dy_l3 = self.conv3.gradient(self.bn2.gradient(self.lrelu3.gradient(dy_l4)))
        dy_l2 = self.conv2.gradient(self.bn1.gradient(self.lrelu2.gradient(dy_l3)))
        dy_l1 = self.conv1.gradient(self.lrelu1.gradient(dy_l2))
        # print('D_backward output shape: ',dy_l1.shape)
        return dy_l1
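
Both passes above follow one contract: every layer exposes forward(x) and gradient(dy), forward calls run top-down, and gradient calls run in exact reverse order, with each layer caching whatever it needs during forward. A minimal sketch of that contract with a toy LeakyReLU (illustrative only, not the Activators implementation):

import numpy as np

class ToyLeakyReLU:
    def __init__(self, alpha):
        self.alpha = alpha

    def forward(self, x):
        self.mask = x > 0  # cache the activation pattern for backward
        return np.where(self.mask, x, self.alpha * x)

    def gradient(self, dy):
        # slope is 1 where x > 0, alpha elsewhere
        return np.where(self.mask, dy, self.alpha * dy)

layer = ToyLeakyReLU(0.2)
y = layer.forward(np.random.randn(2, 3))
dx = layer.gradient(np.ones_like(y))  # backward mirrors forward in reverse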
Example #3
class Generator(Module):
    def __init__(self):
        super(Generator, self).__init__()
        # Build the generator's network components
        # Input: noise vector z of shape [100,]
        # 100*1 -> 256*4*4
        self.deconv1 = Deconv(nz, ngf*4, 4, zero_padding=0, stride=1, method='VALID', bias_required=False)
        self.bn1 = BatchNorm(ngf*4)
        self.relu1 = Activators.ReLU()
        # 256*4*4 -> 128*8*8
        self.deconv2 = Deconv(ngf*4, ngf*2, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn2 = BatchNorm(ngf*2)
        self.relu2 = Activators.ReLU()
        # 128*8*8 -> 64*16*16
        self.deconv3 = Deconv(ngf*2, ngf, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn3 = BatchNorm(ngf)
        self.relu3 = Activators.ReLU()
        # 64*16*16 -> 1*32*32
        self.deconv4 = Deconv(ngf, nc, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.tanh = Activators.Tanh()
Example #4
class Generator(Module):
    def __init__(self):
        super(Generator, self).__init__()
        # Build the generator's network components
        # Input: noise vector z of shape [100,]
        # 100*1 -> 256*4*4
        self.deconv1 = Deconv(nz, ngf*4, 4, zero_padding=0, stride=1, method='VALID', bias_required=False)
        self.bn1 = BatchNorm(ngf*4)
        self.relu1 = Activators.ReLU()
        # 256*4*4 -> 128*8*8
        self.deconv2 = Deconv(ngf*4, ngf*2, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn2 = BatchNorm(ngf*2)
        self.relu2 = Activators.ReLU()
        # 128*8*8 -> 64*16*16
        self.deconv3 = Deconv(ngf*2, ngf, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.bn3 = BatchNorm(ngf)
        self.relu3 = Activators.ReLU()
        # 64*16*16 -> 1*32*32
        self.deconv4 = Deconv(ngf, nc, 4, zero_padding=1, stride=2, method='SAME', bias_required=False)
        self.tanh = Activators.Tanh()

    def forward(self, x_input):
        # print('G input shape: ',x_input.shape)
        l1 = self.relu1.forward(self.bn1.forward(self.deconv1.forward(x_input)))
        l2 = self.relu2.forward(self.bn2.forward(self.deconv2.forward(l1)))
        l3 = self.relu3.forward(self.bn3.forward(self.deconv3.forward(l2)))
        l4 = self.deconv4.forward(l3)

        # print('G l1 shape: ',l1.shape)
        # print('G l2 shape: ',l2.shape)
        # print('G l3 shape: ',l3.shape)
        # print('G l4 shape: ',l4.shape)
        output_tanh = self.tanh.forward(l4)

        return output_tanh

    def backward(self, dy):
        dy_tanh = self.tanh.gradient(dy)
        dy_l4 = self.deconv4.gradient(dy_tanh)
        dy_l3 = self.deconv3.gradient(self.bn3.gradient(self.relu3.gradient(dy_l4)))
        dy_l2 = self.deconv2.gradient(self.bn2.gradient(self.relu2.gradient(dy_l3)))
        self.deconv1.gradient(self.bn1.gradient(self.relu1.gradient(dy_l2)))
        # gradient w.r.t. the noise input is not needed, so nothing is returned
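
The generator's shape comments follow the transposed-convolution size formula: out = (in - 1) * stride - 2 * pad + kernel. A quick sanity check of the 1 -> 4 -> 8 -> 16 -> 32 chain (deconv_out is a hypothetical helper, not part of the Deconv API above):

def deconv_out(size, kernel, stride, pad):
    # standard transposed-convolution output-size formula
    return (size - 1) * stride - 2 * pad + kernel

s = 1  # spatial size of the reshaped noise vector
for kernel, stride, pad in [(4, 1, 0), (4, 2, 1), (4, 2, 1), (4, 2, 1)]:
    s = deconv_out(s, kernel, stride, pad)
    print(s)  # 4, 8, 16, 32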
Example #5
import time

import numpy as np

# BatchNorm, BatchNorm_sec and Parameter are assumed to come from the
# surrounding project (the same modules as the examples above).
def bn_test():
    bit_length = 64
    width = 8
    height = 8
    channel = 256
    
    x_numpy = np.random.randn(1, channel, height, width).astype(np.float64)
    # split the input into two additive secret shares: x = x_1 + x_2
    x_numpy_1 = np.random.randn(1, channel, height, width).astype(np.float64)
    x_numpy_2 = x_numpy - x_numpy_1
    # print('input: ',x_numpy)

    w_numpy = np.random.normal(1.0, 0.02, size=(channel,)).astype(np.float64)
    b_numpy = np.zeros(channel).astype(np.float64)

    bn = BatchNorm(channel)
    bn_sec = BatchNorm_sec(channel)
    # Set the parameters
    bn.set_gamma(Parameter(w_numpy, requires_grad=True))
    bn.set_beta(Parameter(b_numpy, requires_grad=True))
    bn_sec.set_gamma(Parameter(w_numpy, requires_grad=True))
    bn_sec.set_beta(Parameter(b_numpy, requires_grad=True))

    bn_out = bn.forward(x_numpy)

    bn_out_sec_1, bn_out_sec_2 = bn_sec.forward(x_numpy_1, x_numpy_2)
    
    # print('error sum: ',bn_out-(bn_out_sec_1+bn_out_sec_2))
    
    test_num = 10
    time_avg = 0
    for i in range(test_num):
        start_time_sec = time.time()
        bn_out_sec_1, bn_out_sec_2 = bn_sec.forward(x_numpy_1, x_numpy_2)
        end_time_sec = time.time()
        time_avg += (end_time_sec - start_time_sec) * 1000  # milliseconds
    print('avg forward time (ms): \n', time_avg / test_num)
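
The x_numpy_1 / x_numpy_2 pair is a two-party additive secret sharing of the input: each share alone looks like random noise, but the shares sum to the real tensor, which is what lets BatchNorm_sec consume two shares and emit two shares. A self-contained sketch of the idea (plain NumPy; share_tensor is a hypothetical helper, not part of the example's API):

import numpy as np

def share_tensor(x, rng):
    # split x into two additive shares: x = s1 + s2
    s1 = rng.standard_normal(x.shape)
    s2 = x - s1
    return s1, s2

rng = np.random.default_rng(0)
x = rng.standard_normal((1, 4, 2, 2))
s1, s2 = share_tensor(x, rng)
assert np.allclose(s1 + s2, x)  # together the shares reconstruct x

# Affine ops such as BatchNorm's scale-and-shift distribute over the shares:
gamma, beta = 2.0, 0.5
y1, y2 = gamma * s1 + beta, gamma * s2
assert np.allclose(y1 + y2, gamma * x + beta)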