Example 1: CryptoNet_fivelayer
class CryptoNet_fivelayer(Module):
    def __init__(self, in_dim, n_class):
        super(CryptoNet_fivelayer, self).__init__()

        self.conv = Conv_sec(in_dim,
                             5,
                             5,
                             5,
                             zero_padding=1,
                             stride=2,
                             method='SAME',
                             bit_length=bit_length)
        self.sq1 = Activators_sec.Square(bit_length=bit_length)
        self.fc1 = FullyConnect_sec(845, 100, bit_length=bit_length)
        self.sq2 = Activators_sec.Square(bit_length=bit_length)
        self.fc2 = FullyConnect_sec(100, n_class, bit_length=bit_length)
        # plaintext variant: self.logsoftmax = Logsoftmax()
        self.logsoftmax = Logsoftmax_sec(bit_length=bit_length)

    def forward(self, x1, x2):
        in_size = x1.shape[0]
        out_c1, out_c2 = self.conv.forward(x1, x2)
        out_11, out_12 = self.sq1.forward(out_c1, out_c2)
        self.conv_out_shape = out_11.shape
        out_11 = out_11.reshape(in_size, -1)  # flatten each sample to one row
        out_12 = out_12.reshape(in_size, -1)
        out_fc11, out_fc12 = self.fc1.forward(out_11, out_12)
        out_21, out_22 = self.sq2.forward(out_fc11, out_fc12)
        out_31, out_32 = self.fc2.forward(out_21, out_22)
        out_logsoftmax_1, out_logsoftmax_2 = self.logsoftmax.forward(
            out_31, out_32)
        return out_logsoftmax_1, out_logsoftmax_2

    def backward(self, dy1, dy2):
        dy_logsoftmax_1, dy_logsoftmax_2 = self.logsoftmax.gradient(dy1, dy2)
        dy_f31, dy_f32 = self.fc2.gradient(dy_logsoftmax_1, dy_logsoftmax_2)
        dy_sq21, dy_sq22 = self.sq2.gradient(dy_f31, dy_f32)
        dy_f21, dy_f22 = self.fc1.gradient(dy_sq21, dy_sq22)

        dy_f21 = dy_f21.reshape(self.conv_out_shape)
        dy_f22 = dy_f22.reshape(self.conv_out_shape)

        dy_sq11, dy_sq12 = self.sq1.gradient(dy_f21, dy_f22)
        self.conv.gradient(dy_sq11, dy_sq12)
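
Every tensor in these models travels as a pair of additive shares. Below is a minimal driver sketch (hypothetical: it assumes MNIST-shaped 28x28 inputs, which match fc1's 845 = 5*13*13 flattened size, and a module-level bit_length as in the snippet above):

import numpy as np

# split a plaintext batch into two additive shares: x1 + x2 == x
x = np.random.randn(1, 1, 28, 28).astype(np.float64)
x1 = np.random.randn(*x.shape).astype(np.float64)
x2 = x - x1

net = CryptoNet_fivelayer(in_dim=1, n_class=10)
out1, out2 = net.forward(x1, x2)
print(out1 + out2)  # reconstructed log-probabilities, shape (1, 10)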
Example 2: Secureml_fivelayer
class Secureml_fivelayer(Module):
    def __init__(self, in_dim, n_class):
        super(Secureml_fivelayer, self).__init__()

        self.fc0 = FullyConnect_sec(784, 128, bit_length=bit_length)
        self.sq1 = Activators_sec.Square(bit_length=bit_length)
        self.fc1 = FullyConnect_sec(128, 128, bit_length=bit_length)
        self.sq2 = Activators_sec.Square(bit_length=bit_length)
        self.fc2 = FullyConnect_sec(128, n_class, bit_length=bit_length)
        # plaintext variant: self.logsoftmax = Logsoftmax()
        self.logsoftmax = Logsoftmax_sec(bit_length=bit_length)

    def forward(self, x1, x2):
        in_size = x1.shape[0]
        x1 = x1.reshape(in_size, -1)  # flatten each sample to one row
        x2 = x2.reshape(in_size, -1)

        out_c1, out_c2 = self.fc0.forward(x1, x2)
        out_11, out_12 = self.sq1.forward(out_c1, out_c2)
        out_fc11, out_fc12 = self.fc1.forward(out_11, out_12)
        out_21, out_22 = self.sq2.forward(out_fc11, out_fc12)
        out_31, out_32 = self.fc2.forward(out_21, out_22)

        out_logsoftmax_1, out_logsoftmax_2 = self.logsoftmax.forward(
            out_31, out_32)
        return out_logsoftmax_1, out_logsoftmax_2

    def backward(self, dy1, dy2):
        dy_logsoftmax_1, dy_logsoftmax_2 = self.logsoftmax.gradient(dy1, dy2)
        dy_f31, dy_f32 = self.fc2.gradient(dy_logsoftmax_1, dy_logsoftmax_2)

        dy_sq21, dy_sq22 = self.sq2.gradient(dy_f31, dy_f32)
        dy_f21, dy_f22 = self.fc1.gradient(dy_sq21, dy_sq22)

        dy_sq11, dy_sq12 = self.sq1.gradient(dy_f21, dy_f22)
        self.fc0.gradient(dy_sq11, dy_sq12)
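
This variant is all-dense: fc0 hardcodes 784 input features (28*28), and forward flattens the shares itself. A hypothetical call site, using the same sharing pattern as in Example 1 (note that in_dim is not actually used by the constructor):

import numpy as np

x = np.random.randn(2, 1, 28, 28).astype(np.float64)
x1 = np.random.randn(*x.shape).astype(np.float64)
x2 = x - x1

net = Secureml_fivelayer(in_dim=784, n_class=10)
out1, out2 = net.forward(x1, x2)
# out1 + out2 reconstructs the (2, 10) log-probability matrix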
Example 3: Minionn_fivelayer
class Minionn_fivelayer(Module):
    def __init__(self, in_dim, n_class):
        super(Minionn_fivelayer, self).__init__()

        self.conv = Conv_sec(in_dim,
                             5,
                             5,
                             5,
                             zero_padding=2,
                             stride=2,
                             method='SAME',
                             bit_length=bit_length)
        self.relu1 = Activators_sec.ReLU(bit_length=bit_length)
        self.fc1 = FullyConnect_sec(980, 100, bit_length=bit_length)
        self.relu2 = Activators_sec.ReLU(bit_length=bit_length)
        self.fc2 = FullyConnect_sec(100, n_class, bit_length=bit_length)
        self.logsoftmax = Logsoftmax_sec(bit_length=bit_length)

    def forward(self, x1, x2):
        in_size = x1.shape[0]
        out_c1, out_c2 = self.conv.forward(x1, x2)
        # the secure ReLU also returns the time spent in its protocol
        out_11, out_12, dfc_time1 = self.relu1.forward(out_c1, out_c2)

        self.conv_out_shape = out_11.shape
        out_11 = out_11.reshape(in_size, -1)  # flatten each sample to one row
        out_12 = out_12.reshape(in_size, -1)
        out_fc11, out_fc12 = self.fc1.forward(out_11, out_12)
        out_21, out_22, dfc_time2 = self.relu2.forward(out_fc11, out_fc12)
        out_31, out_32 = self.fc2.forward(out_21, out_22)

        out_logsoftmax_1, out_logsoftmax_2 = self.logsoftmax.forward(
            out_31, out_32)
        return out_logsoftmax_1, out_logsoftmax_2, dfc_time1 + dfc_time2

    def backward(self, dy1, dy2):
        dy_logsoftmax_1, dy_logsoftmax_2 = self.logsoftmax.gradient(dy1, dy2)
        dy_f31, dy_f32 = self.fc2.gradient(dy_logsoftmax_1, dy_logsoftmax_2)
        dy_relu21, dy_relu22 = self.relu2.gradient(dy_f31, dy_f32)
        dy_f21, dy_f22 = self.fc1.gradient(dy_relu21, dy_relu22)

        dy_f21 = dy_f21.reshape(self.conv_out_shape)
        dy_f22 = dy_f22.reshape(self.conv_out_shape)

        dy_relu11, dy_relu12 = self.relu1.gradient(dy_f21, dy_f22)
        self.conv.gradient(dy_relu11, dy_relu12)
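
Because the secure ReLU layers return a timing value, this model's forward returns three values rather than two. A hypothetical call site (the geometry again matches MNIST: fc1's 980 = 5*14*14 follows from padding 2, stride 2, 5x5 kernels on 28x28 inputs):

import numpy as np

x = np.random.randn(1, 1, 28, 28).astype(np.float64)
x1 = np.random.randn(*x.shape).astype(np.float64)
x2 = x - x1

net = Minionn_fivelayer(in_dim=1, n_class=10)
out1, out2, relu_time = net.forward(x1, x2)
print((out1 + out2).argmax(axis=1), relu_time)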
Example 4: fc_test
def fc_test():
    bit_length = 32
    batch_size = 1
    in_num = 580
    out_num = 580
    x_numpy = np.random.randn(batch_size, in_num).astype(np.float64)
    w_numpy = np.random.randn(in_num, out_num).astype(np.float64)
    b_numpy = np.random.randn(out_num).astype(np.float64)

    # prepare additive secret shares: share_1 + share_2 == plaintext
    x_numpy_1 = np.random.randn(batch_size, in_num).astype(np.float64)
    x_numpy_2 = x_numpy-x_numpy_1
    w_numpy_1 = np.random.randn(in_num, out_num).astype(np.float64)
    w_numpy_2 = w_numpy-w_numpy_1
    b_numpy_1 = np.random.randn(out_num).astype(np.float64)
    b_numpy_2 = b_numpy-b_numpy_1

    fc = FullyConnect(in_num, out_num)
    fc_sec = FullyConnect_sec(in_num, out_num, bit_length=bit_length)

    # set the layer parameters (plaintext weights for fc, shares for fc_sec)
    fc.set_weight(Parameter(w_numpy, requires_grad=True))
    fc_sec.set_weight_1(Parameter(w_numpy_1, requires_grad=True))
    fc_sec.set_weight_2(Parameter(w_numpy_2, requires_grad=True))
    fc.set_bias(Parameter(b_numpy, requires_grad=True))
    fc_sec.set_bias_1(Parameter(b_numpy_1, requires_grad=True))
    fc_sec.set_bias_2(Parameter(b_numpy_2, requires_grad=True))

    # correctness check: the two output shares should sum to the plaintext output
    fc_out = fc.forward(x_numpy)
    fc_out_1, fc_out_2 = fc_sec.forward(x_numpy_1, x_numpy_2)
    print('reconstruction error:', fc_out - (fc_out_1 + fc_out_2))

    test_num = 10
    time_avg = 0
    for i in range(test_num):
        start_time_sec = time.time()
        fc_out_1, fc_out_2 = fc_sec.forward(x_numpy_1, x_numpy_2)
        end_time_sec = time.time()
        time_avg += (end_time_sec - start_time_sec) * 1000
    print('avg secret-shared forward time (ms):', time_avg / test_num)
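    # hypothetical companion measurement: time the plaintext layer the same
    # way so the secret-sharing overhead can be reported as a ratio
    time_avg_plain = 0
    for i in range(test_num):
        start_time = time.time()
        fc_out = fc.forward(x_numpy)
        end_time = time.time()
        time_avg_plain += (end_time - start_time) * 1000
    print('avg plaintext forward time (ms):', time_avg_plain / test_num)
    print('secret-sharing overhead: %.1fx' % (time_avg / time_avg_plain))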