Example #1
    def test_Pyfhel_5c_save_restore_all(self):
        pyfhel = Pyfhel()
        pyfhel.contextGen(p=1964769281,
                          m=8192,
                          base=2,
                          sec=192,
                          flagBatching=True)
        pyfhel.keyGen()
        pyfhel.rotateKeyGen(60)
        pyfhel.relinKeyGen(60, 4)
        # save all keys into temporary directory
        tmp_dir = tempfile.TemporaryDirectory()
        pyfhel.saveContext(tmp_dir.name + "/context")
        pyfhel.savepublicKey(tmp_dir.name + "/pub.key")
        pyfhel.savesecretKey(tmp_dir.name + "/sec.key")
        pyfhel.saverelinKey(tmp_dir.name + "/relin.key")
        pyfhel.saverotateKey(tmp_dir.name + "/rotate.key")
        # restore all keys
        pyfhel2 = Pyfhel()
        pyfhel2.contextGen(p=1964769281,
                           m=8192,
                           base=2,
                           sec=192,
                           flagBatching=True)
        pyfhel2.restoreContext(tmp_dir.name + "/context")
        pyfhel2.restorepublicKey(tmp_dir.name + "/pub.key")
        pyfhel2.restoresecretKey(tmp_dir.name + "/sec.key")
        pyfhel2.restorerelinKey(tmp_dir.name + "/relin.key")
        pyfhel2.restorerotateKey(tmp_dir.name + "/rotate.key")

        # test encryption decryption
        ctxt1 = pyfhel.encryptBatch([42])
        self.assertEqual(
            pyfhel2.decryptBatch(ctxt1)[0],
            42,
            "decrypting with restored keys should work",
        )
        try:
            pyfhel2.rotate(ctxt1, -1)
            self.assertEqual(
                pyfhel2.decryptBatch(ctxt1)[1],
                42,
                "decrypting with restored keys should work",
            )
        except Exception as err:
            self.fail("PyPtxt() creation failed unexpectedly: ", err)

        # test ciphertext storing
        ctxt2 = pyfhel.encryptInt(42)
        ctxt2.save(tmp_dir.name + "/ctxt2")

        ctxt_restored = PyCtxt()
        ctxt_restored.load(tmp_dir.name + "/ctxt2", "int")
        self.assertEqual(pyfhel2.decryptInt(ctxt_restored), 42,
                         "decrypting ciphertext should work")
        tmp_dir.cleanup()
Example #2
    def predict(self, test_labels):

        if self.verbosity:
            print("Computing Prediction")
            print("==================================")

        fc_folder = self.enclayers_dir + "/fullyconnected"
        out_folder = fc_folder + "/output"

        if not path.exists(out_folder):
            raise Exception(
                "You need to compute the fully connected layer before.")

        print(test_labels[0])
        # Only q predictions are done simultaneously
        # for i in range(self.n)

        el = []

        start = timeit.default_timer()

        for i in range(test_labels.shape[1]):
            file = out_folder + "/fc_" + str(i)
            p = PyCtxt()
            p.load(file, 'batch')

            ptxt = self.py.decrypt(p)
            ptxt = self.py.decodeBatch(ptxt)
            ptxt = self.decode_tensor(ptxt, self.t, self.precision)

            if len(el) <= i:
                el.append([])

            for j in range(len(ptxt)):
                if len(el) <= j:
                    el.append([ptxt[j]])
                else:
                    el[j].append(ptxt[j])

        el = np.array(el)
        print(el.shape)
        print(el[0])
        pos = 0

        for i in range(el.shape[0]):
            mp = np.argmax(el[i])
            ml = np.argmax(test_labels[i])
            if (mp == ml):
                pos += 1

        stop = timeit.default_timer()
        print("Computation time: " + str(stop - start) + " s.")
        print("Positive prediction: " + str(pos))
        print("Negative prediction: " + str(self.n - pos))
        acc = (pos / self.n) * 100
        print("Model Accurancy:" + str(acc) + "%")
Example #3
    def test_Pyfhel_5d_save_restore_batch(self):
        pyfhel = Pyfhel()
        pyfhel.contextGen(p=1964769281, m=8192, base=2, sec=192, flagBatching=True)
        pyfhel.keyGen()
        pyfhel.rotateKeyGen(60)
        pyfhel.relinKeyGen(60, 4)
        # encrypt something
        ctxt = pyfhel.encryptBatch([1, 2, 3, 4])
        # save to temporary file
        tmp = tempfile.NamedTemporaryFile()
        ctxt.save(tmp.name)
        # load from temporary file
        loaded = PyCtxt()
        loaded.load(tmp.name, "batch")
        self.assertEqual(pyfhel.decryptBatch(loaded)[:4], [1, 2, 3, 4])
Example #4
    def test_Pyfhel_5f_save_restore_batch(self):
        pyfhel = Pyfhel()
        pyfhel.contextGen(p=1964769281, m=8192, base=2, sec=192, flagBatching=True)
        pyfhel.keyGen()
        pyfhel.rotateKeyGen(60)
        pyfhel.relinKeyGen(60, 4)
        # encrypt something
        ctxt = pyfhel.encryptBatch([1, 2, 3, 4])
        # save to temporary file
        tmp_dir = tempfile.TemporaryDirectory()
        tmp_file = os.path.join(tmp_dir.name, "ctxt")
        ctxt.save(tmp_file)
        # load from temporary file
        loaded = PyCtxt()
        loaded.load(tmp_file, "batch")
        self.assertEqual(pyfhel.decryptBatch(loaded)[:4], [1, 2, 3, 4])
        tmp_dir.cleanup()
Example #5
    def retrieve(self, folder, shape):
        if not self.is_stored_before(folder):
            raise Exception("Required files not foud in folder " + folder)

        to_populate = np.empty(shape=shape)
        to_populate = to_populate.flatten()
        l = []

        for i in range(len(to_populate)):
            p = PyCtxt()
            fname = folder + "/enc_" + str(i)
            if not path.exists(fname):
                raise Exception("File ", fname, "not exists")
            p.load(fname, 'batch')
            l.append(p)

        return np.reshape(l, shape)
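retrieve expects one ciphertext file per flattened entry (enc_0, enc_1, ...). A minimal standalone sketch of the matching store step, assuming that per-element layout; the function name and directory handling are assumptions, not taken from the original class:

import os
import numpy as np

def store_ciphertexts(ctxt_array, folder):
    # Hypothetical counterpart to retrieve(): write each PyCtxt of a
    # (possibly multi-dimensional) array to folder/enc_<i> in flattened order.
    os.makedirs(folder, exist_ok=True)
    flat = np.asarray(ctxt_array, dtype=object).flatten()
    for i, ctxt in enumerate(flat):
        ctxt.save(os.path.join(folder, "enc_" + str(i)))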
Example #6
    def _enc_arr_(self, arr, file_name=None):
        if not self.py.getflagBatch():
            raise Exception("You need to initialize Batch for this context.")

        if file_name is not None:
            if path.exists(file_name):
                ct = PyCtxt()
                ct.load(file_name, 'batch')
                return ct

        res = []
        for x in range(self.n):
            res.append(arr[x])

        res = np.array(res)

        encoded = self.py.encodeBatch(res)
        encrypted = self.py.encryptPtxt(encoded)
        if file_name is not None:
            encrypted.save(file_name)

        return encrypted
Example #7
    def get_results(self, test_labels):

        dense_folder = self.enclayers_dir + "/fullyconnected"
        out_folder = dense_folder + "/output"
        el = []

        for i in range(test_labels.shape[1]):
            file = out_folder + "/fc_" + str(i)
            p = PyCtxt()
            p.load(file, 'batch')

            ptxt = self.py.decrypt(p)
            ptxt = self.py.decodeBatch(ptxt)

            if len(el) <= i:
                el.append([])

            for j in range(len(ptxt)):
                if len(el) <= j:
                    el.append([ptxt[j]])
                else:
                    el[j].append(ptxt[j])

        return np.array(el)
Example #8
# Assuming the ciphertexts have been received, verify the results

sum_d = base64.b64decode(bytes(sum_e, "utf-8"))
sub_d = base64.b64decode(bytes(sub_e, "utf-8"))
mul_d = base64.b64decode(bytes(mul_e, "utf-8"))

with open("txt.c1", "wb") as t1_f:
    t1_f.write(sum_d)

with open("txt.c2", "wb") as t2_f:
    t2_f.write(sub_d)

with open("txt.c3", "wb") as t3_f:
    t3_f.write(mul_d)

sum = PyCtxt()
sum.load("txt.c1")
sum._encoding = ENCODING_t.INTEGER

sub = PyCtxt()
sub.load("txt.c2")
sub._encoding = ENCODING_t.INTEGER

mul = PyCtxt()
mul.load("txt.c3")
mul._encoding = ENCODING_t.INTEGER

HE.restoresecretKey("secret_k.pysk")
print(" addition:       decrypt(ctxt1 + ctxt2) =  ", HE.decryptInt(sum))
print(" substraction:   decrypt(ctxt1 - ctxt2) =  ", HE.decryptInt(sub))
print(" multiplication: decrypt(ctxt1 * ctxt2) =  ", HE.decryptInt(mul))
Example #9
    def getEncryptedPixel(self, index):
        pixel_file = self.enclayers_dir + "/input/pixel_" + str(
            index) + ".pyctxt"
        p = PyCtxt()
        p.load(pixel_file, 'batch')
        return p
Example #10
    def fully_connected(self):
        if self.verbosity:
            print("Computing Fully Connected")
            print("==================================")

        input_folder = self.enclayers_dir + "/dense2/output"

        fc_folder = self.enclayers_dir + "/fullyconnected"
        out_folder = fc_folder + "/output"

        wfile = "storage/layers/preprocessed/precision_" + str(
            self.precision) + "/pre_4_dense_11.npy"
        bfile = "storage/layers/preprocessed/precision_" + str(
            self.precision) + "/pre_bias_4_dense_11.npy"

        if not path.exists(fc_folder):
            createDir(fc_folder)

        if path.exists(out_folder):

            print("Processed before. You can found it in " + out_folder +
                  " folder.")
            print("")

        elif not path.exists(wfile) or not path.exists(bfile):

            raise Exception(
                "Fully connected layer weights and biases need to be preprocessed before (with precision "
                + str(self.precision) + ").")

        elif not path.exists(input_folder):

            raise Exception(
                "Second dense output required. Please run Encryption.dense2(...) before."
            )

        else:
            createDir(out_folder)

            w = np.load(wfile)
            b = np.load(bfile)

            if w.shape[1] != b.shape[0]:
                raise Exception("Preprocessed weights " + str(w.shape) +
                                " and biases " + str(b.shape) +
                                "are incopatible.")

            if self.verbosity:
                print("Fully Connected: output processing...")
                print("0%")

            start = timeit.default_timer()

            for x in range(w.shape[1]):
                local_sum = None
                for i in range(w.shape[0]):
                    fname = input_folder + "/square_" + str(i)
                    p = PyCtxt()
                    p.load(fname, 'batch')
                    encw = self.get_map(w[i][x])
                    el = self.py.multiply_plain(p, encw, True)

                    if local_sum is None:
                        local_sum = el
                    else:
                        local_sum = self.py.add(local_sum, el)

                enc_b = self.get_map(b[x])
                ts = self.py.add_plain(local_sum, enc_b, True)
                out_name = out_folder + "/fc_" + str(x)
                ts.save(out_name)

                if self.verbosity:
                    perc = int(((x + 1) / w.shape[1]) * 100)
                    print(
                        str(perc) + "% (" + str(x + 1) + "/" +
                        str(w.shape[1]) + ")")

            stop = timeit.default_timer()
            if self.verbosity:
                print("Fully Connected: output processed in " +
                      str(stop - start) + " s.")
                print("")
Example #11
    def dense1(self, input_shape):
        if self.verbosity:
            print("Computing First Dense (square)")
            print("==================================")

        dense_folder = self.enclayers_dir + "/dense1"
        out_folder = dense_folder + "/output"

        conv_folder = self.enclayers_dir + "/conv"
        out_conv = conv_folder + "/output"

        wfile = "storage/layers/preprocessed/precision_" + str(
            self.precision) + "/pre_2_dense_9.npy"
        bfile = "storage/layers/preprocessed/precision_" + str(
            self.precision) + "/pre_bias_2_dense_9.npy"

        if not path.exists(dense_folder):
            createDir(dense_folder)

        if path.exists(out_folder):

            print("Processed before. You can found it in " + out_folder +
                  " folder.")
            print("")

        elif not path.exists(wfile) or not path.exists(bfile):

            raise Exception(
                "First dense layer weights and biases need to be preprocessed before (with precision "
                + str(self.precision) + ").")

        elif not path.exists(out_conv):

            raise Exception(
                "Convolution output required. Please run Encryption.convolution(...) before."
            )

        else:
            createDir(out_folder)

            w = np.load(wfile)
            b = np.load(bfile)

            start = timeit.default_timer()

            per = input_shape[0] * input_shape[1]
            filters = input_shape[2]

            flat = per * filters

            if flat != w.shape[0]:
                raise Exception("Input shape " + str(input_shape) +
                                " is not compatible with preprocessed input " +
                                str(w.shape))

            if w.shape[1] != b.shape[0]:
                raise Exception("Preprocessed weights " + str(w.shape) +
                                " and biases " + str(b.shape) +
                                "are incopatible.")

            if self.verbosity:
                print("First Dense: output processing...")
                print("0%")

            for x in range(w.shape[1]):
                local_sum = None
                for i in range(per):
                    for j in range(filters):
                        fname = out_conv + "/" + str(i) + "_filter" + str(j)
                        p = PyCtxt()
                        p.load(fname, 'batch')
                        row = (i * filters + j)
                        encw = self.get_map(w[row][x])

                        el = self.py.multiply_plain(p, encw, True)

                        if local_sum is None:
                            local_sum = el
                        else:
                            local_sum = self.py.add(local_sum, el)

                enc_b = self.get_map(b[x])
                ts = self.py.add_plain(local_sum, enc_b, True)
                ts = self.py.square(ts)
                out_name = out_folder + "/square_" + str(x)
                ts.save(out_name)

                if self.verbosity:
                    perc = int(((x + 1) / w.shape[1]) * 100)
                    print(
                        str(perc) + "% (" + str(x + 1) + "/" +
                        str(w.shape[1]) + ")")

            stop = timeit.default_timer()
            if self.verbosity:
                print("First Dense: output processed in " + str(stop - start) +
                      " s.")
                print("")
Example #12
    def convolution(self, size, kernel, stride):

        if self.verbosity:
            print("Computing Convolution")
            print("==================================")

        conv_folder = self.enclayers_dir + "/conv"
        pre_conv = conv_folder + "/pre"
        out_conv = conv_folder + "/output"

        if not path.exists(conv_folder):
            createDir(conv_folder)

        conv_w = self.preprocess_dir + "precision_" + str(
            self.precision) + "/pre_0_conv2d_3.npy"
        conv_b = self.preprocess_dir + "precision_" + str(
            self.precision) + "/pre_bias_0_conv2d_3.npy"

        if path.exists(pre_conv):
            print("(Pre)processed before. You can found it in " + pre_conv +
                  " folder.")

        elif not path.exists(conv_w):

            print(
                "Convolution weights need to be preprocessed before (with precision "
                + str(self.precision) + ").")
            print("")
        else:
            createDir(pre_conv)

            filters = np.load(conv_w)

            start = timeit.default_timer()

            fshape = filters.shape
            f = filters.reshape((fshape[0] * fshape[1], fshape[2]))
            conv_map = self.get_conv_map(size, kernel, stride)

            if (conv_map.shape[0] != f.shape[0]):
                raise Exception(
                    "Convolution map and filter shapes must match.")

            if self.verbosity:
                print("Convolution: output preprocessing...")
                print("0%")

            for x in range(f.shape[0]):
                for y in range(f.shape[1]):
                    w_filter = self.get_map(f[x, y])
                    for k in range(conv_map.shape[1]):
                        enc_pixel = self.getEncryptedPixel(conv_map[x, k])
                        # computing |self.n| dot products at time
                        res = self.py.multiply_plain(enc_pixel, w_filter, True)
                        f_name = pre_conv + "/pixel" + str(
                            conv_map[x, k]) + "_filter" + str(y)
                        res.save(f_name)
                if self.verbosity:
                    perc = int(((x + 1) / f.shape[0]) * 100)
                    print(
                        str(perc) + "% (" + str(x + 1) + "/" +
                        str(f.shape[0]) + ")")

            stop = timeit.default_timer()

            if self.verbosity:
                print("Convolution: output preprocessed in " +
                      str(stop - start) + " s.")

        if path.exists(out_conv):

            print("Processed before. You can found it in " + out_conv +
                  " folder.")
            print("")

        elif not path.exists(conv_b):

            print(
                "Convolution biases need to be preprocessed before (with precision "
                + str(self.precision) + ").")
            print("")

        else:
            createDir(out_conv)

            biases = np.load(conv_b)

            start = timeit.default_timer()

            bshape = biases.shape
            windows = self.get_conv_windows(size, kernel, stride)
            wshape = windows.shape

            if self.verbosity:
                print("Convolution: output processing...")
                print("0%")

            for x in range(bshape[0]):
                encoded_bias = self.get_map(biases[x])
                for y in range(wshape[0]):
                    local_sum = None
                    for k in range(wshape[1]):
                        f_name = pre_conv + "/pixel" + str(
                            windows[y, k]) + "_filter" + str(x)
                        p = PyCtxt()
                        p.load(f_name, 'batch')
                        if local_sum is None:
                            local_sum = p
                        else:
                            local_sum = self.py.add(local_sum, p)

                    local_sum = self.py.add_plain(local_sum, encoded_bias)
                    file_name = out_conv + "/" + str(y) + "_filter" + str(x)
                    local_sum.save(file_name)

                if self.verbosity:
                    perc = int(((x + 1) / bshape[0]) * 100)
                    print(
                        str(perc) + "% (" + str(x + 1) + "/" + str(bshape[0]) +
                        ")")

            stop = timeit.default_timer()

            if self.verbosity:
                print("Convolution: output processed in " + str(stop - start) +
                      " s.")
                print("")

        return out_conv
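The helpers get_conv_map and get_conv_windows are not shown in these examples. A minimal sketch of get_conv_windows, assuming size, kernel and stride are (height, width) pairs, no padding, and that each row lists the flattened input-pixel indices covered by one sliding window (all assumptions; the original may differ):

import numpy as np

def get_conv_windows(size, kernel, stride):
    # Hypothetical helper: enumerate valid (unpadded) convolution windows and
    # return an array of shape (n_windows, kernel_h * kernel_w) whose entries
    # are flattened pixel indices into a size[0] x size[1] image.
    windows = []
    for r in range(0, size[0] - kernel[0] + 1, stride[0]):
        for c in range(0, size[1] - kernel[1] + 1, stride[1]):
            idx = [(r + i) * size[1] + (c + j)
                   for i in range(kernel[0]) for j in range(kernel[1])]
            windows.append(idx)
    return np.array(windows)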
Example #13
    def put(self, func):
        todos[func] = "1"
        a = request.form['a']
        b = request.form['b']
        c = request.form['c']
        d = request.form['d']
        HE = Pyfhel()

        #Import context
        con = base64.b64decode(bytes(a, "utf-8"))
        with open('context.pycon', "wb") as pk_fw:
            pk_fw.write(con)

        HE.restoreContext("context.pycon")

        # Import public key
        pk = base64.b64decode(bytes(b, "utf-8"))
        with open('public_k.pypk', "wb") as pk_fw:
            pk_fw.write(pk)
        HE.restorepublicKey("public_k.pypk")

        # Import Ciphertext 1
        c1 = base64.b64decode(bytes(c, "utf-8"))
        with open('ctxt.c1', "wb") as c1_fw:
            c1_fw.write(c1)
        ctxt1 = PyCtxt()
        ctxt1.load("ctxt.c1")
        ctxt1._encoding = ENCODING_t.INTEGER

        # Import Ciphertext 2
        c2 = base64.b64decode(bytes(d, "utf-8"))
        with open('ctxt.c2', "wb") as c2_fw:
            c2_fw.write(c2)
        ctxt2 = PyCtxt()
        ctxt2.load("ctxt.c2")
        ctxt2._encoding = ENCODING_t.INTEGER

        # Homomorphic operations on the ciphertexts
        ctxtSum = HE.add(ctxt1, ctxt2, True)
        ctxtSub = HE.sub(ctxt1, ctxt2, True)
        ctxtMul = HE.multiply(ctxt1, ctxt2, True)

        # Save the results so they can be base64-encoded for the response
        ctxtSum.save("sum")
        ctxtSub.save("sub")
        ctxtMul.save("mul")

        with open("sum", "rb") as f_sum:
            sum_e = str(base64.b64encode(f_sum.read()), "utf-8")

        with open("sub", "rb") as f_sub:
            sub_e = str(base64.b64encode(f_sub.read()), "utf-8")

        with open("mul", "rb") as f_mul:
            mul_e = str(base64.b64encode(f_mul.read()), "utf-8")

        if func == "add":
            todos[func] = sum_e
        elif func == "sub":
            todos[func] = sub_e
        elif func == "mul":
            todos[func] = mul_e
        return todos[func]
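The put handler above reads like a Flask-RESTful resource method (request.form, a shared todos dict, a func route parameter). A minimal sketch of how such a resource might be wired up; the class name, route and app setup are assumptions, not taken from the original:

from flask import Flask
from flask_restful import Resource, Api

app = Flask(__name__)
api = Api(app)
todos = {}          # shared result cache used by the handler above


class HomomorphicOp(Resource):      # hypothetical resource holding the put() shown above
    def put(self, func):
        ...                         # body as in Example #13


api.add_resource(HomomorphicOp, "/<string:func>")

if __name__ == "__main__":
    app.run(debug=True)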
Example #14
HE.savepublicKey(tmp_dir.name + "/pub.key")
HE.savesecretKey(tmp_dir.name + "/sec.key")
HE.saverelinKey(tmp_dir.name + "/relin.key")
HE.saverotateKey(tmp_dir.name + "/rotate.key")

print("3. Restore all keys")
HE2 = Pyfhel()
HE2.restoreContext(tmp_dir.name + "/context")
HE2.restorepublicKey(tmp_dir.name + "/pub.key")
HE2.restoresecretKey(tmp_dir.name + "/sec.key")
HE2.restorerelinKey(tmp_dir.name + "/relin.key")
HE2.restorerotateKey(tmp_dir.name + "/rotate.key")

print("4. Testing encryption decryption:")
ctxt1 = HE.encryptBatch([42])
assert HE2.decryptBatch(
    ctxt1)[0] == 42, "decrypting with restored keys should work"
HE2.rotate(ctxt1, -1)
assert HE2.decryptBatch(
    ctxt1)[1] == 42, "decrypting with restored keys should work"

print("5. Testing ciphertext storing:")
ctxt2 = HE.encryptInt(42)
ctxt2.save(tmp_dir.name + "/ctxt2")

ctxt_restored = PyCtxt()
ctxt_restored.load(tmp_dir.name + "/ctxt2", int)
assert HE2.decryptInt(ctxt_restored) == 42, "decrypting ciphertext should work"

# Clean up the temporary directory
tmp_dir.cleanup()