Example #1
import base64

from charm.toolbox.pairinggroup import PairingGroup, ZR

# Project-local helpers (cal_cmac_aes, encrypt_aes/decrypt_aes, get_rind,
# convert_to_bytes, convert_int_from_bytes, KEY_CONNECTOR, BloomFilter) are
# assumed to be importable from the repository's utility modules.


def cal_xtokens_client(K_Z, K_X, w1, c, xterms, pairing, g, oxt_t_set_tuples):
    """Client side: compute the xtokens for counter c of sterm w1, one per xterm."""
    z = cal_cmac_aes(K_Z, convert_to_bytes(w1) + convert_to_bytes(c))
    e_z = pairing.init(ZR, convert_int_from_bytes(z))
    xtokens_serialized = list()
    for xterm in xterms:
        kxw = cal_cmac_aes(K_X, convert_to_bytes(xterm))
        e_kxw = pairing.init(ZR, convert_int_from_bytes(kxw))

        xtoken = g**(e_z * e_kxw)

        xtokens_serialized.append(pairing.serialize(xtoken))

    # each pair is ((e, y_c), xtokens_[c])
    return oxt_t_set_tuples[c], xtokens_serialized


def gen_xtags(word, indices, K_X, pairing, K_I, g) -> list:
    """Compute one xtag g^(F(K_X, word) * F(K_I, ind)) for every index ind of word."""
    xtags = list()
    kxw = cal_cmac_aes(K_X, convert_to_bytes(word))
    kxw_zr = pairing.init(ZR, convert_int_from_bytes(kxw))
    for ind in indices:
        xind = cal_cmac_aes(K_I, ind)
        xind_zr = pairing.init(ZR, convert_int_from_bytes(xind))
        xtag = g**(kxw_zr * xind_zr)

        xtags.append(str(xtag))
    return xtags


def gen_t_set(word, indices, K_S, K_I, K_Z, K_T, iv, pairing) -> dict:
    """Build the TSet entries key -> (e, y) for one keyword and its masked indices."""
    edb_word = dict()
    word_bytes = convert_to_bytes(word)
    K_e = cal_cmac_aes(K_S, word_bytes)

    c = 0
    for index in indices:
        xind = cal_cmac_aes(K_I, index)
        z = cal_cmac_aes(K_Z, word_bytes + convert_to_bytes(c))
        # e encrypts the masked document index under the word-specific key K_e
        e = encrypt_aes(K_e, iv, index)
        y = (pairing.init(ZR, convert_int_from_bytes(xind)) /
             pairing.init(ZR, convert_int_from_bytes(z)))
        tag = cal_cmac_aes(K_T, word_bytes)
        key = base64.encodebytes(tag).decode() + KEY_CONNECTOR + str(c)

        # Generate Tuple of (key~i, e, y)
        edb_word[key] = (e, pairing.serialize(y))
        c += 1

    return edb_word


def gen_inverted_index(ind, words: list, key: bytes):
    """
    Generate list of inverted indices for words in document ind
    :param ind: document index
    :param list words: words of the document
    :param bytes key: key for masking the index
    :return: list of (word, masked_index)
    """
    res = list()
    # rind is the masked document index, base64-encoded (bytes)
    rind = base64.encodebytes(get_rind(convert_to_bytes(str(ind)), key))
    for word in words:
        res.append((word, rind))

    return res
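
# A minimal setup sketch (not part of the original code): it combines the
# helpers above to build the encrypted database and Bloom filter from an
# inverted index {word: [masked_index, ...]} whose entries were produced by
# gen_inverted_index. The name `build_edb` and the assumption that the
# BloomFilter exposes an `add` method are illustrative only.
def build_edb(inverted_index: dict, keys: tuple, iv: bytes, bf, pairing, g) -> dict:
    # K_P is unused here because the indices were already masked in gen_inverted_index
    (K_P, K_S, K_X, K_I, K_Z, K_T) = keys
    edb = dict()
    for word, masked_indices in inverted_index.items():
        # TSet entries for this word, keyed by base64(stag) + KEY_CONNECTOR + counter
        edb.update(gen_t_set(word, masked_indices, K_S, K_I, K_Z, K_T, iv, pairing))
        # one xtag per (word, masked_index) pair goes into the Bloom filter
        for xtag in gen_xtags(word, masked_indices, K_X, pairing, K_I, g):
            bf.add(xtag)
    return edb
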
def query(edb: dict, keywords: list, key: tuple, iv: bytes, bf: BloomFilter,
          g_serialized: bytes) -> list:
    """
    Query OXT for some keywords
    :param g_serialized:
    :param bf:
    :param iv:
    :type key: tuple
    :param key:
    :param edb:
    :param keywords:
    :return:
    """

    pairing = PairingGroup('SS512')

    # ------ CLIENT  ------
    g = pairing.deserialize(g_serialized)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."

    (K_P, K_S, K_X, K_I, K_Z, K_T) = key

    stag = cal_cmac_aes(K_T, convert_to_bytes(keywords[0]))
    if not stag:
        return []

    # ---------------- SERVER -------------
    # get keys with stag
    t_set_result = list()
    c = 0
    while True:
        key = base64.encodebytes(stag).decode() + KEY_CONNECTOR + str(c)
        if key in edb:
            t_set_result.append(edb[key])
            c += 1
        else:
            break

    # ---------------- CLIENT -------------
    # list of (e, y) with y is Element
    oxt_t_set_tuples = t_set_result
    if not oxt_t_set_tuples:
        return []

    # Concat xtoken with OXT Tuple
    xtoken_tuples = list()
    xterms = keywords[1:]

    for c in range(len(oxt_t_set_tuples)):
        z = cal_cmac_aes(K_Z,
                         convert_to_bytes(keywords[0]) + convert_to_bytes(c))
        e_z = pairing.init(ZR, convert_int_from_bytes(z))
        xtokens_serialized = list()
        for xterm in xterms:
            kxw = cal_cmac_aes(K_X, convert_to_bytes(xterm))
            e_kxw = pairing.init(ZR, convert_int_from_bytes(kxw))

            xtoken = g**(e_z * e_kxw)

            xtokens_serialized.append(pairing.serialize(xtoken))

        xtoken_tuples.append((oxt_t_set_tuples[c][1], xtokens_serialized))

    # ---------------- SERVER -------------
    # match xtags in BF
    es = list()
    for c in range(len(xtoken_tuples)):
        # a tuple matches only if every one of its xtags appears in the Bloom filter
        xtag_matched = True
        y_c = pairing.deserialize(xtoken_tuples[c][0])
        xtokens_serialized = xtoken_tuples[c][1]
        for xtoken_serialized in xtokens_serialized:
            xtag = pairing.deserialize(xtoken_serialized)**y_c
            xtag_matched = xtag_matched and (str(xtag) in bf)

        if xtag_matched:
            es.append(oxt_t_set_tuples[c][0])

    # ---------------- CLIENT -------------
    # client decrypt e
    K_e = cal_cmac_aes(K_S, convert_to_bytes(keywords[0]))
    rinds = [decrypt_aes(K_e, iv, e) for e in es]
    result = [
        get_rind(base64.decodebytes(rind), K_P).decode() for rind in rinds
    ]

    return result
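
# A key-generation sketch (not part of the original code): query() above expects
# six independent PRF keys and a serialized pairing generator. The 16-byte key
# size and the use of os.urandom / pairing.random(G1) are assumptions about how
# the original project generates them.
import os
from charm.toolbox.pairinggroup import G1

def gen_keys_and_generator():
    pairing = PairingGroup('SS512')
    g = pairing.random(G1)  # generator of G1, later passed around serialized
    keys = tuple(os.urandom(16) for _ in range(6))  # (K_P, K_S, K_X, K_I, K_Z, K_T)
    return keys, pairing.serialize(g)

# Example call (edb, iv and bf come from a setup phase such as build_edb above):
#     keys, g_serialized = gen_keys_and_generator()
#     matches = query(edb, ['alice', 'bob'], keys, iv, bf, g_serialized)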
Example #6
import base64
import logging
import multiprocessing
import time

from charm.toolbox.pairinggroup import PairingGroup, ZR

# Project-local helpers (cal_cmac_aes, decrypt_aes, get_rind, convert_to_bytes,
# KEY_CONNECTOR, BloomFilter, cal_xtokens_client, the cal_xtags_* / matching_shve
# helpers, their *_parallel variants and pool initializers) are assumed to be
# importable from the repository's utility modules.


def query(edb: dict,
          shve: dict,
          keywords: list,
          key: tuple,
          iv: bytes,
          bf: BloomFilter,
          g_serialized: bytes,
          paralleled=False,
          num_processes=None,
          benchmarking=False):
    """
    Query HXT for some keywords
    :param edb: OXT encrypted database
    :param shve: SHVE data
    :param keywords: list of keywords
    :param key: all keys
    :param iv: IV
    :param bf: bloomfilter
    :param g_serialized: serialized g value
    :param paralleled: whether to parallelize the query. NOTE: the parallel path may produce incorrect results.
    :param num_processes: number of processes used for parallelism
    :param benchmarking: whether this is a benchmark or not (default is False)
    :return: the list of results. If benchmarking, also return the number of TSet entries matched by the stag as the second element of a tuple
    """
    pairing = PairingGroup('SS512')
    start_time = time.time()

    # ------ CLIENT  ------
    g = pairing.deserialize(g_serialized)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."

    (K_P, K_S, K_X, K_I, K_Z, K_T, K_H) = key

    num_bits = bf.num_bits

    # client generates stag
    stag = cal_cmac_aes(K_T, convert_to_bytes(keywords[0]))

    logging.debug(('client generates stag time:', time.time() - start_time))

    result = []

    if not stag:
        if benchmarking:
            return result, 0
        else:
            return result

    start_time = time.time()
    # ---------------- SERVER -------------
    # get keys with stag
    t_set_result = list()
    c = 0
    while True:
        key = base64.encodebytes(stag).decode() + KEY_CONNECTOR + str(c)
        if key in edb:
            t_set_result.append(edb[key])
            c += 1
        else:
            break
    logging.debug(
        ('server get keys with stag time:', time.time() - start_time))

    # ---------------- CLIENT -------------
    start_time = time.time()
    # list of (e, y) with y is Element
    oxt_t_set_tuples = t_set_result
    if not oxt_t_set_tuples:
        if benchmarking:
            return result, len(t_set_result)
        else:
            return result

    # Concat xtoken with OXT Tuple
    xtoken_tuples = list()
    w1 = keywords[0]
    xterms = keywords[1:]

    num_t_set_stag = len(oxt_t_set_tuples)
    if paralleled:
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_cal_xtokens_client,
                                  initargs=(K_Z, K_X, w1, xterms, pairing, g,
                                            oxt_t_set_tuples)) as pool:
            xtoken_tuples = pool.map(cal_xtokens_client_parallel,
                                     range(num_t_set_stag))

    else:
        for c in range(num_t_set_stag):
            # each pair is ((e, y_c), xtokens_[c])
            xtoken_tuples.append(
                cal_xtokens_client(K_Z, K_X, w1, c, xterms, pairing, g,
                                   oxt_t_set_tuples))

    logging.debug(('client generates xtokens time:', time.time() - start_time))
    # ---------------- SERVER -------------
    start_time = time.time()
    # Server is generating xtag

    xtags_tuples = list()
    if paralleled:
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_cal_xtags_server,
                                  initargs=(pairing, )) as pool:
            xtags_tuples = pool.map(cal_xtags_server_parallel, xtoken_tuples)
    else:
        for xtoken_tuple in xtoken_tuples:
            # each pair is ((e, y) -> xtags_of_c)
            xtags_tuples.append(cal_xtags_server(xtoken_tuple, pairing))

    logging.debug(('Server generating xtag time:', time.time() - start_time))

    start_time = time.time()
    xtags_hash_tuples = list()

    if paralleled:
        # parallel processing
        with multiprocessing.Pool(
                processes=num_processes,
                initializer=init_worker_cal_xtags_hashes_server,
                initargs=(bf.hash_seeds, num_bits)) as pool:
            xtags_hash_tuples = pool.map(cal_xtags_hashes_server_parallel,
                                         xtags_tuples)

    else:
        for xtags_tuple in xtags_tuples:
            # The only change from the original algorithm is the order of operations:
            # instead of hashing each xtag right after computing it, the server first
            # computes all xtags (above) and then computes the hashes for each of
            # them here. Each pair is ((e, y) -> hashes_of_xtags_of_c).
            xtags_hash_tuples.append(
                cal_xtags_hashes_server(xtags_tuple, bf.hash_seeds, num_bits))

    logging.debug(
        ('Server generating xtag hashes time:', time.time() - start_time))

    start_time = time.time()
    # Start to match HVE
    es = list()

    if paralleled:
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_matching_shve,
                                  initargs=(K_H, iv, shve)) as pool:
            tmp = pool.map(matching_shve_parallel, xtags_hash_tuples)
            es = [e for e in tmp if e is not None]
            logging.debug(es)
    else:
        for hash_tuple in xtags_hash_tuples:
            e = matching_shve(hash_tuple, K_H, iv, shve)
            if e is not None:
                es.append(e)

    logging.debug(('Server match SHVE time:', time.time() - start_time))

    # ---------------- CLIENT -------------
    start_time = time.time()
    # client decrypt e
    K_e = cal_cmac_aes(K_S, convert_to_bytes(keywords[0]))
    rinds = [decrypt_aes(K_e, iv, e) for e in es]
    result = [
        get_rind(base64.decodebytes(rind), K_P).decode() for rind in rinds
    ]

    logging.debug(('client decrypt e time:', time.time() - start_time))

    if benchmarking:
        return result, len(t_set_result)
    else:
        return result
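
# Usage sketch for the HXT query (illustrative only): `edb`, `shve`, `bf`, `iv`,
# `keys` (a 7-tuple ending with K_H), and `g_serialized` are assumed to come
# from an HXT setup phase that is not shown in these examples.
#
#     results = query(edb, shve, ['alice', 'bob'], keys, iv, bf, g_serialized)
#     results, n_tset = query(edb, shve, ['alice', 'bob'], keys, iv, bf,
#                             g_serialized, paralleled=True, num_processes=4,
#                             benchmarking=True)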