def init():
    """Generate and persist public parameters for the SS512 pairing group.

    Creates a random generator g in G1, a random exponent h in ZR, and
    g**h, then serializes the group elements and saves them (with the
    group order p) to "groupParameter.npy" via numpy.

    :return: list [g_bytes, h_bytes, gh_bytes, p] of serialized parameters
    """
    group = PairingGroup('SS512')
    g = group.random(G1)    # random generator of G1
    h = group.random(ZR)    # random exponent
    gh = g ** h
    p = group.order()
    # Serialize the group elements so they survive the round trip to disk.
    # (The original also built an unserialized pp list first, which was
    # immediately overwritten — dead code, removed.)
    sg = group.serialize(g)
    sh = group.serialize(h)
    sgh = group.serialize(gh)
    pp = [sg, sh, sgh, p]
    np.save("groupParameter", pp)
    return pp
def symmetric_key_gen():
    """
    Generate a symmetric key string from an ABE pairing element produced
    by the charm-crypto library.

    :return: serialized bytes of a fresh random GT element, usable as a
        symmetric key string
    """
    grp = PairingGroup('SS512')
    gt_element = grp.random(GT)
    # return gt_element, grp.serialize(gt_element)
    return grp.serialize(gt_element)
def schnorr_NIZK():
    """Benchmark a non-interactive Schnorr proof of knowledge.

    Proves knowledge of `secret` for the statement Y1[0]**secret, deriving
    the challenge via the Fiat-Shamir heuristic (SHA-256 over the public
    values), then verifies the proof. Prints the elapsed time of each phase.
    """
    # setup
    start = time.time()
    grp = PairingGroup('MNT224')
    ps = PS01(grp)
    end = time.time()
    print("Setup time elapse: ")
    print(end - start)
    # keygen
    start = time.time()
    (pk, sk) = ps.keygen(2)
    end = time.time()
    print("KeyGen over two attributes time elapse: ")
    print(end - start)
    # generate a secret
    secret = grp.random()
    # NIZK Schnorr prover: commitment a = Y1[0]**na for random nonce na
    start = time.time()
    na = grp.random()
    a = pk['Y1'][0] ** na
    # deterministic nb: Fiat-Shamir challenge from hash of public values
    m = hashlib.sha256()
    m.update(grp.serialize(pk['Y1'][0]))
    m.update(grp.serialize(a))
    m.update(grp.serialize(pk['Y1'][0] ** secret))
    m.update(b'userid')  # replaced with real values
    nb = m.digest()
    nb = grp.hash(nb)  # map digest into the group's exponent field
    # r: response binding nonce, challenge, and secret
    r = na + nb * secret
    end = time.time()
    print("NIZK Schnorr on one attribute Prover time elapse: ")
    print(end - start)
    # NIZK Schnorr verifier: recompute the challenge the same way
    start = time.time()
    m = hashlib.sha256()
    m.update(grp.serialize(pk['Y1'][0]))
    m.update(grp.serialize(a))
    m.update(grp.serialize(pk['Y1'][0] ** secret))
    m.update(b'userid')  # replaced with real values
    nb = m.digest()
    nb = grp.hash(nb)
    # check Y1[0]**r == a * (Y1[0]**secret)**nb
    lh = pk['Y1'][0] ** r
    rh = a * (pk['Y1'][0] ** secret) ** nb
    end = time.time()
    print("NIZK Schnorr Verifier time elapse: ")
    print(end - start)
    if lh == rh:
        print('check success')
    else:
        print('lh:=', lh)
        print('rh:=', rh)
async def main():
    """End-to-end anonymous-credential flow: IdP issuance, then RP auth.

    Fetches the IdP public key, prepares a blind-sign commitment with a
    Schnorr NIZK proof of the user secret, obtains and unblinds the signed
    id_token, verifies it, then repeatedly authenticates to the RP while
    printing per-phase timings.
    """
    # group setup and secret generation
    setup_start_time = time.time()
    global grp, ps, pk, secret, messages, sig, gamma, expires_in
    grp = PairingGroup('MNT224')
    messages = ['random_string']
    secret = grp.hash(messages[0], ZR)  # user secret derived from first message
    ps = PS01(grp)
    pk = None
    setup_end_time = time.time()
    print("Setup total time: {0}s ".format(str(setup_end_time - setup_start_time)))
    # communicate with IdP
    print("****Communication with IdP****")
    idp_start_time = time.time()
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
        # first rt: get idp public key (scalars vs lists deserialized differently)
        pk = await fetch(session, 'https://' + idp_ip + ':6000/')
        for key, value in pk.items():
            if key in {'X2', 'g1', 'g2'}:
                pk[key] = grp.deserialize(value.encode())
            else:
                pk[key] = [grp.deserialize(item.encode()) for item in value]
        if debug:
            print('idp pk:=', pk)
        # generate t and commitment (blinding factor t hides the secret)
        start = time.time()
        t = grp.random(ZR)
        gt = (pk['g1'] ** t)
        commitment_secret = pk['Y1'][0] ** secret
        commitment = gt * commitment_secret
        if debug:
            print("commitment: ", commitment)
        end = time.time()
        print("CRED.PrepareBlindSign over {0} attributes time elapse: {1}s ".format(str(1), str(end - start)))
        # second rt: schnorr proof of knowledge of `secret`
        start = time.time()
        na = grp.random()
        a = pk['Y1'][0] ** na
        # schnorr NIZK: generate nb (Fiat-Shamir challenge)
        m = hashlib.sha256()
        m.update(grp.serialize(pk['Y1'][0]))
        m.update(grp.serialize(a))
        m.update(grp.serialize(pk['Y1'][0] ** secret))
        m.update(b'userid')  # replaced with real values
        nb = m.digest()
        nb = grp.hash(nb)
        r = na + nb * secret
        end = time.time()
        print("NIZK Schnorr Prover (User-IdP) over {0} element time elapse: {1}s ".format(str(1), str(end - start)))
        json_param = {'g_t': grp.serialize(gt).decode(),
                      'commitment_secret': grp.serialize(commitment_secret).decode(),
                      'a': grp.serialize(a).decode(),
                      'r': grp.serialize(r).decode()}
        json_rep = await fetch(session, 'https://' + idp_ip + ':6000/token', json_param)
        # parse the reply
        id_token = json_rep['id_token']
        id_token = [grp.deserialize(item.encode()) for item in id_token]
        expires_in = json_rep['expires_in']
        gamma = json_rep['gamma']
        if debug:
            print('user id token:=', id_token)
            print('expires_in:=', expires_in)
            print('gamma:=', gamma)
        # unblind signature (remove blinding factor t from the IdP's signature)
        start = time.time()
        sig = ps.unblind_signature(t, id_token)
        end = time.time()
        print("CRED.Unblind time elapse: {0}s ".format(str(end - start)))
        # rebuild the full attribute list the IdP signed: secret message,
        # gamma, expires_in, plus filler attributes up to len(pk['Y1'])
        messages.append(gamma)
        messages.append(expires_in)
        for i in range(3, len(pk['Y1'])):
            messages.append(str(i - 3) + 'th-element')
        if debug:
            print(messages)
        result = ps.verify(pk, sig, *messages)
        assert result, 'invalid signature'
        print('Successfully verification')
        gamma = grp.hash(gamma, ZR)  # hashed form used in later RP interaction
    idp_end_time = time.time()
    print("IdP total time: {0}s ".format(str(idp_end_time - idp_start_time)))
    # communicate with RP
    rp_start_time = time.time()
    for i in range(running_times):
        print("\n****Communication with RP****")
        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
            json_param = generate_params_for_RP()
            json_rep = await fetch(session, 'https://' + rp_ip + ':6001/authenticate', json_param)
            if debug:
                print(json_rep)
    rp_end_time = time.time()
    print("RP total time: {0}s ".format(str(rp_end_time - rp_start_time)))
def setup(database: dict,
          password: str,
          bloomfilter_file=None,
          bf_false_positive_rate=BLOOMFILTER_DEFAULT_FALSE_POSITIVE_RATE,
          paralleled=False,
          num_processes=None) -> tuple:
    """
    Setup method of OXT for a database

    :param database: database with id -> list of words
    :param password: password to create keys (currently unused — see TODO;
        keys are generated randomly)
    :param bloomfilter_file: file to read/write bloomfilter
    :param bf_false_positive_rate: bloomfilter false positive rate
    :param bool paralleled: should we parallel the process or not
    :param num_processes: number of process used if parallel
    :return: (key, iv, g_serialized, edb1, bf, bits) where key is the tuple
        (K_P, K_S, K_X, K_I, K_Z, K_T), iv is the AES IV, g_serialized is
        the serialized GT generator, edb1 is the encrypted database,
        bf the Bloom filter and bits its mirrored bit array
    """
    global var_dict
    # TODO: generate keys from password
    K_P = random_secure(1)  # key to XOR index
    K_S = random_secure(CMAC_AES128_KEY_LENGTH_IN_BYTES)  # Key for e
    iv = random_secure(
        CMAC_AES128_KEY_LENGTH_IN_BYTES)  # IV for AES encryption
    K_X = random_secure(CMAC_AES128_KEY_LENGTH_IN_BYTES)  # Key for xtag
    K_I = random_secure(CMAC_AES128_KEY_LENGTH_IN_BYTES)  # Key for index
    K_Z = random_secure(CMAC_AES128_KEY_LENGTH_IN_BYTES)  # Key for Z
    K_T = random_secure(CMAC_AES128_KEY_LENGTH_IN_BYTES)  # Key for keyword
    pairing = PairingGroup('SS512')
    g = pairing.random(GT)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."
    total_pairs = 0
    # Build the inverted index: word -> list of (encrypted) ids containing it.
    inverted_index_all_pairs = defaultdict(
        list)  # word -> list of ids containing this word
    if paralleled:
        # parallel processing
        logger.info('Parallel gen_inverted_index')
        pool = multiprocessing.Pool()
        num_docs = len(database)
        inverted_tuples = pool.starmap(
            gen_inverted_index_paralleled,
            list(zip(database.items(), [K_P] * num_docs)))
        for inverted_list in inverted_tuples:
            for word, rind in inverted_list:
                inverted_index_all_pairs[word].append(rind)
                total_pairs += 1
    else:
        # sequential processing
        logger.info('Seq inverted_index_all_pairs')
        for (ind, words) in database.items():
            inverted_list = gen_inverted_index(ind, words, K_P)
            for word, rind in inverted_list:
                inverted_index_all_pairs[word].append(
                    rind)  # rind is now bytes
                total_pairs += 1
    # generate xtags. Each xtag is for a pair (word, index)
    xtags = set()
    if paralleled:
        logger.info('Parallel xtags')
        # parallel processing; worker globals are passed via the initializer
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_gen_xtags_parallel,
                                  initargs=(K_X, pairing, K_I, g)) as pool:
            xtags_lists = pool.map(gen_xtags_parallel,
                                   inverted_index_all_pairs.items())
            for xtags_list in xtags_lists:
                xtags.update(xtags_list)
        var_dict = {}  # reset worker-shared state after the pool is done
    else:
        logger.info('Seq xtags')
        for word, indices in inverted_index_all_pairs.items():
            xtags.update(gen_xtags(word, indices, K_X, pairing, K_I, g))
    # Create a Bloom filter and bitarray
    if bloomfilter_file is not None:
        bf = BloomFilter(total_pairs, bf_false_positive_rate,
                         bloomfilter_file)
    else:
        bf = BloomFilter(total_pairs, bf_false_positive_rate)
    num_bits = bf.num_bits
    bits = bitarray(num_bits)
    bits.setall(False)
    # compute the positions of each xtag and set it
    # the reason we need to use bits array because the library doesn't expose bits. e.g. check if a bit is set or not
    xtag: str
    for xtag in xtags:
        bf.add(xtag)
        # mimic set in bits array
        for hash_seed in bf.hash_seeds:
            pos = bloomfilter_hash(xtag, hash_seed) % num_bits
            bits[pos] = True
    # generate encrypted database
    edb1 = dict()
    if paralleled:
        logger.info('Parallel edb1')
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_gen_t_set_parallel,
                                  initargs=(K_S, K_I, K_Z, K_T, iv,
                                            pairing)) as pool:
            t_set_dict_lists = pool.map(gen_t_set_parallel,
                                        inverted_index_all_pairs.items())
            for t_set_dict in t_set_dict_lists:
                edb1.update(t_set_dict)
        var_dict = {}  # reset worker-shared state after the pool is done
    else:
        logger.info('Seq edb1')
        for word, indices in inverted_index_all_pairs.items():
            edb1.update(
                gen_t_set(word, indices, K_S, K_I, K_Z, K_T, iv, pairing))
    key = (K_P, K_S, K_X, K_I, K_Z, K_T)
    g_serialized = pairing.serialize(g)
    return key, iv, g_serialized, edb1, bf, bits
def query(edb: dict, keywords: list, key: tuple, iv: bytes, bf: BloomFilter,
          g_serialized: bytes) -> list:
    """
    Query OXT for some keywords. keywords[0] is the sterm (least frequent
    keyword); the remaining entries are xterms checked via the Bloom filter.
    Client and server roles are simulated in sequence within this function.

    :param g_serialized: serialized GT generator produced by setup
    :param bf: Bloom filter of xtags produced by setup
    :param iv: AES IV produced by setup
    :type key: tuple
    :param key: key tuple (K_P, K_S, K_X, K_I, K_Z, K_T) from setup
    :param edb: encrypted database from setup
    :param keywords: conjunctive query; first element drives the T-set lookup
    :return: list of decrypted document ids matching all keywords
    """
    pairing = PairingGroup('SS512')
    # ------ CLIENT ------
    g = pairing.deserialize(g_serialized)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."
    (K_P, K_S, K_X, K_I, K_Z, K_T) = key
    stag = cal_cmac_aes(K_T, convert_to_bytes(keywords[0]))
    if not stag:
        return []
    # ---------------- SERVER -------------
    # get keys with stag: T-set entries are stored under stag||counter keys
    t_set_result = list()
    c = 0
    while True:
        key = base64.encodebytes(stag).decode() + KEY_CONNECTOR + str(c)
        if key in edb:
            t_set_result.append(edb[key])
            c += 1
        else:
            break
    # ---------------- CLIENT -------------
    # list of (e, y) with y is Element
    oxt_t_set_tuples = t_set_result
    if not oxt_t_set_tuples:
        return []
    # Concat xtoken with OXT Tuple: one xtoken per (tuple, xterm) pair
    xtoken_tuples = list()
    xterms = keywords[1:]
    for c in range(len(oxt_t_set_tuples)):
        z = cal_cmac_aes(K_Z,
                         convert_to_bytes(keywords[0]) + convert_to_bytes(c))
        e_z = pairing.init(ZR, convert_int_from_bytes(z))
        xtokens_serialized = list()
        for xterm in xterms:
            kxw = cal_cmac_aes(K_X, convert_to_bytes(xterm))
            e_kxw = pairing.init(ZR, convert_int_from_bytes(kxw))
            xtoken = g**(e_z * e_kxw)
            xtokens_serialized.append(pairing.serialize(xtoken))
        xtoken_tuples.append((oxt_t_set_tuples[c][1], xtokens_serialized))
    # ---------------- SERVER -------------
    # match xtags in BF: a tuple matches only if every xterm's xtag is present
    es = list()
    for c in range(len(xtoken_tuples)):
        # check matched in bloomfilter immediately
        xtag_matched = True
        y_c = pairing.deserialize(xtoken_tuples[c][0])
        xtokens_serialized = xtoken_tuples[c][1]
        for xtoken_serialized in xtokens_serialized:
            xtag = pairing.deserialize(xtoken_serialized)**y_c
            xtag_matched = xtag_matched and (str(xtag) in bf)
        if xtag_matched:
            es.append(oxt_t_set_tuples[c][0])
    # ---------------- CLIENT -------------
    # client decrypt e
    K_e = cal_cmac_aes(K_S, convert_to_bytes(keywords[0]))
    rinds = [decrypt_aes(K_e, iv, e) for e in es]
    result = [
        get_rind(base64.decodebytes(rind), K_P).decode() for rind in rinds
    ]
    return result
def main():
    """Set up a RW15 multi-authority ABE system and persist its artifacts.

    Generates public parameters, two authorities ('UT' and 'OU'), and
    attribute keys for user "bob", serializing each group element and
    pickling the results to pp_output.txt, maabepk_output.txt,
    maabesk_output.txt and usersk_output.txt.
    """
    groupObj = PairingGroup('SS512')
    # init object
    maabe = MAABE.MaabeRW15(groupObj)
    # generate pp
    public_parameters = maabe.setup()
    #print("public_parameters=>",public_parameters)
    g1_seri = groupObj.serialize(public_parameters['g1'])
    g2_seri = groupObj.serialize(public_parameters['g2'])
    egg_seri = groupObj.serialize(public_parameters['egg'])
    pp_seri = {'g1': g1_seri, 'g2': g2_seri, 'egg': egg_seri}
    # Context managers guarantee each pickle file is flushed and closed
    # (the original left every handle open).
    with open("pp_output.txt", "wb") as pp_output:
        pickle.dump(pp_seri, pp_output)
    # generate auth key for each authority, serializing every group element
    (pk1, sk1) = maabe.authsetup(public_parameters, 'UT')
    egga1_seri = groupObj.serialize(pk1['egga'])
    gy1_seri = groupObj.serialize(pk1['gy'])
    alpha1_seri = groupObj.serialize(sk1['alpha'])
    y1_seri = groupObj.serialize(sk1['y'])
    pk1_seri = {'name': pk1['name'], 'egga': egga1_seri, 'gy': gy1_seri}
    sk1_seri = {'name': sk1['name'], 'alpha': alpha1_seri, 'y': y1_seri}
    (pk2, sk2) = maabe.authsetup(public_parameters, 'OU')
    egga2_seri = groupObj.serialize(pk2['egga'])
    gy2_seri = groupObj.serialize(pk2['gy'])
    alpha2_seri = groupObj.serialize(sk2['alpha'])
    y2_seri = groupObj.serialize(sk2['y'])
    pk2_seri = {'name': pk2['name'], 'egga': egga2_seri, 'gy': gy2_seri}
    sk2_seri = {'name': sk2['name'], 'alpha': alpha2_seri, 'y': y2_seri}
    maabepk = {'UT': pk1_seri, 'OU': pk2_seri}
    maabesk = {'UT': sk1_seri, 'OU': sk2_seri}
    with open("maabepk_output.txt", "wb") as maabepk_output:
        pickle.dump(maabepk, maabepk_output)
    with open("maabesk_output.txt", "wb") as maabesk_output:
        pickle.dump(maabesk, maabesk_output)
    print("maabepk=>", maabepk)
    print("maabesk=>", maabesk)
    # generate bob attribute keys, one attribute per authority
    gid = "bob"
    user_attr1 = ['STUDENT@UT']
    user_attr2 = ['STUDENT@OU']
    user_sk1 = maabe.multiple_attributes_keygen(public_parameters, sk1, gid,
                                                user_attr1)
    user_sk2 = maabe.multiple_attributes_keygen(public_parameters, sk2, gid,
                                                user_attr2)
    K1_seri = groupObj.serialize(user_sk1['STUDENT@UT']['K'])
    KP1_seri = groupObj.serialize(user_sk1['STUDENT@UT']['KP'])
    K2_seri = groupObj.serialize(user_sk2['STUDENT@OU']['K'])
    KP2_seri = groupObj.serialize(user_sk2['STUDENT@OU']['KP'])
    usersk1_seri = {'STUDENT@UT': {'K': K1_seri, 'KP': KP1_seri}}
    usersk2_seri = {'STUDENT@OU': {'K': K2_seri, 'KP': KP2_seri}}
    print("user_sk1=>", user_sk1)
    print("user_sk2=>", user_sk2)
    user_sk = {'GID': gid, 'keys': merge_dicts(usersk1_seri, usersk2_seri)}
    with open("usersk_output.txt", "wb") as usersk_output:
        pickle.dump(user_sk, usersk_output)
# Encrypt a local file under a symmetric key derived from a random GT element,
# writing the ciphertext and the serialized key element to disk.
debug = 0
groupObj = PairingGroup('SS512')
pk = groupObj.random(GT)  # random GT element used to derive the symmetric key
if debug:
    print(pk)  # fixed: original used Python 2 `print pk`
a = SymmetricCryptoAbstraction(sha1(pk))
# Context managers close each file even on error; the original `finally`
# raised NameError when an earlier open() failed before ffe/fpk were bound.
with open('/Users/cirnotxm/down/charm.dmg', 'rb') as f:
    ff = f.read()
ct = a.encrypt(ff)
if debug:
    print(ct)
with open('/Users/cirnotxm/down/jiami', 'wb') as ffe:
    ffe.write(ct)
with open('/Users/cirnotxm/down/pk', 'wb') as fpk:
    fpk.write(groupObj.serialize(pk))
from charm.adapters.abenc_adapt_hybrid import HybridABEnc import pickle if __name__ == "__main__": groupObj = PairingGroup('SS512') cpabe = CPabe_BSW07(groupObj) hyb_abe = HybridABEnc(cpabe, groupObj) (pk, mk) = hyb_abe.setup() access_policy = '((four or three) and (two or one))' sk = hyb_abe.keygen(pk, mk, ['ONE', 'TWO', 'THREE']) print(sk) plaintext = "Bounty Name: EMR Functional Testing" ciphertext = hyb_abe.encrypt(pk, plaintext, access_policy) print(ciphertext) ciphertext["c1"]["C"] = groupObj.serialize(ciphertext["c1"]["C"]) for key in ciphertext["c1"]["Cy"]: ciphertext["c1"]["Cy"][key] = groupObj.serialize( ciphertext["c1"]["Cy"][key]) ciphertext["c1"]["C_tilde"] = groupObj.serialize( ciphertext["c1"]["C_tilde"]) for key in ciphertext["c1"]["Cyp"]: ciphertext["c1"]["Cyp"][key] = groupObj.serialize( ciphertext["c1"]["Cyp"][key]) ciphertext2 = ciphertext ciphertext2["c1"]["C"] = groupObj.deserialize(ciphertext["c1"]["C"]) for key in ciphertext2["c1"]["Cy"]: ciphertext2["c1"]["Cy"][key] = groupObj.deserialize( ciphertext2["c1"]["Cy"][key]) ciphertext2["c1"]["C_tilde"] = groupObj.deserialize(
def measure_time(attribute_num):
    """Benchmark every phase of the PS-signature credential flow.

    Runs setup, keygen, a Schnorr NIZK prove/verify (scaled to
    attribute_num), blind commitment, signing, unblinding, credential
    randomization, and verification over `attribute_num` attributes,
    printing the elapsed time of each phase.

    :param attribute_num: number of attributes to key/sign over
    """
    # setup
    start = time.time()
    grp = PairingGroup('MNT224')
    ps = PS01(grp)
    end = time.time()
    print("Setup time elapse: ")
    print(end - start)
    # keygen
    start = time.time()
    (pk, sk) = ps.keygen(attribute_num)
    end = time.time()
    print("KeyGen over {0} attributes time elapse: {1}s ".format(
        str(attribute_num), str(end - start)))
    # generate a secret
    secret = grp.random()
    # NIZK Schnorr prover
    start = time.time()
    na = grp.random()
    a = pk['Y1'][0] ** na
    # deterministic nb (Fiat-Shamir challenge from SHA-256 of public values)
    m = hashlib.sha256()
    m.update(grp.serialize(pk['Y1'][0]))
    m.update(grp.serialize(a))
    m.update(grp.serialize(pk['Y1'][0] ** secret))
    m.update(b'userid')  # replaced with real values
    nb = m.digest()
    nb = grp.hash(nb)
    # r
    r = na + nb * secret
    end = time.time()
    # single-attribute time extrapolated by multiplying by attribute_num
    print("NIZK Schnorr over {0} attributes Prover time elapse: {1}s".format(
        str(attribute_num), str((end - start) * attribute_num)))
    # NIZK Schnorr verifier
    start = time.time()
    m = hashlib.sha256()
    m.update(grp.serialize(pk['Y1'][0]))
    m.update(grp.serialize(a))
    m.update(grp.serialize(pk['Y1'][0] ** secret))
    m.update(b'userid')  # replaced with real values
    nb = m.digest()
    nb = grp.hash(nb)
    lh = pk['Y1'][0] ** r
    rh = a * (pk['Y1'][0] ** secret) ** nb
    end = time.time()
    print("NIZK Schnorr over {0} attributes Verifier time elapse: {1}s".format(
        str(attribute_num), str((end - start) * attribute_num)))
    if lh == rh:
        print('NIZK check success')
    else:
        print('NIZK check failure')
    # request ID: blind commitment over all attribute messages
    messages = ["hello" + str(i) for i in range(0, attribute_num)]
    start = time.time()
    t, commitment = ps.commitment(pk, *messages)
    end = time.time()
    print("requestID over {0} attributes time elapse: {1}s".format(
        str(attribute_num), str(end - start)))
    # prove ID: authority signs the commitment
    start = time.time()
    sig = ps.sign(sk, pk, commitment)
    end = time.time()
    print("ProvideID over {0} attributes time elapse: {1}s".format(
        str(attribute_num), str(end - start)))
    # unblind signature
    start = time.time()
    sig = ps.unblind_signature(t, sig)
    end = time.time()
    print("Unblind over {0} attributes time elapse: {1}s".format(
        str(attribute_num), str(end - start)))
    # prove ID: randomize the credential and build an ElGamal-style encryption
    start = time.time()
    rand_sig = ps.randomize_sig(sig)
    cipher_sk = grp.random()
    cipher_1 = pk['Y2'][1] ** cipher_sk
    cipher_pk = (pk['Y2'][1] ** grp.hash('authority')) ** cipher_sk
    cipher_2 = cipher_pk * (pk['Y2'][0] ** grp.hash(messages[0], ZR))
    end = time.time()
    print("Credential Randomize over {0} attributes time elapse: {1}s".format(
        str(attribute_num), str(end - start)))
    # cred.verify
    start = time.time()
    result = ps.verify(pk, rand_sig, *messages)
    end = time.time()
    print("RP's Credential Verify over {0} attributes time elapse: {1}s".format(
        str(attribute_num), str(end - start)))
def test_ps_sign():
    """Exercise the PS01 blind-signature flow and print size measurements.

    Generates a 20-attribute keypair, reports serialized key sizes, signs a
    blinded commitment over one message, unblinds and verifies it, then
    randomizes the signature and verifies again.
    """
    grp = PairingGroup('MNT224')
    ps = PS01(grp)
    # messages = ['Hi there', 'Not there', 'Some message ................', 'Dont know .............', 'great!!!!']
    messages = ["hi there"]
    (pk, sk) = ps.keygen(20)
    if debug:
        print("Keygen...")
        print("pk :=", pk)
        print("sk :=", sk)
    # Measure serialized key sizes; scalar entries ('X2', 'g1', 'g2') are
    # serialized directly, list entries element by element.
    pk_len = 0
    sk_len = 0
    pk_serial = dict(pk)
    sk_serial = dict(sk)
    for key, value in sk_serial.items():
        sk_serial[key] = grp.serialize(value).decode()
    for key, value in pk_serial.items():
        if key in {'X2', 'g1', 'g2'}:
            pk_serial[key] = grp.serialize(value).decode()
        else:
            pk_serial[key] = [grp.serialize(item).decode() for item in value]
    for key, value in pk_serial.items():
        if key in {'X2', 'g1', 'g2'}:
            pk_len += len(pk_serial[key])
        else:
            for item in pk_serial[key]:
                pk_len += len(item)
    for key, value in sk_serial.items():
        sk_len += len(sk_serial[key])
    print('public key bytes: ', str(pk_len))
    print('private key bytes: ', str(sk_len))
    # Print serialized sizes of a random exponent and a G1 element.
    t = grp.random()
    print(t)
    print('random number len')
    print(len(grp.serialize(t).decode()))
    zeta = grp.random(G1)
    zeta = zeta ** t
    print(zeta)
    print('group element len')
    print(len(grp.serialize(zeta).decode()))
    # Blind-sign flow: commit, sign, unblind, verify.
    t, commitment = ps.commitment(pk, *messages)
    if debug:
        print("commitment: ", commitment)
    be_signed = commitment
    sig = ps.sign(sk, pk, commitment)
    if debug:
        print("Signature: ", sig)
    sig = ps.unblind_signature(t, sig)
    print(len(grp.serialize(sig[0]).decode()))
    print(len(grp.serialize(sig[1]).decode()))
    result = ps.verify(pk, sig, *messages)
    assert result, "INVALID signature!"
    if debug:
        print("Successful Verification!!!")
    # Randomization must produce a different but still-valid signature.
    rand_sig = ps.randomize_sig(sig)
    assert sig != rand_sig
    if debug:
        print("Randomized Signature: ", rand_sig)
    result = ps.verify(pk, rand_sig, *messages)
    assert result, "INVALID signature!"
    if debug:
        print("Successful Verification!!!")
# Encrypt a local file under a symmetric key derived from a random GT element,
# writing the ciphertext and the serialized key element to disk.
# (`debug` is assumed to be defined earlier in the file.)
groupObj = PairingGroup('SS512')
pk = groupObj.random(GT)  # random GT element used to derive the symmetric key
if debug:
    print(pk)  # fixed: original used Python 2 `print pk`
a = SymmetricCryptoAbstraction(sha1(pk))
# Context managers close each file even on error; the original `finally`
# raised NameError when an earlier open() failed before ffe/fpk were bound.
with open('/Users/cirnotxm/down/charm.dmg', 'rb') as f:
    ff = f.read()
ct = a.encrypt(ff)
if debug:
    print(ct)
with open('/Users/cirnotxm/down/jiami', 'wb') as ffe:
    ffe.write(ct)
with open('/Users/cirnotxm/down/pk', 'wb') as fpk:
    fpk.write(groupObj.serialize(pk))
async def main():
    """Benchmark repeated credential issuance against the IdP.

    Sets up the MNT224 pairing group and PS01 scheme, fetches the IdP public
    key, builds one blind-sign request (commitment + Schnorr NIZK proof),
    then requests 20 tokens in a rate-limited loop and prints the average
    round-trip time per credential.
    """
    import asyncio  # local import: needed for the non-blocking sleep below

    # group setup and secret generation
    setup_start_time = time.time()
    global grp, ps, pk, secret, messages, sig, gamma, expires_in
    grp = PairingGroup('MNT224')
    messages = ['random_string']
    secret = grp.hash(messages[0], ZR)  # user secret derived from first message
    ps = PS01(grp)
    pk = None
    setup_end_time = time.time()
    print("Setup total time: {0}s ".format(str(setup_end_time - setup_start_time)))
    # communicate with IdP
    print("****Communication with IdP****")
    idp_start_time = time.time()
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
        # first rt: get idp public key (scalars vs lists deserialized differently)
        pk = await fetch(session, 'https://' + idp_ip + ':6000/')
        for key, value in pk.items():
            if key in {'X2', 'g1', 'g2'}:
                pk[key] = grp.deserialize(value.encode())
            else:
                pk[key] = [grp.deserialize(item.encode()) for item in value]
        if debug:
            print('idp pk:=', pk)
        # generate t and commitment (blinding factor t hides the secret)
        start = time.time()
        t = grp.random(ZR)
        gt = (pk['g1'] ** t)
        commitment_secret = pk['Y1'][0] ** secret
        commitment = gt * commitment_secret
        if debug:
            print("commitment: ", commitment)
        end = time.time()
        print("CRED.PrepareBlindSign over {0} attributes time elapse: {1}s ".format(str(1), str(end - start)))
        # second rt: schnorr proof of knowledge of `secret`
        start = time.time()
        na = grp.random()
        a = pk['Y1'][0] ** na
        # schnorr NIZK: generate nb (Fiat-Shamir challenge)
        m = hashlib.sha256()
        m.update(grp.serialize(pk['Y1'][0]))
        m.update(grp.serialize(a))
        m.update(grp.serialize(pk['Y1'][0] ** secret))
        m.update(b'userid')  # replaced with real values
        nb = m.digest()
        nb = grp.hash(nb)
        r = na + nb * secret
        end = time.time()
        print("NIZK Schnorr Prover (User-IdP) over {0} element time elapse: {1}s ".format(str(1), str(end - start)))
        json_param = {'g_t': grp.serialize(gt).decode(),
                      'commitment_secret': grp.serialize(commitment_secret).decode(),
                      'a': grp.serialize(a).decode(),
                      'r': grp.serialize(r).decode()}
        # Request 20 credentials, throttled to `running_frequency` per second.
        average_time = 0
        counter = 0
        for i in range(0, 20):
            time1 = time.time()
            print(time.time())
            json_rep = await fetch(session, 'https://' + idp_ip + ':6000/token', json_param)
            time2 = time.time()
            counter += 1
            average_time += (time2 - time1)
            if 1 / running_frequency - (time2 - time1) > 0:
                # Fixed: time.sleep() blocks the event loop inside an async
                # coroutine; asyncio.sleep yields control while waiting.
                await asyncio.sleep(1 / running_frequency - (time2 - time1))
        print("average time per credential: ", str(average_time / counter))
    idp_end_time = time.time()
    print("IdP total time: {0}s ".format(str(idp_end_time - idp_start_time)))
# Experiment: mask a serialized pairing element by XOR-ing it with a hash,
# then recover it. `group`, `pair`, G1, G2, ZR come from the enclosing scope.
# NOTE(review): group.serialize returns bytes on Python 3, where iterating
# yields ints and ord() would fail — this xor assumes str inputs (Python 2
# semantics); confirm before reuse. H2(a_s) is a ZR element, not a string,
# so zipping it in xor is also questionable — verify intent.
xor = lambda ss, cc: ''.join(chr(ord(s) ^ ord(c)) for s, c in zip(ss, cc))
a0 = group.random(G1)
a1 = group.random(G2)
a = pair(a0, a1)
H2 = lambda x: group.hash(str(x) + str(group.random(ZR)), ZR)
# b0 = group.random(G1)
# b1 = group.random(G2)
# b = pair(b0,b1)
b = group.random()
a_s = group.serialize(a)
b_s = group.serialize(b)
c_s = xor(H2(a_s), b_s)
# Fixed: the original had a bare `a group.deserialize(xor(c_s, b_s))`
# statement here — a syntax error (two expressions juxtaposed). The assert
# below performs the intended round-trip check.
assert a == group.deserialize(xor(c_s, b_s)), 'nope'
# c = group.deserialize(c_s)  # wrong
# c1 = group.deserialize(xor(a_s,b_s))  # wrong
# c_s1 = group.serialize(c)  # wrong
# c1_s = group.serialize(c1)  # wrong
# group.deserialize(xor(c1_s,b_s))  # wrong
def basic_test(number_of_basic_trials, number_of_attribute_authorities, number_of_attributes): """ :param number_of_basic_trials: Number of times to run the function and calculate the average :param number_of_attribute_authorities: Number of attribute authorities :param number_of_attributes: Number of attributes :return: """ # scheme setup group_object = PairingGroup('SS512') omacpabe = OMACPABE(group_object) GPP, GMK = omacpabe.abenc_casetup() users = {} # public user data authorities = {} # authority data dictionary attribute_authorities = [] # list of attribute authorities authorities_and_attributes = { } # dictionary of attributes and the matching authorities attribute_master_list = [] # master list of all possible attributes seed_length = int(number_of_attributes / number_of_attribute_authorities) seed_attributes = [ i + 1 for i in range(seed_length) ] # list comprehension to generate number list to aid in generation of attributes for i in range(number_of_attribute_authorities): attribute_authority_name = "AUTHORITY" + str( i + 1) # create attribute authorities attribute_authorities.append( attribute_authority_name) # add new authorities to overall list current_attribute_authority_attributes = [ ] # attributes for current authority for seed_attr in seed_attributes: authority_attribute = attribute_authority_name + "." 
+ str( seed_attr) # create attribute current_attribute_authority_attributes.append( authority_attribute ) # add attribute to authority attribute list authorities_and_attributes[ attribute_authority_name] = current_attribute_authority_attributes # add authority as key and its attributes as value to the dictionary attribute_master_list += current_attribute_authority_attributes # add attributes created to master attribute list for authority in authorities_and_attributes.keys(): omacpabe.abenc_aareg(GPP, authority, authorities_and_attributes[authority], authorities) alice = { 'id': 'alice', 'authoritySecretKeys': {}, 'keys': None } # new user alice alice['keys'], users[alice['id']] = omacpabe.abenc_userreg(GPP) for authority in authorities.keys(): alice['authoritySecretKeys'][authority] = {} for attr in authorities_and_attributes[authority]: omacpabe.abenc_keygen(GPP, authorities[authority], attr, users[alice['id']], alice['authoritySecretKeys'][authority]) plain_text_secret_key_group_element = group_object.random(GT) # showing usage of serialize and deserialize for converting group elements bit_string_from_group_element = group_object.serialize( plain_text_secret_key_group_element) assert isinstance(bit_string_from_group_element, object) group_element_from_bit_string = group_object.deserialize( bit_string_from_group_element) assert group_element_from_bit_string == plain_text_secret_key_group_element, 'SERIALIZATION ERROR!' 
policy_string = gp(attribute_master_list, number_of_attributes) # generate policy # benchmarking encryption_times = [ ] # list to hold encryption times for multiple iterations for i in range(number_of_basic_trials): start_time = clock() ciphertexts = omacpabe.abenc_encrypt( GPP, policy_string, plain_text_secret_key_group_element, authorities) duration = clock() - start_time encryption_times.append(duration) # average_encryption_time = sum(encryption_times) / len(encryption_times) # print("average encryption time = ", average_encryption_time) token, partially_decrypted_ciphertext = omacpabe.abenc_generatetoken( GPP, ciphertexts, alice['authoritySecretKeys'], alice['keys'][0]) decryption_times = [ ] # list to hold decryption times for multiple iterations for i in range(number_of_basic_trials): start_time = clock() plaintext = omacpabe.abenc_decrypt(partially_decrypted_ciphertext, token, alice['keys']) duration = clock() - start_time decryption_times.append(duration) # average_decryption_time = sum(decryption_times) / len(decryption_times) # print("average decryption time = ", average_decryption_time) assert plain_text_secret_key_group_element == plaintext, 'FAILED DECRYPTION!' # print('SUCCESSFUL DECRYPTION') return encryption_times, decryption_times
def main():
    """Exploratory test of multi-authority hybrid ABE (DABE) key handling.

    Sets up two authorities, issues keys for user bob under the original
    global parameters (K, Ksame, Knew) and under an alternative set
    (Kalternative), encrypts a message under several parameter/key
    combinations, demonstrates JSON and charm serialization of keys, and
    then attempts decryption with each combination to show which ones
    succeed or fail.
    """
    groupObj = PairingGroup('SS512')
    dabe = Dabe(groupObj)
    # Waste one random instance of the PRG sequence.
    #print(groupObj.random())
    hyb_abema = HybridABEncMA(dabe, groupObj)
    #Setup global parameters for all new authorities
    gp = hyb_abema.setup()
    #Instantiate a few authorities
    #Attribute names must be globally unique. HybridABEncMA
    #Two authorities may not issue keys for the same attribute.
    #Otherwise, the decryption algorithm will not know which private key to use
    jhu_attributes = ['jhu.professor', 'jhu.staff', 'jhu.student']
    jhmi_attributes = [
        'jhmi.doctor', 'jhmi.nurse', 'jhmi.staff', 'jhmi.researcher'
    ]
    (jhuSK, jhuPK) = hyb_abema.authsetup(gp, jhu_attributes)
    (jhmiSK, jhmiPK) = hyb_abema.authsetup(gp, jhmi_attributes)
    allAuthPK = {}
    allAuthPK.update(jhuPK)
    allAuthPK.update(jhmiPK)
    # Generate new keys with same parameters, such that we demonstrate whether ciphertexts encrypted with these new keys
    # cannot be decrypted with old user's keys.
    (jhuSKnew, jhuPKnew) = hyb_abema.authsetup(gp, jhu_attributes)
    (jhmiSKnew, jhmiPKnew) = hyb_abema.authsetup(gp, jhmi_attributes)
    allAuthPKnew = {}
    allAuthPKnew.update(jhuPKnew)
    allAuthPKnew.update(jhmiPKnew)
    #Setup a user with a few keys
    bobs_gid = "20110615 [email protected] cryptokey"
    K = {}
    Ksame = {
    }  # A key to allow us to demonstrate that the generation creates the same keys a second time; keygen is deterministic.
    print("JhuSK: ", jhuSK)
    print("jhmiSK: ", jhmiSK)
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, K)
    #hyb_abema.keygen(gp, jhuSK,jhu_attributes, bobs_gid, K)  # Does not work; only one attribute at a time.
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, K)
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, Ksame)
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, Ksame)
    # Let's set up an "alternate" set of ABE secret keys for bob, generated with the same attributes but with a different
    # gp. Let's see whether decryption is successful using different combinations of gp.
    gpAlternative = hyb_abema.setup()
    Kalternative = {}
    hyb_abema.keygen(gpAlternative, jhuSK, 'jhu.professor', bobs_gid,
                     Kalternative)
    hyb_abema.keygen(gpAlternative, jhmiSK, 'jhmi.researcher', bobs_gid,
                     Kalternative)
    # I will also recreate a set of keys for bob using original gp, such that we check whether these keys decrypt old ciphertexts using same ABE encryption keys and gp.
    Knew = {}
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, Knew)
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, Knew)
    msg = b'Hello World, I am a sensitive record!'
    size = len(msg)
    policy_str = "(jhmi.doctor OR (jhmi.researcher AND jhu.professor))"
    # Encrypt under three combinations of global params / authority keys.
    #ct = hyb_abema.encrypt(allAuthPK, gp, msg, policy_str)
    #ctAlternative = hyb_abema.encrypt(allAuthPK, gpAlternative, msg, policy_str)
    #ctNewAuthorityKeys = hyb_abema.encrypt(allAuthPKnew, gp, msg, policy_str)
    ct = hyb_abema.encrypt(gp, allAuthPK, msg, policy_str)
    ctAlternative = hyb_abema.encrypt(gpAlternative, allAuthPK, msg,
                                      policy_str)
    ctNewAuthorityKeys = hyb_abema.encrypt(gp, allAuthPKnew, msg, policy_str)
    if debug:
        print("Ciphertext")
        print("c1 =>", ct['c1'])
        print("c2 =>", ct['c2'])
    print("\n\nUser secret key K:")
    print(K)
    print("\n\nUser secret key Ksame:")
    print(Ksame)
    print("\n\nPublic keys:")
    print(allAuthPK)
    print("\n\njhuSK key:")
    print(jhuSK)
    print(type(jhuSK['JHU.STUDENT']['alpha_i']))
    print(type(jhuSK['JHU.STUDENT']['y_i']))
    # JSON round-trip of the key dict via the project's custom encoder/decoder.
    K_json = json.dumps(K, cls=jsonhelper.KeyEncoder,
                        pairingCurve=groupObj.param)
    print("\n\nJSON representation of K:\n", K_json)
    K_fromJson = json.loads(K_json, cls=jsonhelper.KeyDecoder)
    print("\n\nDecoding K from JSON:\n", K_fromJson)
    #json.dumps(K)
    # Let's extract pieces of the secret key and attempt to serialize such that JSON can manipulate them.
    # Use Charm's pairinggroup.PairingGroup serialize and deserialize methods (which are coded in C).
    print("\n\nOne secret key:")
    print(K['JHMI.RESEARCHER']['k'])
    print(K['JHMI.RESEARCHER']['k'].__class__.__name__)
    #print(K['JHMI.RESEARCHER']['k'].__class__.__dict__)
    #print(charm.core.math.pairing.ElementType.__name__)
    print(
        isinstance(type(K['JHMI.RESEARCHER']['k']),
                   charm.core.math.pairing.__class__))
    # Copy the object to a variable.
    obj = K['JHMI.RESEARCHER']['k']
    print(K['JHMI.RESEARCHER']['k'].__class__.__name__ == 'Element')
    # Test working with a variable.
    print(obj.__class__.__name__ == 'Element')
    print(K['JHMI.RESEARCHER']['k'])
    print("Serialized:")
    serialized_key = groupObj.serialize(K['JHMI.RESEARCHER']['k']).decode()
    print(serialized_key)
    print(type(serialized_key))
    #print(groupObj.Pairing)
    print("Deserialized:")
    print(groupObj.deserialize(serialized_key.encode()))
    # Let's try deserializing with another groupObj, but with same parameter.
    groupObjOther = PairingGroup(groupObj.param)
    print(groupObjOther.deserialize(serialized_key.encode()))
    print("Random 1: ", groupObj.random())
    print("Random 2: ", groupObj.random())
    print("Random 3: ", groupObj.random())
    # Attempt decryption under each params/keys/ciphertext combination;
    # each attempt prints its own description and any raised exception.
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys.")
        orig_msg = hyb_abema.decrypt(gp, K, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gp for ciphertext and gpAlternative (Kalternative) for secret keys."
        )
        orig_msg = hyb_abema.decrypt(gp, Kalternative, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gp for secret keys."
        )
        orig_msg = hyb_abema.decrypt(gp, K, ctAlternative)
        if debug:
            print("Successful Decryption!!!")
        assert orig_msg == msg, "Failed Decryption!!!"
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gpAlternative (Kalternative) for secret keys."
        )
        # NOTE(review): the printed description mentions gpAlternative, but
        # the call below passes the original gp — confirm which is intended.
        orig_msg = hyb_abema.decrypt(gp, Kalternative, ctAlternative)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gpAlternative (Kalternative) for secret keys, and gpAlternative for decrypt method."
        )
        print("Note that authority's ABE secret keys were generated with gp.")
        orig_msg = hyb_abema.decrypt(gpAlternative, Kalternative,
                                     ctAlternative)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys, but gpAlternative for decrypt method."
        )
        orig_msg = hyb_abema.decrypt(gpAlternative, K, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gp for ciphertext and Knew for secret keys (generated with gp)."
        )
        orig_msg = hyb_abema.decrypt(gp, Knew, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys, but a new ciphertext generated with new authority's keys with same attributes."
        )
        orig_msg = hyb_abema.decrypt(gp, K, ctNewAuthorityKeys)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)