def abe_key_extract(sym_key):
    """Recover the ABE pairing element encoded in a symmetric key string.

    :param sym_key: serialized symmetric key (charm serialization format)
    :return: the pairing-group element corresponding to ``sym_key``
    """
    # Deserialization happens in the same SS512 group the key was produced in.
    return PairingGroup('SS512').deserialize(sym_key)
def query(edb: dict, keywords: list, key: tuple, iv: bytes, bf: BloomFilter, g_serialized: bytes) -> list:
    """Run an OXT query for a conjunction of keywords.

    :param edb: encrypted database (TSet) keyed by stag-derived strings
    :param keywords: queried keywords; keywords[0] is the sterm, the rest are xterms
    :param key: OXT key tuple (K_P, K_S, K_X, K_I, K_Z, K_T)
    :param iv: AES IV used when decrypting the stored e-values
    :param bf: Bloom filter holding the xtags (the XSet)
    :param g_serialized: serialized pairing-group generator g
    :return: decrypted identifiers of documents matching every keyword
    """
    pairing = PairingGroup('SS512')

    # ------ CLIENT ------
    g = pairing.deserialize(g_serialized)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."
    (K_P, K_S, K_X, K_I, K_Z, K_T) = key

    stag = cal_cmac_aes(K_T, convert_to_bytes(keywords[0]))
    if not stag:
        return []

    # ---------------- SERVER -------------
    # TSet entries for this stag are stored under consecutive counter suffixes;
    # walk them until the first missing index.
    stag_prefix = base64.encodebytes(stag).decode() + KEY_CONNECTOR
    t_set_result = []
    counter = 0
    while stag_prefix + str(counter) in edb:
        t_set_result.append(edb[stag_prefix + str(counter)])
        counter += 1

    # ---------------- CLIENT -------------
    # Each entry is an (e, y) pair with y a serialized group element.
    oxt_t_set_tuples = t_set_result
    if not oxt_t_set_tuples:
        return []

    # Blind every xterm with the per-position z value to build the xtokens.
    xterms = keywords[1:]
    w1_bytes = convert_to_bytes(keywords[0])
    xtoken_tuples = []
    for position, (_, y_serialized) in enumerate(oxt_t_set_tuples):
        z = cal_cmac_aes(K_Z, w1_bytes + convert_to_bytes(position))
        e_z = pairing.init(ZR, convert_int_from_bytes(z))
        xtokens_serialized = []
        for xterm in xterms:
            kxw = cal_cmac_aes(K_X, convert_to_bytes(xterm))
            e_kxw = pairing.init(ZR, convert_int_from_bytes(kxw))
            xtokens_serialized.append(pairing.serialize(g ** (e_z * e_kxw)))
        xtoken_tuples.append((y_serialized, xtokens_serialized))

    # ---------------- SERVER -------------
    # A position matches only if every one of its xtags appears in the filter.
    es = []
    for position, (y_serialized, xtokens_serialized) in enumerate(xtoken_tuples):
        y_c = pairing.deserialize(y_serialized)
        if all(str(pairing.deserialize(tok) ** y_c) in bf
               for tok in xtokens_serialized):
            es.append(oxt_t_set_tuples[position][0])

    # ---------------- CLIENT -------------
    # Decrypt each surviving e-value and unwrap the record identifier.
    K_e = cal_cmac_aes(K_S, convert_to_bytes(keywords[0]))
    return [
        get_rind(base64.decodebytes(decrypt_aes(K_e, iv, e)), K_P).decode()
        for e in es
    ]
#!/usr/bin/env python3
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, GT, pair
from utils import b64encode, jencode
import json
import base64

if __name__ == "__main__":
    G = PairingGroup('SS512')

    # Accept a pasted base64 generator, or sample a fresh one from G1.
    entered = input("g: ")
    g = G.deserialize(base64.b64decode(entered)) if entered != "" else G.random(G1)

    skey = G.random(ZR)   # secret exponent
    pkey = g ** skey      # matching public key
    m = G.random(ZR)

    pk = {"g": g, "pk": pkey}
    sk = {"g": g, "pk": pkey, "sk": skey}
    mg = {"m": m}

    print("Generated credentials: {}".format(
        json.dumps(b64encode(sk), indent=4)))

    # Persist both halves of the credential.
    with open("cred.pk", "w+") as f:
        f.write(jencode(pk))
    with open("cred.sk", "w+") as f:
        f.write(jencode(sk))
# Demo: CP-ABE hybrid encryption, then round-trip the ciphertext header's
# group elements through serialize/deserialize before decrypting.
hyb_abe = HybridABEnc(cpabe, groupObj)
(pk, mk) = hyb_abe.setup()

access_policy = '((four or three) and (two or one))'
sk = hyb_abe.keygen(pk, mk, ['ONE', 'TWO', 'THREE'])
print(sk)

plaintext = "Bounty Name: EMR Functional Testing"
ciphertext = hyb_abe.encrypt(pk, plaintext, access_policy)
print(ciphertext)

# Flatten every group element in the ABE header to bytes.
header = ciphertext["c1"]
header["C"] = groupObj.serialize(header["C"])
for attr in header["Cy"]:
    header["Cy"][attr] = groupObj.serialize(header["Cy"][attr])
header["C_tilde"] = groupObj.serialize(header["C_tilde"])
for attr in header["Cyp"]:
    header["Cyp"][attr] = groupObj.serialize(header["Cyp"][attr])

# NOTE: ciphertext2 aliases ciphertext (same dict object, not a copy);
# the pass below restores the serialized fields in place.
ciphertext2 = ciphertext
header2 = ciphertext2["c1"]
header2["C"] = groupObj.deserialize(header2["C"])
for attr in header2["Cy"]:
    header2["Cy"][attr] = groupObj.deserialize(header2["Cy"][attr])
header2["C_tilde"] = groupObj.deserialize(header2["C_tilde"])
for attr in header2["Cyp"]:
    header2["Cyp"][attr] = groupObj.deserialize(header2["Cyp"][attr])

print(hyb_abe.decrypt(pk, sk, ciphertext2) == plaintext)
def query(edb: dict, shve: dict, keywords: list, key: tuple, iv: bytes,
          bf: BloomFilter, g_serialized: bytes, paralleled=False,
          num_processes=None, benchmarking=False):
    """Query HXT for a conjunction of keywords.

    The flow alternates client and server roles in-process:
    client derives stag -> server walks the TSet -> client blinds xtokens ->
    server raises them to y_c (xtags), hashes them, and matches against the
    SHVE data -> client decrypts the surviving e-values.

    :param edb: OXT encrypted database
    :param shve: SHVE data
    :param keywords: list of keywords; keywords[0] is the sterm
    :param key: all keys (K_P, K_S, K_X, K_I, K_Z, K_T, K_H)
    :param iv: IV
    :param bf: bloomfilter (only its hash_seeds / num_bits are used here)
    :param g_serialized: serialized g value
    :param paralleled: should we parallel the process or not.
        NOTE: (maybe) incorrect parallelism.
    :param num_processes: number of processes used for parallelism
    :param benchmarking: whether this is a benchmark or not (default is False)
    :return: the list of results. If benchmarking, also return (number of
        xtags for stag) as the 2nd value of the tuple
    """
    pairing = PairingGroup('SS512')
    start_time = time.time()

    # ------ CLIENT ------
    g = pairing.deserialize(g_serialized)
    assert g.initPP(), "ERROR: Failed to init pre-computation table for g."
    (K_P, K_S, K_X, K_I, K_Z, K_T, K_H) = key
    num_bits = bf.num_bits

    # client generates stag
    stag = cal_cmac_aes(K_T, convert_to_bytes(keywords[0]))
    logging.debug(('client generates stag time:', time.time() - start_time))

    result = []
    if not stag:
        if benchmarking:
            return result, 0
        else:
            return result

    start_time = time.time()

    # ---------------- SERVER -------------
    # get keys with stag: TSet entries live under consecutive counter suffixes.
    t_set_result = list()
    c = 0
    while True:
        key = base64.encodebytes(stag).decode() + KEY_CONNECTOR + str(c)
        if key in edb:
            t_set_result.append(edb[key])
            c += 1
        else:
            break
    logging.debug(
        ('server get keys with stag time:', time.time() - start_time))

    # ---------------- CLIENT -------------
    start_time = time.time()
    # list of (e, y) with y is Element
    oxt_t_set_tuples = t_set_result
    if not oxt_t_set_tuples:
        if benchmarking:
            return result, len(t_set_result)
        else:
            return result

    # Concat xtoken with OXT Tuple
    xtoken_tuples = list()
    w1 = keywords[0]
    xterms = keywords[1:]
    num_t_set_stag = len(oxt_t_set_tuples)
    if paralleled:
        # parallel processing: workers share the keys/group state via initargs
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_cal_xtokens_client,
                                  initargs=(K_Z, K_X, w1, xterms, pairing, g,
                                            oxt_t_set_tuples)) as pool:
            # pool = multiprocessing.Pool()
            xtoken_tuples = pool.map(cal_xtokens_client_parallel,
                                     [c for c in range(num_t_set_stag)])
    else:
        for c in range(num_t_set_stag):
            xtoken_tuple = cal_xtokens_client(K_Z, K_X, w1, c, xterms, pairing,
                                              g, oxt_t_set_tuples)
            xtoken_tuples.append(
                xtoken_tuple)  # each pair is ((e, y_c), xtokens_[c])
    logging.debug(('client generates xtokens time:', time.time() - start_time))

    # ---------------- SERVER -------------
    start_time = time.time()
    # Server is generating xtag
    xtags_tuples = list()
    if paralleled:
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_cal_xtags_server,
                                  initargs=(pairing, )) as pool:
            xtags_tuples = pool.map(cal_xtags_server_parallel, xtoken_tuples)
    else:
        for c in range(len(xtoken_tuples)):
            xtags_tuples.append(cal_xtags_server(
                xtoken_tuples[c], pairing))  # each pair is ((e, y) -> xtags_of_c)
    logging.debug(('Server generating xtag time:', time.time() - start_time))

    start_time = time.time()
    xtags_hash_tuples = list()
    if paralleled:
        # parallel processing
        with multiprocessing.Pool(
                processes=num_processes,
                initializer=init_worker_cal_xtags_hashes_server,
                initargs=(bf.hash_seeds, num_bits)) as pool:
            xtags_hash_tuples = pool.map(cal_xtags_hashes_server_parallel,
                                         xtags_tuples)
    else:
        for xtags_tuple in xtags_tuples:
            # Slight reordering vs. the paper, nothing algorithmically special:
            # instead of hashing right after computing each xtag, the server
            # first computes all xtags (map() above / loop here), then computes
            # the hashes for each of them.
            # each pair is ((e, y) -> hashes_of_xtags_of_c)
            xtags_hash_tuples.append(
                cal_xtags_hashes_server(xtags_tuple, bf.hash_seeds, num_bits))
    logging.debug(
        ('Server generating xtag hashes time:', time.time() - start_time))

    start_time = time.time()
    # Start to match HVE
    es = list()
    if paralleled:
        # parallel processing
        with multiprocessing.Pool(processes=num_processes,
                                  initializer=init_worker_matching_shve,
                                  initargs=(K_H, iv, shve)) as pool:
            # pool = multiprocessing.Pool()
            tmp = pool.map(matching_shve_parallel, xtags_hash_tuples)
            # matching_shve_parallel returns None for non-matching tuples.
            for e in tmp:
                if e is not None:
                    es.append(e)
            logging.debug(es)
    else:
        for hash_tuple in xtags_hash_tuples:
            e = matching_shve(hash_tuple, K_H, iv, shve)
            if e is not None:
                es.append(e)
    logging.debug(('Server match SHVE time:', time.time() - start_time))

    # ---------------- CLIENT -------------
    start_time = time.time()
    # client decrypt e
    K_e = cal_cmac_aes(K_S, convert_to_bytes(keywords[0]))
    rinds = [decrypt_aes(K_e, iv, e) for e in es]
    result = [
        get_rind(base64.decodebytes(rind), K_P).decode() for rind in rinds
    ]
    logging.debug(('client decrypt e time:', time.time() - start_time))

    if benchmarking:
        return result, len(t_set_result)
    else:
        return result
def merge_dicts(*dict_args):
    """Shallow-copy and merge any number of dicts into a new dict.

    Precedence goes to key/value pairs in latter dicts.

    :param dict_args: dictionaries to merge, earliest first
    :return: a new dict containing all entries
    """
    result = {}
    for dictionary in dict_args:
        result.update(dictionary)
    return result


# get pp
# Fix: the files were opened but never closed; `with` guarantees closure.
# NOTE(review): pickle.load is unsafe on untrusted files — confirm these
# outputs are locally generated only.
with open("pp_output.txt", "rb") as pp_file:
    pp_unpi = pickle.load(pp_file)
g1_deseri = groupObj.deserialize(pp_unpi['g1'])
g2_deseri = groupObj.deserialize(pp_unpi['g2'])
egg_deseri = groupObj.deserialize(pp_unpi['egg'])
public_parameters = {'g1': g1_deseri, 'g2': g2_deseri, 'egg': egg_deseri}
#print(public_parameters)

# get maabepk: per-authority ('UT', 'OU') public keys
with open("maabepk_output.txt", "rb") as maabepk_file:
    maabepk_unpi = pickle.load(maabepk_file)
egga1_deseri = groupObj.deserialize(maabepk_unpi['UT']['egga'])
gy1_deseri = groupObj.deserialize(maabepk_unpi['UT']['gy'])
egga2_deseri = groupObj.deserialize(maabepk_unpi['OU']['egga'])
gy2_deseri = groupObj.deserialize(maabepk_unpi['OU']['gy'])
pk1 = {'egga': egga1_deseri, 'gy': gy1_deseri}
pk2 = {'egga': egga2_deseri, 'gy': gy2_deseri}
maabepk = {'UT': pk1, 'OU': pk2}
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, GT, pair
from schemeParam import publicParam, userSecretData

testDatafilePath = '/home/ty/workspace_for_py/AllSchemeTestData/test0'


def getUserParam(k):
    """Build the k-th user's parameter dict from the shared scheme data.

    :param k: index of the user in ``userSecretData.sks`` / ``publicParam.us``
    :return: dict with the user's secret 'x', public 'u' and a 'fileName'
        derived from the serialized secret key
    """
    userParam = {
        'x': group.deserialize(userSecretData.sks[k]),
        'u': group.deserialize(publicParam.us[k]),
        'fileName': userSecretData.sks[k].decode()
    }
    return userParam


group = PairingGroup('SS512')
g = group.deserialize(publicParam.g)

# Fix: the original duplicated the dict construction for user 0 verbatim;
# reuse the helper instead.
userParam = getUserParam(0)

verInfos = [publicParam.verInfos[i] for i in range(50000)]

# some params in tian's scheme
tian_r = group.random(ZR)
segmentNum = 200

# blockSize: 4KB
blockNumber = 50000  # 62500
trialsNumber = 20
#!/usr/bin/env python3
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, GT, pair
from utils import b64encode, jencode, product
import json
import base64

if __name__ == "__main__":
    G = PairingGroup('SS512')

    # (g1, g2): accept pasted base64 generators, or sample fresh ones from G1.
    entered1 = input("g1: ")
    g1 = G.deserialize(base64.b64decode(entered1)) if entered1 != "" else G.random(G1)
    entered2 = input("g2: ")
    g2 = G.deserialize(base64.b64decode(entered2)) if entered2 != "" else G.random(G1)
    g = (g1, g2)

    # Two-component secret; the public key is the product of g_i^{a_i}.
    skey = G.random(ZR, 2)
    pkey = product(g_ ** a for g_, a in zip(g, skey))

    pk = {"g": g, "pk": pkey}
    sk = {"g": g, "pk": pkey, "sk": skey}

    print("Generated credentials: {}".format(
        json.dumps(b64encode(sk), indent=4)))

    with open("cred.pk", "w+") as f:
        f.write(jencode(pk))
async def main():
    """Benchmark the credential-issuance round trips against the IdP.

    Sets up the pairing group and PS signature scheme, fetches the IdP's
    public key, builds a blinded commitment to the user secret with a
    Schnorr proof, then requests 20 tokens while rate-limiting to
    ``running_frequency`` requests per second and reporting timings.
    """
    # group setup and secret generation
    setup_start_time = time.time()
    global grp, ps, pk, secret, messages, sig, gamma, expires_in
    grp = PairingGroup('MNT224')
    messages = ['random_string']
    secret = grp.hash(messages[0], ZR)
    ps = PS01(grp)
    pk = None
    setup_end_time = time.time()
    print("Setup total time: {0}s ".format(str(setup_end_time - setup_start_time)))

    # communicate with IdP
    print("****Communication with IdP****")
    idp_start_time = time.time()
    # ssl=False: certificate checking disabled (IdP uses an ad-hoc cert).
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
        # first rt: get idp public key
        pk = await fetch(session, 'https://' + idp_ip + ':6000/')
        # 'X2'/'g1'/'g2' are single serialized elements; every other entry
        # is a list of serialized elements.
        for key, value in pk.items():
            if key in {'X2', 'g1', 'g2'}:
                pk[key] = grp.deserialize(value.encode())
            else:
                pk[key] = [grp.deserialize(item.encode()) for item in value]
        if debug:
            print('idp pk:=', pk)

        # generate t and commitment: commitment = g1^t * Y1[0]^secret
        start = time.time()
        t = grp.random(ZR)
        gt = (pk['g1'] ** t)
        commitment_secret = pk['Y1'][0] ** secret
        commitment = gt * commitment_secret
        if debug:
            print("commitment: ", commitment)
        end = time.time()
        print("CRED.PrepareBlindSign over {0} attributes time elapse: {1}s ".format(str(1), str(end - start)))

        # second rt: schnorr proof of knowledge of `secret`
        start = time.time()
        na = grp.random()
        a = pk['Y1'][0] ** na
        # schnorr NIZK: generate nb (Fiat-Shamir challenge from transcript)
        m = hashlib.sha256()
        m.update(grp.serialize(pk['Y1'][0]))
        m.update(grp.serialize(a))
        m.update(grp.serialize(pk['Y1'][0] ** secret))
        m.update(b'userid')  # replaced with real values
        nb = m.digest()
        nb = grp.hash(nb)
        r = na + nb * secret
        end = time.time()
        print("NIZK Schnorr Prover (User-IdP) over {0} element time elapse: {1}s ".format(str(1), str(end - start)))

        json_param = {'g_t': grp.serialize(gt).decode(),
                      'commitment_secret': grp.serialize(commitment_secret).decode(),
                      'a': grp.serialize(a).decode(),
                      'r': grp.serialize(r).decode()}

        # Issue 20 token requests, sleeping to hold the request rate at
        # `running_frequency` per second.
        average_time = 0
        counter = 0
        for i in range(0, 20):
            time1 = time.time()
            print(time.time())
            json_rep = await fetch(session, 'https://' + idp_ip + ':6000/token', json_param)
            time2 = time.time()
            counter += 1
            average_time += (time2 - time1)
            if 1/running_frequency - (time2 - time1) > 0:
                time.sleep(1/running_frequency - (time2 - time1))
        print("average time per credential: ", str(average_time/counter))

    idp_end_time = time.time()
    print("IdP total time: {0}s ".format(str(idp_end_time - idp_start_time)))
def app_main():
    """Run the RP (relying party) Flask service.

    Fetches and deserializes the IdP public key once at startup, then serves
    /authenticate, which verifies the user's Schnorr proofs (including the two
    ElGamal ciphertext relations) and the randomized PS signature, returning a
    JSON success/failure status.

    Fix: the debug print of ``schnorr_pas`` was redaction-garbled in the
    original (`"Schnorr PAs: "******"Schnorr As: "`); reconstructed from the
    parallel prints of ``schnorr_as`` / ``schnorr_rs`` that follow it.
    """
    app = Flask(__name__, template_folder="templates")

    # parameters
    grp = PairingGroup('MNT224')
    ps = PS01(grp)

    # get idp public key; verify=False because the IdP uses an ad-hoc cert
    pk = requests.get('https://' + idp_ip + ':6000/', verify=False).json()
    for key, value in pk.items():
        if key in {'X2', 'g1', 'g2'}:
            pk[key] = grp.deserialize(value.encode())
        else:
            pk[key] = [grp.deserialize(item.encode()) for item in value]
    if debug:
        print('idp pk:=', pk, flush=True)

    @app.route('/authenticate', methods=['GET', 'POST'])
    def authenticate():
        # For the purpose of throughput measurement
        global user_auth_start_tp, counter
        if user_auth_start_tp == 0:
            user_auth_start_tp = time.time()
        print("RP Throughput: {0}s for {1} authentications".format(
            str(time.time() - user_auth_start_tp), str(counter)))
        counter += 1
        print("Authentication request content length: ",
              request.content_length, flush=True)
        authenticate_request = request.get_json()

        # first finish the Schnorr verification
        hidden_element_num = int(
            authenticate_request['additional_element_num'])
        # Public values (statement), prover commitments, and responses,
        # in matching positions.
        schnorr_pas = [
            grp.deserialize(authenticate_request['g2_t'].encode()),
            grp.deserialize(authenticate_request['user_id'].encode()),
            grp.deserialize(
                authenticate_request['commitment_secret'].encode()),
            grp.deserialize(authenticate_request['commitment_gamma'].encode()),
        ]
        cipher_1 = grp.deserialize(
            authenticate_request['el_cipher_1'].encode())
        cipher_2 = grp.deserialize(
            authenticate_request['el_cipher_2'].encode())
        schnorr_as = [
            grp.deserialize(authenticate_request['g2_t_a'].encode()),
            grp.deserialize(authenticate_request['user_id_a'].encode()),
            grp.deserialize(
                authenticate_request['commitment_secret_a'].encode()),
            grp.deserialize(
                authenticate_request['commitment_gamma_a'].encode()),
        ]
        cipher_1_a = grp.deserialize(
            authenticate_request['el_cipher_1_a'].encode())
        cipher_2_a = grp.deserialize(
            authenticate_request['el_cipher_2_a'].encode())
        schnorr_rs = [
            grp.deserialize(authenticate_request['g2_t_r'].encode()),
            grp.deserialize(authenticate_request['user_id_r'].encode()),
            grp.deserialize(
                authenticate_request['commitment_secret_r'].encode()),
            grp.deserialize(
                authenticate_request['commitment_gamma_r'].encode()),
        ]
        cipher_1_r = grp.deserialize(
            authenticate_request['el_cipher_1_r'].encode())
        cipher_2_r1 = grp.deserialize(
            authenticate_request['el_cipher_2_r1'].encode())
        cipher_2_r2 = grp.deserialize(
            authenticate_request['el_cipher_2_r2'].encode())
        schnorr_bases = [
            pk['g2'],
            grp.hash('domain', G1),
            pk['Y2'][0],
            pk['Y2'][1],
        ]
        # Extra (hidden) attributes proved alongside the fixed four.
        for i in range(hidden_element_num):
            schnorr_pas.append(
                grp.deserialize(authenticate_request[str(i) + 'th-pa'].encode()))
            schnorr_as.append(
                grp.deserialize(authenticate_request[str(i) + 'th-a'].encode()))
            schnorr_rs.append(
                grp.deserialize(authenticate_request[str(i) + 'th-r'].encode()))
            schnorr_bases.append(pk['Y2'][3 + i])
        if debug:
            print("Schnorr PAs: ")
            print(schnorr_pas)
            print("Schnorr As: ")
            print(schnorr_as)
            print("Schnorr Rs: ")
            print(schnorr_rs)

        # generate nbs: Fiat-Shamir challenge from the first transcript entries
        start = time.time()
        m = hashlib.sha256()
        m.update(grp.serialize(schnorr_bases[0]))
        m.update(grp.serialize(schnorr_as[0]))
        m.update(grp.serialize(schnorr_pas[0]))
        m.update(b'userid')  # replaced with real values
        nb = m.digest()
        nb = grp.hash(nb)

        # Verify base^r == a * pa^nb for every proved element.
        schnorr_result = True
        for i in range(len(schnorr_pas)):
            lh = schnorr_bases[i]**schnorr_rs[i]
            rh = schnorr_as[i] * schnorr_pas[i]**nb
            schnorr_result = (schnorr_result and (lh == rh))
            if debug and schnorr_result is False:
                print(i)
        # ciphertext 1
        lh = grp.hash('random2', G1)**cipher_1_r
        rh = cipher_1_a * cipher_1**nb
        schnorr_result = (schnorr_result and (lh == rh))
        # ciphertext 2
        authority_pk = pk['g1']**grp.hash('authority')
        lh = (authority_pk**cipher_2_r1) * (grp.hash('random1', G1)**
                                            cipher_2_r2)
        rh = cipher_2_a * cipher_2**nb
        schnorr_result = (schnorr_result and (lh == rh))
        end = time.time()
        print(
            "NIZK Schnorr Verifier (User-RP) over {0} element time elapse: {1}s "
            .format(str(len(schnorr_pas) + 2), str(end - start)), flush=True)
        if schnorr_result is True:
            if debug:
                print('schnorr checking succeeds', flush=True)
        else:
            if debug:
                print('schnorr checking fails', flush=True)
            return jsonify(status='failure')

        # PS signature proof
        start = time.time()
        rand_sig = [
            grp.deserialize(item.encode())
            for item in authenticate_request['rand_sig']
        ]
        expires_in = authenticate_request['expires_in']
        s1p, s2p = rand_sig
        messages = [
            schnorr_pas[2], schnorr_pas[3],
            pk['Y2'][2]**grp.hash(expires_in, ZR)
        ]
        for i in range(hidden_element_num):
            messages.append(schnorr_pas[4 + i])
        for i in range(3 + hidden_element_num, len(pk['Y2'])):
            messages.append(pk['Y2'][i]**grp.hash(str(i - 3) + 'th-element'))
        lh = pair(s1p, pk['X2'])
        lh = lh * ps.product([pair(s1p, m) for m in messages])
        lh = lh * pair(s1p, schnorr_pas[0])
        # lh_2 = pk['X2']
        # for m in messages:
        #     lh_2 = lh_2 * m
        # lh_2 = lh_2 * schnorr_pas[0]
        # lh = pair(s1p, lh_2)
        rh = pair(s2p, pk['g2'])
        end = time.time()
        print("Cred.Verify over {0} element time elapse: {1}s ".format(
            str(len(pk['Y2'])), str(end - start)), flush=True)
        if lh == rh:
            if debug:
                print('PS proof check succeeds', flush=True)
            return jsonify(status='success')
        else:
            if debug:
                print('PS proof check fails', flush=True)
                print('lh:=', lh, flush=True)
                print('rh:=', rh, flush=True)
            return jsonify(status='failure')

    app.run(host='0.0.0.0', port=6001, ssl_context='adhoc')
import hashlib
from . import AES
from .cpabe import PairingGroup, CPabe_sheme
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, GT, pair, serialize, deserialize

# Map department names to single-letter policy attributes.
# NOTE(review): the key '住院部' appears twice in this literal; the later
# entry ('F') silently overwrites the earlier one ('C'), so 'C' can never be
# produced — confirm which letter is intended.
attrs = {'医疗部': 'A', '门诊部': 'B', '住院部': 'C', '医务部': 'D',
         '护理部': 'E', '住院部': 'F', '院长': 'G'}
# policy = "G or (B and C)"

groupObj = PairingGroup('SS512')
cpabe = CPabe_sheme(groupObj)

# Load the master secret key: each line is "<name> <serialized element>".
msk = dict()
with open('login/msk.txt', 'r') as f:
    for line in f.readlines():
        line = line.strip().split()
        msk[line[0]] = groupObj.deserialize(bytes(line[1], encoding='utf-8'))

# Load the public key: split each line at the first space only, since the
# serialized value may itself contain no further delimiter guarantees.
pk = dict()
with open('login/pk.txt', 'r') as f:
    for line in f.readlines():
        line = line.strip()
        i = str.find(line, ' ')
        pk[line[:i]] = groupObj.deserialize(
            bytes(line[i+1:], encoding='utf-8'))


def genePolicy(attr):
    """Translate a comma-separated department list into an or-policy string.

    :param attr: comma-separated department names (keys of ``attrs``)
    NOTE(review): ``pol`` is built but never returned — the function appears
    to be missing a ``return pol``; confirm against the original file.
    """
    attr = attr.split(',')
    poli = []
    for att in attr:
        poli.append(attrs[att])
    pol = ' or '.join(poli)
    # print(pol)
# Scratch demo: XOR-mask a serialized pairing value and recover it.
# Fix: the original contained a stray bare expression ("a" juxtaposed with
# the deserialize call — interactive-session residue that is a syntax error
# in a module); the recovery check is kept as the assert below.
a = pair(a0, a1)

# Hash to ZR; mixes in fresh group randomness, so H2 is NOT deterministic
# across calls — each invocation yields a different mask.
H2 = lambda x: group.hash(str(x) + str(group.random(ZR)), ZR)

# b0 = group.random(G1)
# b1 = group.random(G2)
# b = pair(b0, b1)
b = group.random()

a_s = group.serialize(a)
b_s = group.serialize(b)
c_s = xor(H2(a_s), b_s)

# Unmasking with the same b_s must give back the original element.
assert a == group.deserialize(xor(c_s, b_s)), 'nope'

# Approaches that do NOT round-trip (kept for reference):
# c = group.deserialize(c_s)            # wrong
# c1 = group.deserialize(xor(a_s, b_s)) # wrong
# c_s1 = group.serialize(c)             # wrong
# c1_s = group.serialize(c1)            # wrong
# group.deserialize(xor(c1_s, b_s))     # wrong
# group.deserialize(xor(c_s1, b_s))     # wrong
def basic_test(number_of_basic_trials, number_of_attribute_authorities, number_of_attributes): """ :param number_of_basic_trials: Number of times to run the function and calculate the average :param number_of_attribute_authorities: Number of attribute authorities :param number_of_attributes: Number of attributes :return: """ # scheme setup group_object = PairingGroup('SS512') omacpabe = OMACPABE(group_object) GPP, GMK = omacpabe.abenc_casetup() users = {} # public user data authorities = {} # authority data dictionary attribute_authorities = [] # list of attribute authorities authorities_and_attributes = { } # dictionary of attributes and the matching authorities attribute_master_list = [] # master list of all possible attributes seed_length = int(number_of_attributes / number_of_attribute_authorities) seed_attributes = [ i + 1 for i in range(seed_length) ] # list comprehension to generate number list to aid in generation of attributes for i in range(number_of_attribute_authorities): attribute_authority_name = "AUTHORITY" + str( i + 1) # create attribute authorities attribute_authorities.append( attribute_authority_name) # add new authorities to overall list current_attribute_authority_attributes = [ ] # attributes for current authority for seed_attr in seed_attributes: authority_attribute = attribute_authority_name + "." 
+ str( seed_attr) # create attribute current_attribute_authority_attributes.append( authority_attribute ) # add attribute to authority attribute list authorities_and_attributes[ attribute_authority_name] = current_attribute_authority_attributes # add authority as key and its attributes as value to the dictionary attribute_master_list += current_attribute_authority_attributes # add attributes created to master attribute list for authority in authorities_and_attributes.keys(): omacpabe.abenc_aareg(GPP, authority, authorities_and_attributes[authority], authorities) alice = { 'id': 'alice', 'authoritySecretKeys': {}, 'keys': None } # new user alice alice['keys'], users[alice['id']] = omacpabe.abenc_userreg(GPP) for authority in authorities.keys(): alice['authoritySecretKeys'][authority] = {} for attr in authorities_and_attributes[authority]: omacpabe.abenc_keygen(GPP, authorities[authority], attr, users[alice['id']], alice['authoritySecretKeys'][authority]) plain_text_secret_key_group_element = group_object.random(GT) # showing usage of serialize and deserialize for converting group elements bit_string_from_group_element = group_object.serialize( plain_text_secret_key_group_element) assert isinstance(bit_string_from_group_element, object) group_element_from_bit_string = group_object.deserialize( bit_string_from_group_element) assert group_element_from_bit_string == plain_text_secret_key_group_element, 'SERIALIZATION ERROR!' 
policy_string = gp(attribute_master_list, number_of_attributes) # generate policy # benchmarking encryption_times = [ ] # list to hold encryption times for multiple iterations for i in range(number_of_basic_trials): start_time = clock() ciphertexts = omacpabe.abenc_encrypt( GPP, policy_string, plain_text_secret_key_group_element, authorities) duration = clock() - start_time encryption_times.append(duration) # average_encryption_time = sum(encryption_times) / len(encryption_times) # print("average encryption time = ", average_encryption_time) token, partially_decrypted_ciphertext = omacpabe.abenc_generatetoken( GPP, ciphertexts, alice['authoritySecretKeys'], alice['keys'][0]) decryption_times = [ ] # list to hold decryption times for multiple iterations for i in range(number_of_basic_trials): start_time = clock() plaintext = omacpabe.abenc_decrypt(partially_decrypted_ciphertext, token, alice['keys']) duration = clock() - start_time decryption_times.append(duration) # average_decryption_time = sum(decryption_times) / len(decryption_times) # print("average decryption time = ", average_decryption_time) assert plain_text_secret_key_group_element == plaintext, 'FAILED DECRYPTION!' # print('SUCCESSFUL DECRYPTION') return encryption_times, decryption_times
async def main():
    """Full client flow: obtain a blind PS credential from the IdP, unblind
    and verify it, then authenticate to the RP ``running_times`` times.
    """
    # group setup and secret generation
    setup_start_time = time.time()
    global grp, ps, pk, secret, messages, sig, gamma, expires_in
    grp = PairingGroup('MNT224')
    messages = ['random_string']
    secret = grp.hash(messages[0], ZR)
    ps = PS01(grp)
    pk = None
    setup_end_time = time.time()
    print("Setup total time: {0}s ".format(str(setup_end_time - setup_start_time)))

    # communicate with IdP
    print("****Communication with IdP****")
    idp_start_time = time.time()
    # ssl=False: certificate checking disabled (IdP uses an ad-hoc cert).
    async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
        # first rt: get idp public key
        pk = await fetch(session, 'https://' + idp_ip + ':6000/')
        # 'X2'/'g1'/'g2' are single serialized elements; the rest are lists.
        for key, value in pk.items():
            if key in {'X2', 'g1', 'g2'}:
                pk[key] = grp.deserialize(value.encode())
            else:
                pk[key] = [grp.deserialize(item.encode()) for item in value]
        if debug:
            print('idp pk:=', pk)

        # generate t and commitment: commitment = g1^t * Y1[0]^secret
        start = time.time()
        t = grp.random(ZR)
        gt = (pk['g1'] ** t)
        commitment_secret = pk['Y1'][0] ** secret
        commitment = gt * commitment_secret
        if debug:
            print("commitment: ", commitment)
        end = time.time()
        print("CRED.PrepareBlindSign over {0} attributes time elapse: {1}s ".format(str(1), str(end - start)))

        # second rt: schnorr proof of knowledge of `secret`
        start = time.time()
        na = grp.random()
        a = pk['Y1'][0] ** na
        # schnorr NIZK: generate nb (Fiat-Shamir challenge from transcript)
        m = hashlib.sha256()
        m.update(grp.serialize(pk['Y1'][0]))
        m.update(grp.serialize(a))
        m.update(grp.serialize(pk['Y1'][0] ** secret))
        m.update(b'userid')  # replaced with real values
        nb = m.digest()
        nb = grp.hash(nb)
        r = na + nb * secret
        end = time.time()
        print("NIZK Schnorr Prover (User-IdP) over {0} element time elapse: {1}s ".format(str(1), str(end - start)))

        json_param = {'g_t': grp.serialize(gt).decode(),
                      'commitment_secret': grp.serialize(commitment_secret).decode(),
                      'a': grp.serialize(a).decode(),
                      'r': grp.serialize(r).decode()}
        json_rep = await fetch(session, 'https://' + idp_ip + ':6000/token', json_param)

        # parse the reply
        id_token = json_rep['id_token']
        id_token = [grp.deserialize(item.encode()) for item in id_token]
        expires_in = json_rep['expires_in']
        gamma = json_rep['gamma']
        if debug:
            print('user id token:=', id_token)
            print('expires_in:=', expires_in)
            print('gamma:=', gamma)

        # unblind signature using the blinding factor t from above
        start = time.time()
        sig = ps.unblind_signature(t, id_token)
        end = time.time()
        print("CRED.Unblind time elapse: {0}s ".format(str(end - start)))

        # Rebuild the full attribute list the IdP signed, then verify.
        messages.append(gamma)
        messages.append(expires_in)
        for i in range(3, len(pk['Y1'])):
            messages.append(str(i - 3) + 'th-element')
        if debug:
            print(messages)
        result = ps.verify(pk, sig, *messages)
        assert result, 'invalid signature'
        print('Successfully verification')
        gamma = grp.hash(gamma, ZR)

    idp_end_time = time.time()
    print("IdP total time: {0}s ".format(str(idp_end_time - idp_start_time)))

    # communicate with RP
    rp_start_time = time.time()
    for i in range(running_times):
        print("\n****Communication with RP****")
        async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(ssl=False)) as session:
            json_param = generate_params_for_RP()
            json_rep = await fetch(session, 'https://' + rp_ip + ':6001/authenticate', json_param)
            if debug:
                print(json_rep)
    rp_end_time = time.time()
    print("RP total time: {0}s ".format(str(rp_end_time - rp_start_time)))
from charm.toolbox.symcrypto import SymmetricCryptoAbstraction, AuthenticatedCryptoAbstraction, MessageAuthenticator
from charm.toolbox.pairinggroup import PairingGroup, GT
from charm.core.math.pairing import hashPair as sha1

debug = 0
groupObj = PairingGroup('SS512')

# Read the serialized group element and derive the symmetric key from its hash.
with open('/Users/cirnotxm/down/pk', 'rb') as f:
    pk = f.read()
a = SymmetricCryptoAbstraction(sha1(groupObj.deserialize(pk)))

# Decrypt the ciphertext file and write out the recovered plaintext.
# Fix: the original try/finally closed `ffe`/`fpk` unconditionally, raising
# NameError if an earlier open() failed before they were bound; `with`
# closes each file exactly when it was successfully opened.
with open('/Users/cirnotxm/down/jiami', 'rb') as ffe:
    ct = ffe.read()
de = a.decrypt(ct)
with open('/Users/cirnotxm/down/jiemi.dmg', 'wb') as fpk:
    fpk.write(de)
from charm.toolbox.pairinggroup import PairingGroup, ZR, G1, G2, pair # need to install Charm # requires PKG to use symmetric mapping, i.e. (G1, G1)->G2 group = PairingGroup('SS512') # master secret key s = group.deserialize('0:RBthuauSAhEEpxuyKLh+jzsRYv0=') def pairing_key_negotiation(id1, id2): public_key_1 = group.hash(id1, G1) public_key_2 = group.hash(id2, G1) private_key_1 = s * public_key_1 key = pair(private_key_1, public_key_2) return group.serialize(key)
def main():
    """
    End-to-end demo / experiment driver for multi-authority ABE (DABE)
    wrapped in hybrid encryption (HybridABEncMA).

    Sets up two attribute authorities (JHU and JHMI), issues user keys for
    "bob" under several global-parameter (gp) and authority-key combinations,
    exercises key serialization (Charm serialize/deserialize and a custom
    JSON encoder/decoder), then runs a battery of decrypt attempts to probe
    which (gp, user-key, ciphertext) combinations succeed. Each attempt is
    wrapped in try/except so a failing combination prints its error and the
    experiment continues.
    """
    groupObj = PairingGroup('SS512')
    dabe = Dabe(groupObj)
    # Waste one random instance of the PRG sequence.
    #print(groupObj.random())
    hyb_abema = HybridABEncMA(dabe, groupObj)

    # Setup global parameters for all new authorities.
    gp = hyb_abema.setup()

    # Instantiate a few authorities.
    # Attribute names must be globally unique. HybridABEncMA
    # Two authorities may not issue keys for the same attribute.
    # Otherwise, the decryption algorithm will not know which private key to use.
    jhu_attributes = ['jhu.professor', 'jhu.staff', 'jhu.student']
    jhmi_attributes = [
        'jhmi.doctor', 'jhmi.nurse', 'jhmi.staff', 'jhmi.researcher'
    ]
    (jhuSK, jhuPK) = hyb_abema.authsetup(gp, jhu_attributes)
    (jhmiSK, jhmiPK) = hyb_abema.authsetup(gp, jhmi_attributes)
    allAuthPK = {}
    allAuthPK.update(jhuPK)
    allAuthPK.update(jhmiPK)

    # Generate new keys with same parameters, such that we demonstrate whether
    # ciphertexts encrypted with these new keys cannot be decrypted with old
    # user's keys.
    (jhuSKnew, jhuPKnew) = hyb_abema.authsetup(gp, jhu_attributes)
    (jhmiSKnew, jhmiPKnew) = hyb_abema.authsetup(gp, jhmi_attributes)
    allAuthPKnew = {}
    allAuthPKnew.update(jhuPKnew)
    allAuthPKnew.update(jhmiPKnew)

    # Setup a user with a few keys.
    bobs_gid = "20110615 [email protected] cryptokey"
    K = {}
    # A key to allow us to demonstrate that the generation creates the same
    # keys a second time; keygen is deterministic.
    Ksame = {
    }
    print("JhuSK: ", jhuSK)
    print("jhmiSK: ", jhmiSK)
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, K)
    #hyb_abema.keygen(gp, jhuSK,jhu_attributes, bobs_gid, K)  # Does not work; only one attribute at a time.
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, K)
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, Ksame)
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, Ksame)

    # Let's set up an "alternate" set of ABE secret keys for bob, generated
    # with the same attributes but with a different gp. Let's see whether
    # decryption is successful using different combinations of gp.
    gpAlternative = hyb_abema.setup()
    Kalternative = {}
    hyb_abema.keygen(gpAlternative, jhuSK, 'jhu.professor', bobs_gid,
                     Kalternative)
    hyb_abema.keygen(gpAlternative, jhmiSK, 'jhmi.researcher', bobs_gid,
                     Kalternative)

    # I will also recreate a set of keys for bob using original gp, such that
    # we check whether these keys decrypt old ciphertexts using same ABE
    # encryption keys and gp.
    Knew = {}
    hyb_abema.keygen(gp, jhuSK, 'jhu.professor', bobs_gid, Knew)
    hyb_abema.keygen(gp, jhmiSK, 'jhmi.researcher', bobs_gid, Knew)

    msg = b'Hello World, I am a sensitive record!'
    size = len(msg)
    policy_str = "(jhmi.doctor OR (jhmi.researcher AND jhu.professor))"
    # Older encrypt() argument order, kept for reference:
    #ct = hyb_abema.encrypt(allAuthPK, gp, msg, policy_str)
    #ctAlternative = hyb_abema.encrypt(allAuthPK, gpAlternative, msg, policy_str)
    #ctNewAuthorityKeys = hyb_abema.encrypt(allAuthPKnew, gp, msg, policy_str)
    ct = hyb_abema.encrypt(gp, allAuthPK, msg, policy_str)
    ctAlternative = hyb_abema.encrypt(gpAlternative, allAuthPK, msg, policy_str)
    ctNewAuthorityKeys = hyb_abema.encrypt(gp, allAuthPKnew, msg, policy_str)

    if debug:
        print("Ciphertext")
        print("c1 =>", ct['c1'])
        print("c2 =>", ct['c2'])
    # NOTE(review): reconstructed from a whitespace-mangled source; the exact
    # extent of the `if debug:` suite above is a best guess -- confirm against
    # the original file.
    print("\n\nUser secret key K:")
    print(K)
    print("\n\nUser secret key Ksame:")
    print(Ksame)
    print("\n\nPublic keys:")
    print(allAuthPK)
    print("\n\njhuSK key:")
    print(jhuSK)
    print(type(jhuSK['JHU.STUDENT']['alpha_i']))
    print(type(jhuSK['JHU.STUDENT']['y_i']))

    # Round-trip the user key through JSON using the project's custom
    # encoder/decoder (pairing elements need the curve parameter to decode).
    K_json = json.dumps(K, cls=jsonhelper.KeyEncoder, pairingCurve=groupObj.param)
    print("\n\nJSON representation of K:\n", K_json)
    K_fromJson = json.loads(K_json, cls=jsonhelper.KeyDecoder)
    print("\n\nDecoding K from JSON:\n", K_fromJson)
    #json.dumps(K)

    # Let's extract pieces of the secret key and attempt to serialize such
    # that JSON can manipulate them. Use Charm's pairinggroup.PairingGroup
    # serialize and deserialize methods (which are coded in C).
    print("\n\nOne secret key:")
    print(K['JHMI.RESEARCHER']['k'])
    print(K['JHMI.RESEARCHER']['k'].__class__.__name__)
    #print(K['JHMI.RESEARCHER']['k'].__class__.__dict__)
    #print(charm.core.math.pairing.ElementType.__name__)
    print(
        isinstance(type(K['JHMI.RESEARCHER']['k']),
                   charm.core.math.pairing.__class__))
    # Copy the object to a variable.
    obj = K['JHMI.RESEARCHER']['k']
    print(K['JHMI.RESEARCHER']['k'].__class__.__name__ == 'Element')
    # Test working with a variable.
    print(obj.__class__.__name__ == 'Element')
    print(K['JHMI.RESEARCHER']['k'])
    print("Serialized:")
    serialized_key = groupObj.serialize(K['JHMI.RESEARCHER']['k']).decode()
    print(serialized_key)
    print(type(serialized_key))
    #print(groupObj.Pairing)
    print("Deserialized:")
    print(groupObj.deserialize(serialized_key.encode()))
    # Let's try deserializing with another groupObj, but with same parameter.
    groupObjOther = PairingGroup(groupObj.param)
    print(groupObjOther.deserialize(serialized_key.encode()))
    print("Random 1: ", groupObj.random())
    print("Random 2: ", groupObj.random())
    print("Random 3: ", groupObj.random())

    # ----- Decryption matrix: each case probes one (gp, keys, ct) combo -----

    # Case 1: matching gp everywhere -- expected to succeed.
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys.")
        orig_msg = hyb_abema.decrypt(gp, K, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 2: user keys generated under gpAlternative, ciphertext under gp.
    try:
        print(
            "Decrypting message using gp for ciphertext and gpAlternative (Kalternative) for secret keys."
        )
        orig_msg = hyb_abema.decrypt(gp, Kalternative, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 3: ciphertext under gpAlternative, user keys under gp.
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gp for secret keys."
        )
        orig_msg = hyb_abema.decrypt(gp, K, ctAlternative)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 4: both ciphertext and user keys under gpAlternative, but decrypt
    # still called with gp.
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gpAlternative (Kalternative) for secret keys."
        )
        orig_msg = hyb_abema.decrypt(gp, Kalternative, ctAlternative)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 5: gpAlternative used consistently for keys, ciphertext AND the
    # decrypt call (authority ABE keys were still generated with gp).
    try:
        print(
            "Decrypting message using gpAlternative for ciphertext and gpAlternative (Kalternative) for secret keys, and gpAlternative for decrypt method."
        )
        print("Note that authority's ABE secret keys were generated with gp.")
        orig_msg = hyb_abema.decrypt(gpAlternative, Kalternative,
                                     ctAlternative)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 6: everything under gp, but decrypt called with gpAlternative.
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys, but gpAlternative for decrypt method."
        )
        orig_msg = hyb_abema.decrypt(gpAlternative, K, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 7: freshly regenerated user keys (Knew, same gp) against the old
    # ciphertext -- checks that keygen is deterministic enough to decrypt.
    try:
        print(
            "Decrypting message using gp for ciphertext and Knew for secret keys (generated with gp)."
        )
        orig_msg = hyb_abema.decrypt(gp, Knew, ct)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)

    # Case 8: old user keys against a ciphertext made with the NEW authority
    # keys (same attribute names) -- checks authority-key binding.
    try:
        print(
            "Decrypting message using gp for both ciphertext and secret keys, but a new ciphertext generated with new authority's keys with same attributes."
        )
        orig_msg = hyb_abema.decrypt(gp, K, ctNewAuthorityKeys)
        if debug:
            print("Result =>", orig_msg)
        assert orig_msg == msg, "Failed Decryption!!!"
        if debug:
            print("Successful Decryption!!!")
    except Exception as e:
        print(e)