def test_bilinear_math():
    """Sanity-check the bilinearity of the pairing, before and after
    preprocessing, plus division/inversion identities in each group."""
    from honeybadgermpc.betterpairing import ZR, G1, G2, GT, pair

    def check_bilinearity(x, y, z):
        # e(x^i, y) == e(x, y^i) == e(x, y)^i  and  e(x^i, y^i) == e(x, y)^(i^2)
        exp = ZR.random()
        assert pair(x**exp, y) == z**exp
        assert pair(x, y**exp) == z**exp
        assert pair(x**exp, y**exp) == z**(exp**2)

    a = G1.rand()
    b = G2.rand()
    c = pair(a, b)
    check_bilinearity(a, b, c)
    # Preprocessing (precomputed exponent tables) must not change results.
    a.preprocess(8)
    b.preprocess(3)
    c.preprocess(5)
    check_bilinearity(a, b, c)
    # Re-randomize via in-place exponentiation.
    a **= ZR.random()
    b **= ZR.random()
    c **= ZR.random()
    # Division is multiplication by the inverse; inversion is an involution.
    for elem, sample in ((a, G1.rand), (b, G2.rand), (c, GT.rand)):
        other = sample()
        assert elem / other == elem * other**-1
        assert (elem**-1)**-1 == elem
def _get_dealer_msg(self, values, n):
    # Faulty-dealer variant: builds commitments/witnesses for both the
    # original share polynomials (phis) and the redundant erasure-coding
    # polynomials (psis), then corrupts one share for one recipient.
    # Notice we currently required the number of values shared to be divisible by t+1.
    secret_count = len(values)
    # For each batch of t+1 original polys there are n-(t+1) redundant polys.
    redundant_poly_count = secret_count // (self.t + 1) * (n - (self.t + 1))
    r = ZR.random()  # common blinding randomness for all commitments
    phis = [
        self.poly.random(self.t, values[k]) for k in range(secret_count)
    ]
    psis = []
    orig_poly_commitments = [
        self.poly_commit.commit(phis[k], r) for k in range(secret_count)
    ]
    for batch_idx in range(secret_count // (self.t + 1)):
        base_idx = batch_idx * (self.t + 1)
        # Interpret the t+1 polys of this batch as evaluations at x=1..t+1
        # and extend them to points t+2..n via interpolation.
        known_polys = [[i + 1, phis[base_idx + i]] for i in range(self.t + 1)]
        psis.extend([
            poly_interpolate_at_x(self.poly, known_polys, i + 1)
            for i in range(self.t + 1, self.n)
        ])
    redundant_poly_commitments = [
        self.poly_commit.commit(psis[k], r) for k in range(redundant_poly_count)
    ]
    # Ephemeral DH keypair; each recipient derives a shared symmetric key.
    ephemeral_secret_key = self.field.random()
    ephemeral_public_key = pow(self.g, ephemeral_secret_key)
    dispersal_msg_list = [None] * n
    # Batch witnesses, strided so index j selects the j-th poly of each batch.
    orig_poly_witnesses = [
        self.poly_commit.double_batch_create_witness(
            phis[i::(self.t + 1)], r) for i in range(self.t + 1)
    ]
    redundant_poly_witnesses = [
        self.poly_commit.double_batch_create_witness(
            psis[i::(n - (self.t + 1))], r) for i in range(n - (self.t + 1))
    ]
    # Pick the recipient and the share index to corrupt (fault injection).
    fault_i = randint(1, n - 1)
    # fault_i = 4
    fault_k = randint(1, secret_count - 1)
    for i in range(n):
        shared_key = pow(self.public_keys[i], ephemeral_secret_key)
        orig_shares = [phis[k](i + 1) for k in range(secret_count)]
        if i == fault_i:
            # Replace one share with garbage for the chosen recipient.
            orig_shares[fault_k] = ZR.random()
        # redundant_shares = [psis[k](i + 1) for k in range(redundant_poly_count)]
        # Redundant shares are not required to send.
        z = (orig_shares, [orig_poly_witnesses[j][i] for j in range(self.t + 1)], [
            redundant_poly_witnesses[j][i] for j in range(n - (self.t + 1))
        ])
        zz = SymmetricCrypto.encrypt(str(shared_key).encode(), z)
        dispersal_msg_list[i] = zz
    return dumps((orig_poly_commitments, redundant_poly_commitments,
                  ephemeral_public_key)), dispersal_msg_list
def _get_dealer_msg(self, values, n):
    """Build the dealer's broadcast payload and per-party dispersal messages,
    deliberately corrupting one share for one recipient (fault injection).

    Returns a tuple ``(dumps((commitments, ephemeral_public_key)),
    dispersal_msg_list)``.
    """
    # Sample B random degree-(t) polynomials of form φ(·)
    # such that each φ_i(0) = si and φ_i(j) is Pj's share of si
    # The same as B (batch_size)
    fault_n = randint(1, n - 1)
    fault_k = randint(1, len(values) - 1)
    secret_count = len(values)
    phi = [None] * secret_count
    commitments = [None] * secret_count
    # BatchPolyCommit
    #   Cs  <- BatchPolyCommit(SP,φ(·,k))
    # All commitments share one blinding value r.
    r = ZR.random()
    for k in range(secret_count):
        phi[k] = self.poly.random(self.t, values[k])
        commitments[k] = self.poly_commit.commit(phi[k], r)
    # Ephemeral DH keypair; each recipient derives the shared symmetric key.
    ephemeral_secret_key = self.field.random()
    ephemeral_public_key = pow(self.g, ephemeral_secret_key)
    dispersal_msg_list = [None] * n
    witnesses = self.poly_commit.double_batch_create_witness(phi, r)
    for i in range(n):
        shared_key = pow(self.public_keys[i], ephemeral_secret_key)
        phis_i = [phi[k](i + 1) for k in range(secret_count)]
        if i == fault_n:
            # Corrupt one share for the chosen faulty recipient.
            phis_i[fault_k] = ZR.random()
        z = (phis_i, witnesses[i])
        zz = SymmetricCrypto.encrypt(str(shared_key).encode(), z)
        # Fix: the original assigned `dispersal_msg_list[i] = zz` twice in a
        # row — the redundant duplicate statement has been removed.
        dispersal_msg_list[i] = zz
    return dumps(
        (commitments, ephemeral_public_key)), dispersal_msg_list
def _get_dealer_msg(self, values, n):
    """Faulty dealer: commit to each secret, then send every party its
    encrypted shares — except one randomly chosen recipient, whose share
    and auxiliary values are replaced with random garbage."""
    faulty_recipient = randint(1, n - 1)
    num_secrets = len(values)
    phi, commitments, aux_poly = [], [], []
    for value in values:
        share_poly = self.poly.random(self.t, value)
        commitment, aux = self.poly_commit.commit(share_poly)
        phi.append(share_poly)
        commitments.append(commitment)
        aux_poly.append(aux)
    # Ephemeral DH keypair; recipients derive the shared symmetric key.
    ephemeral_secret_key = self.field.random()
    ephemeral_public_key = pow(self.g, ephemeral_secret_key)
    dispersal_msg_list = [None] * n
    for i in range(n):
        shared_key = pow(self.public_keys[i], ephemeral_secret_key)
        payload = [None] * num_secrets
        for k in range(num_secrets):
            witness = self.poly_commit.create_witness(
                phi[k], aux_poly[k], i + 1)
            if i == faulty_recipient:
                # Garbage share/aux for the faulty party; witness unchanged.
                payload[k] = (ZR.random(), ZR.random(), witness)
            else:
                payload[k] = (phi[k](i + 1), aux_poly[k](i + 1), witness)
        dispersal_msg_list[i] = SymmetricCrypto.encrypt(
            str(shared_key).encode(), payload)
    return dumps(
        (commitments, ephemeral_public_key)), dispersal_msg_list
def test_benchmark_create_witness(benchmark, t):
    """Benchmark PolyCommitConst witness creation for a degree-t polynomial."""
    alpha, g, h = ZR.random(), G1.rand(), G1.rand()
    pc = PolyCommitConst(gen_pc_const_crs(t, alpha=alpha, g=g, h=h))
    phi = polynomials_over(ZR).random(t)
    c, phi_hat = pc.commit(phi)
    # Warm the prover's precomputation tables before timing.
    pc.preprocess_prover(10)
    point = ZR.random()
    benchmark(pc.create_witness, phi, phi_hat, point)
def get_avss_params(n, t):
    """Generate two group generators plus n keypairs for AVSS tests.

    NOTE(review): ZR.random(0) passes 0 as the seed argument — presumably for
    reproducible test keys; confirm against the betterpairing API.
    """
    g, h = G1.rand(), G1.rand()
    private_keys = [ZR.random(0) for _ in range(n)]
    public_keys = [pow(g, sk) for sk in private_keys]
    return g, h, public_keys, private_keys
async def _run(peers, n, t, my_id, batch_size):
    # Run one HbAvssBatch node over real sockets. The dealer is an extra
    # (n+1)-th party with id == n operating in client mode; all other
    # parties are recipients that wait for their share and then stop.
    g, h, pks, sks = get_avss_params(n + 1, t)
    async with ProcessProgramRunner(peers, n + 1, t, my_id) as runner:
        send, recv = runner.get_send_recv("HBAVSS_BATCH")
        crs = gen_pc_const_crs(t, g=g, h=h)
        values = None
        dealer_id = n
        if my_id == dealer_id:
            # Dealer
            values = [ZR.random(0)] * batch_size
            logger.info("Starting DEALER")
            logger.info(f"Dealer timestamp: {time.time()}")
        else:
            logger.info("Starting RECIPIENT: %d", my_id)
        with HbAvssBatch(pks, sks[my_id], crs, n, t, my_id, send,
                         recv) as hbavss:
            begin_time = time.time()
            if my_id != dealer_id:
                hbavss_task = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id, values=values,
                                client_mode=True))
                # Block until this recipient's share arrives, then cancel.
                await hbavss.output_queue.get()
                end_time = time.time()
                logger.info(f"Recipient time: {(end_time - begin_time)}")
                hbavss_task.cancel()
            else:
                # Dealer runs the sharing to completion.
                await hbavss.avss(0, dealer_id=dealer_id, values=values,
                                  client_mode=True)
                end_time = time.time()
                logger.info(f"Dealer time: {(end_time - begin_time)}")
def test_hbacss0_pcl_max_faulty_shares(benchmark_router, benchmark,
                                       batch_multiple, t):
    # Benchmark Hbacss0 with PolyCommitLog when the maximum number of
    # recipients receive faulty shares (forcing the recovery path).
    from pypairing import G1, ZR
    loop = asyncio.get_event_loop()
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    # Batch size must be a multiple of t+1.
    values = [ZR.random()] * batch_multiple * (t + 1)
    crs = [g]
    params = (t, n, g, h, pks, sks, crs, values)

    def _prog():
        loop.run_until_complete(
            hbacss0_pcl_max_faulty_shares(benchmark_router, params))

    benchmark(_prog)


# # main function to be used with kernprof
# if __name__ == "__main__":
#     from pypairing import G1, ZR
#     def benchmark_router(n):
#         router = SimpleRouter(n)
#         return router.sends, router.recvs, router.broadcasts
#
#
#     loop = asyncio.get_event_loop()
#     t = 33
#     batch_multiple = 11
#     n = 3 * t + 1
#     g, h, pks, sks = get_avss_params_pyp(n, t)
#     values = [ZR.random()] * batch_multiple * (t + 1)
#     crs = [g]
#     params = (t, n, g, h, pks, sks, crs, values)
#
#     # loop.run_until_complete(hbacss2_pcl_all_correct(benchmark_router, params))
#     loop.run_until_complete(hbacss1_pcl_max_faulty_shares(benchmark_router, params))
def test_benchmark_create_witness(benchmark, t):
    """Benchmark PolyCommitLin witness creation for a degree-t polynomial."""
    generators = [G1.rand(), G1.rand()]
    pc = PolyCommitLin(generators)
    phi_hat = polynomials_over(ZR).random(t)
    point = ZR.random()
    benchmark(pc.create_witness, phi_hat, point)
async def test_hbavss_light_client_mode(test_router):
    # A client-mode dealer (id n, no secret key) shares a value with the
    # n server parties; their shares must interpolate back to the value.
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n + 1)
    crs = [g, h]
    value = ZR.random()
    avss_tasks = [None] * (n + 1)
    hbavss_list = [None] * n
    dealer_id = n
    with ExitStack() as stack:
        # The client dealer has no secret key (sks entry is None).
        client_hbavss = HbAvssLight(pks, None, crs, n, t, dealer_id,
                                    sends[dealer_id], recvs[dealer_id])
        stack.enter_context(client_hbavss)
        avss_tasks[n] = asyncio.create_task(
            client_hbavss.avss(0, value=value, client_mode=True))
        for i in range(n):
            hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i],
                                 recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            avss_tasks[i] = asyncio.create_task(
                hbavss.avss(0, dealer_id=dealer_id, client_mode=True))
            avss_tasks[i].add_done_callback(print_exception_callback)
        # Wait for every server party to output its share.
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
        shares = []
        for item in outputs:
            shares.append(item[2])
        # Shares at x=1..n must interpolate to the dealt value at x=0.
        assert polynomials_over(ZR).interpolate_at(
            zip(range(1, n + 1), shares)) == value
def get_avss_params(n, t, my_id):
    """Deterministically generate generators and n keypairs from fixed
    seeds; return only party my_id's private key."""
    g = G1.rand(seed=[0, 0, 0, 1])
    h = G1.rand(seed=[0, 0, 0, 2])
    private_keys = [ZR.random(seed=17 + i) for i in range(n)]
    public_keys = [pow(g, sk) for sk in private_keys]
    return g, h, public_keys, private_keys[my_id]
def _get_dealer_msg(self, value):
    # Faulty dealer: commits honestly but replaces the share at index 20
    # sent to one randomly chosen recipient with a random value.
    # NOTE(review): assumes the batch holds at least 21 values, otherwise
    # shares[20] raises IndexError — confirm against callers' batch sizes.
    if type(value) in (list, tuple):
        valuelist = value
    else:
        valuelist = [value]
    philist, commitlist, auxlist = [], [], []
    fault_i = randint(0, self.n - 1)
    for val in valuelist:
        phi = self.poly.random(self.t, val)
        philist.append(phi)
        commitment, aux_poly = self.poly_commit.commit(phi)
        commitlist.append(commitment)
        auxlist.append(aux_poly)
    # Ephemeral DH keypair; recipients derive the shared symmetric key.
    ephemeral_secret_key = self.field.random()
    ephemeral_public_key = pow(self.g, ephemeral_secret_key)
    z = [None] * self.n
    for i in range(self.n):
        shared_key = pow(self.public_keys[i], ephemeral_secret_key)
        shares, witnesses = [], []
        for phi in philist:
            shares.append(phi(i + 1))
        for aux in auxlist:
            witnesses.append(
                self.poly_commit.create_witness(aux, i + 1))
        if i == fault_i:
            # Inject the faulty share for the chosen recipient.
            shares[20] = ZR.random()
        z[i] = SymmetricCrypto.encrypt(
            str(shared_key).encode(), (shares, witnesses))
    return dumps((commitlist, ephemeral_public_key, z))
def get_avss_params_pyp(n, t):
    """Generate generators and n keypairs using the pypairing backend."""
    from pypairing import G1, ZR
    g, h = G1.rand(), G1.rand()
    private_keys = [ZR.random() for _ in range(n)]
    public_keys = [pow(g, sk) for sk in private_keys]
    return g, h, public_keys, private_keys
def test_benchmark_commit(benchmark, t):
    """Benchmark PolyCommitConst commitment for a degree-t polynomial."""
    crs = gen_pc_const_crs(t, alpha=ZR.random(), g=G1.rand(), h=G1.rand())
    pc = PolyCommitConst(crs)
    phi = polynomials_over(ZR).random(t)
    benchmark(pc.commit, phi)
def test_zr_math():
    """Exercise ZR construction from ints/strings and exponent arithmetic."""
    from honeybadgermpc.betterpairing import ZR
    # Constructors accept decimal strings, ints, and hex strings.
    assert ZR("2")**3 == 8
    assert ZR(200) / 10 == ZR(20)
    assert ZR(14) + 4 == 18
    assert ZR("0xa") - ZR(4) == 6
    x = ZR.random()
    # Negative exponents multiply additively; inverse cancels to identity.
    assert x**-3 * x**-5 == x**-8
    assert (x**-1) * x == x**0
    assert x**0 == 1
def test_pc_const():
    """PolyCommitConst: commitment matches the Pedersen form and evaluation
    proofs verify only at the correct index."""
    degree = 3
    alpha, g, h = ZR.random(), G1.rand(), G1.rand()
    pc = PolyCommitConst(gen_pc_const_crs(degree, alpha=alpha, g=g, h=h))
    phi = polynomials_over(ZR).random(degree)
    c, phi_hat = pc.commit(phi)
    witness = pc.create_witness(phi, phi_hat, 3)
    # c should equal g^phi(alpha) * h^phi_hat(alpha).
    assert c == g**phi(alpha) * h**phi_hat(alpha)
    assert pc.verify_eval(c, 3, phi(3), phi_hat(3), witness)
    # The same proof must fail at a different evaluation point.
    assert not pc.verify_eval(c, 4, phi(3), phi_hat(3), witness)
async def test_hbacss1(test_router): from pypairing import G1, ZR #from honeybadgermpc.betterpairing import G1, ZR t = 2 n = 3 * t + 1 g, h, pks, sks = get_avss_params_pyp(n, t) #g, h, pks, sks = get_avss_params(n, t) sends, recvs, _ = test_router(n) crs = gen_pc_const_dl_crs(t, g=g) pc = PolyCommitConstDL(crs) values = [ZR.random()] * 2 * (t + 1) avss_tasks = [None] * n dealer_id = randint(0, n - 1) shares = [None] * n with ExitStack() as stack: hbavss_list = [None] * n for i in range(n): hbavss = Hbacss1(pks, sks[i], crs, n, t, i, sends[i], recvs[i], pc=pc) hbavss_list[i] = hbavss stack.enter_context(hbavss) if i == dealer_id: avss_tasks[i] = asyncio.create_task( hbavss.avss(0, values=values)) else: avss_tasks[i] = asyncio.create_task( hbavss.avss(0, dealer_id=dealer_id)) avss_tasks[i].add_done_callback(print_exception_callback) outputs = await asyncio.gather( *[hbavss_list[i].output_queue.get() for i in range(n)]) shares = [output[2] for output in outputs] for task in avss_tasks: task.cancel() fliped_shares = list(map(list, zip(*shares))) recovered_values = [] for item in fliped_shares: recovered_values.append( polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), item))) assert recovered_values == values
def test_hbacss2_pcl_all_correct(benchmark_router, benchmark, t):
    """Benchmark Hbacss2 with PolyCommitLog when all parties behave."""
    from pypairing import G1, ZR
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    # Hbacss2 batches are sized in multiples of (t+1)^2.
    secrets = [ZR.random()] * 6 * (t + 1) * (t + 1)
    params = (t, n, g, h, pks, sks, [g], secrets)
    loop = asyncio.get_event_loop()

    def run_once():
        loop.run_until_complete(
            hbacss2_pcl_all_correct(benchmark_router, params))

    benchmark(run_once)
def test_serialization():
    """Round-trip each pairing element type through its pickle state."""
    from honeybadgermpc.betterpairing import ZR, G1, G2, GT
    zr, g1, g2, gt = ZR.random(), G1.rand(), G2.rand(), GT.rand()
    assert zr == ZR(zr.__getstate__())
    # assert g1 == G1(g1.__getstate__())
    assert g2 == G2(g2.__getstate__())
    assert gt == GT(gt.__getstate__())
    # G1 is restored via __setstate__ rather than the constructor.
    restored = G1()
    restored.__setstate__(g1.__getstate__())
    assert restored == g1
def test_hbacss1_pcl_max_faulty_shares(benchmark_router, benchmark,
                                       batch_multiple, t):
    """Benchmark Hbacss1 with PolyCommitLog under maximal faulty shares."""
    from pypairing import G1, ZR
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    # Batch size must be a multiple of t+1.
    secrets = [ZR.random()] * batch_multiple * (t + 1)
    params = (t, n, g, h, pks, sks, [g], secrets)
    loop = asyncio.get_event_loop()

    def run_once():
        loop.run_until_complete(
            hbacss1_pcl_max_faulty_shares(benchmark_router, params))

    benchmark(run_once)
def _get_dealer_msg(self, value):
    """Faulty dealer: one randomly chosen recipient's payload is encrypted
    under a random key instead of the DH shared key, so that recipient
    cannot decrypt its share."""
    corrupted = randint(0, self.n - 1)
    phi = self.poly.random(self.t, value)
    commitment, aux_poly = self.poly_commit.commit(phi)
    ephemeral_secret_key = self.field.random()
    ephemeral_public_key = pow(self.g, ephemeral_secret_key)
    z = [None] * self.n
    for i in range(self.n):
        witness = self.poly_commit.create_witness(aux_poly, i + 1)
        shared_key = pow(self.public_keys[i], ephemeral_secret_key)
        # Use a bogus random key for the corrupted recipient only.
        key = str(ZR.random()) if i == corrupted else str(shared_key)
        z[i] = SymmetricCrypto.encrypt(key.encode(),
                                       ([phi(i + 1)], [witness]))
    return dumps(([commitment], ephemeral_public_key, z))
def test_hbacss2_actual_pcl_all_correct(benchmark_router, benchmark,
                                        batch_multiple, t):
    """Benchmark Hbacss2 with a real, preprocessed PolyCommitLog instance."""
    from pypairing import G1, ZR
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    # Hbacss2 batches are sized in multiples of (t+1)^2.
    secrets = [ZR.random()] * batch_multiple * (t + 1) * (t + 1)
    params = (t, n, g, h, pks, sks, [g], secrets)
    # Warm both prover and verifier precomputation before timing.
    pcl = PolyCommitLog(degree_max=t)
    pcl.preprocess_verifier(16)
    pcl.preprocess_prover(16)
    loop = asyncio.get_event_loop()

    def run_once():
        loop.run_until_complete(
            hbacss2_actual_pcl_all_correct(benchmark_router, params, pcl))

    benchmark(run_once)
async def test_hbacss0(test_router):
    # End-to-end Hbacss0 sharing among n parties with a random dealer;
    # each batch of shares must interpolate back to the dealt values.
    from pypairing import G1, ZR
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    sends, recvs, _ = test_router(n)
    # TODO: add configurable crs specifically for poly_commit_log
    crs = [g]
    values = [ZR.random()] * (t + 1)
    avss_tasks = [None] * n
    dealer_id = randint(0, n - 1)
    shares = [None] * n
    with ExitStack() as stack:
        hbavss_list = [None] * n
        for i in range(n):
            hbavss = Hbacss0(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, values=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
    # Transpose so each row holds one secret's shares across parties.
    fliped_shares = list(map(list, zip(*shares)))
    recovered_values = []
    for item in fliped_shares:
        recovered_values.append(
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), item)))
    assert recovered_values == values
async def test_hbavss_batch_batch(test_router):
    # HbAvssBatch shares a batch of 50 values among n parties; every
    # secret's shares must interpolate back to the dealt value.
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = gen_pc_const_crs(t, g=g, h=h)
    values = [ZR.random()] * 50
    avss_tasks = [None] * n
    dealer_id = randint(0, n - 1)
    shares = [None] * n
    with ExitStack() as stack:
        hbavss_list = [None] * n
        for i in range(n):
            hbavss = HbAvssBatch(pks, sks[i], crs, n, t, i, sends[i],
                                 recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, values=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
    # Transpose so each row holds one secret's shares across parties.
    fliped_shares = list(map(list, zip(*shares)))
    recovered_values = []
    for item in fliped_shares:
        recovered_values.append(
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), item)))
    assert recovered_values == values
async def test_hbavss_light_share_open(test_router):
    # Share a value with HbAvssLight, then open it through the MPC
    # Share abstraction and check the reconstruction matches.
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    value = ZR.random()
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i],
                                 recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, value=value))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    shares = []
    for item in outputs:
        shares.append(item[2])

    async def _prog(context):
        # Each party wraps its share and opens it via the MPC runtime.
        share_value = context.field(shares[context.myid])
        assert await context.Share(share_value).open() == value

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
async def test_hbavss_light_batch(test_router):
    # HbAvssLight with a batch of 50 values; each value's shares across
    # parties must interpolate back to the dealt value.
    t = 2
    n = 3 * t + 1
    batchsize = 50
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    values = [int(ZR.random()) for _ in range(batchsize)]
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i],
                                 recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, value=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        # shares = await asyncio.gather(*avss_tasks)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # Regroup outputs so shares[j] holds all parties' shares of value j.
    shares = [[] for _ in range(batchsize)]
    for i in range(n):
        for j in range(batchsize):
            shares[j].append(outputs[i][2][j])
    for j in range(batchsize):
        assert (polynomials_over(ZR).interpolate_at(
            zip(range(1, n + 1), shares[j])) == values[j])
def gen_pc_const_crs(t, alpha=None, g=None, h=None, ghat=None):
    """Generate a CRS for the constant-size polynomial commitment scheme.

    Returns ``[gs, ghats, hs]`` where ``gs[i] = g**(alpha**i)`` and
    ``hs[i] = h**(alpha**i)`` for i in 0..t, and ``ghats`` holds the first
    two powers of ``ghat`` in G2 (used for pairing-based verification).

    Any of ``alpha``/``g``/``h``/``ghat`` may be supplied; missing ones are
    derived deterministically from fixed seeds for reproducibility.
    """
    nonetype = type(None)
    assert type(t) is int
    assert type(alpha) in (ZR, int, nonetype)
    assert type(g) in (G1, nonetype)
    assert type(h) in (G1, nonetype)
    assert type(ghat) in (G2, nonetype)
    if alpha is None:
        alpha = ZR.random(0)
    if g is None:
        g = G1.rand([0, 0, 0, 1])
    if h is None:
        # Fix: h previously defaulted to the same seed as g ([0, 0, 0, 1]),
        # making h == g and destroying the hiding property of the
        # Pedersen-style commitment g**phi(alpha) * h**phi_hat(alpha).
        # h must be an independent generator.
        h = G1.rand([0, 0, 0, 2])
    if ghat is None:
        ghat = G2.rand([0, 0, 0, 1])
    # Precompute the powers of alpha in the exponent for each generator.
    gs = [g**(alpha**i) for i in range(t + 1)]
    ghats = [ghat**(alpha**i) for i in range(2)]
    hs = [h**(alpha**i) for i in range(t + 1)]
    crs = [gs, ghats, hs]
    return crs
async def test_hbavss_batch_encryption_fault(test_router):
    # A dealer that sends garbage shares to one recipient must not prevent
    # the honest majority from reconstructing the dealt values.
    class BadDealer(HbAvssBatch):
        def _get_dealer_msg(self, values, n):
            # Same as the honest dealer, except one randomly chosen
            # recipient gets random share/aux values (witnesses unchanged).
            fault_n = randint(1, n - 1)
            secret_size = len(values)
            phi = [None] * secret_size
            commitments = [None] * secret_size
            aux_poly = [None] * secret_size
            for k in range(secret_size):
                phi[k] = self.poly.random(self.t, values[k])
                commitments[k], aux_poly[k] = self.poly_commit.commit(phi[k])
            ephemeral_secret_key = self.field.random()
            ephemeral_public_key = pow(self.g, ephemeral_secret_key)
            dispersal_msg_list = [None] * n
            for i in range(n):
                shared_key = pow(self.public_keys[i], ephemeral_secret_key)
                z = [None] * secret_size
                for k in range(secret_size):
                    witness = self.poly_commit.create_witness(
                        phi[k], aux_poly[k], i + 1)
                    if i == fault_n:
                        z[k] = (ZR.random(), ZR.random(), witness)
                    else:
                        z[k] = (phi[k](i + 1), aux_poly[k](i + 1), witness)
                zz = SymmetricCrypto.encrypt(str(shared_key).encode(), z)
                dispersal_msg_list[i] = zz
            return dumps(
                (commitments, ephemeral_public_key)), dispersal_msg_list

    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = gen_pc_const_crs(t, g=g, h=h)
    values = [ZR.random()] * (t + 1)
    avss_tasks = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        hbavss_list = []
        for i in range(n):
            # Only the dealer party runs the faulty implementation.
            if i == dealer_id:
                hbavss = BadDealer(pks, sks[i], crs, n, t, i, sends[i],
                                   recvs[i])
            else:
                hbavss = HbAvssBatch(pks, sks[i], crs, n, t, i, sends[i],
                                     recvs[i])
            hbavss_list.append(hbavss)
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, values=values))
                avss_tasks[i].add_done_callback(print_exception_callback)
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
                avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
    # Transpose so each row holds one secret's shares across parties.
    fliped_shares = list(map(list, zip(*shares)))
    recovered_values = []
    for item in fliped_shares:
        recovered_values.append(
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), item)))
    assert recovered_values == values
async def test_hbavss_light_batch_share_fault(test_router):
    # A dealer that corrupts one share (index 20) for one recipient must
    # not prevent the honest parties from reconstructing every value.
    class BadDealer(HbAvssLight):
        def _get_dealer_msg(self, value):
            # Honest commitments, but shares[20] for one randomly chosen
            # recipient is replaced with a random value. Assumes the batch
            # holds at least 21 values (the test uses batchsize=50).
            if type(value) in (list, tuple):
                valuelist = value
            else:
                valuelist = [value]
            philist, commitlist, auxlist = [], [], []
            fault_i = randint(0, self.n - 1)
            for val in valuelist:
                phi = self.poly.random(self.t, val)
                philist.append(phi)
                commitment, aux_poly = self.poly_commit.commit(phi)
                commitlist.append(commitment)
                auxlist.append(aux_poly)
            ephemeral_secret_key = self.field.random()
            ephemeral_public_key = pow(self.g, ephemeral_secret_key)
            z = [None] * self.n
            for i in range(self.n):
                shared_key = pow(self.public_keys[i], ephemeral_secret_key)
                shares, witnesses = [], []
                for phi in philist:
                    shares.append(phi(i + 1))
                for aux in auxlist:
                    witnesses.append(
                        self.poly_commit.create_witness(aux, i + 1))
                if i == fault_i:
                    shares[20] = ZR.random()
                z[i] = SymmetricCrypto.encrypt(
                    str(shared_key).encode(), (shares, witnesses))
            return dumps((commitlist, ephemeral_public_key, z))

    t = 2
    n = 3 * t + 1
    batchsize = 50
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    values = [int(ZR.random()) for _ in range(batchsize)]
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            # Only the dealer party runs the faulty implementation.
            if i == dealer_id:
                hbavss = BadDealer(pks, sks[i], crs, n, t, i, sends[i],
                                   recvs[i])
            else:
                hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i],
                                     recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, value=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        # shares = await asyncio.gather(*avss_tasks)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # Regroup outputs so shares[j] holds all parties' shares of value j.
    shares = [[] for _ in range(batchsize)]
    for i in range(n):
        for j in range(batchsize):
            shares[j].append(outputs[i][2][j])
    for j in range(batchsize):
        assert (polynomials_over(ZR).interpolate_at(
            zip(range(1, n + 1), shares[j])) == values[j])
async def test_hbavss_light_parallel_share_array_open(test_router):
    # Run k parallel HbAvssLight sharings, then open all resulting shares
    # as a ShareArray and check the opened set equals the dealt values.
    t = 2
    n = 3 * t + 1
    k = 4
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    values = [int(ZR.random()) for _ in range(k)]
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        avss_tasks = [None] * n
        hbavss_list = [None] * n
        for i in range(n):
            hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i],
                                 recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                v, d = values, None
            else:
                v, d = None, dealer_id
            avss_tasks[i] = asyncio.create_task(
                hbavss.avss_parallel(0, k, v, d))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = [None] * k
        for j in range(k):
            outputs[j] = await asyncio.gather(
                *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # Sort the outputs incase they're out of order
    # (outputs[i][j][1] is the avss round id of party j's i-th output).
    round_outputs = [[[] for __ in range(n)] for _ in range(k)]
    for i in range(k):
        for j in range(n):
            round_outputs[outputs[i][j][1]][j] = outputs[i][j]
    shares = [[] for _ in range(n)]
    for i in range(k):
        round_output = round_outputs[i]
        for j in range(len(round_output)):
            shares[j].append(round_output[j][2])

    async def _prog(context):
        share_values = list(map(context.field, shares[context.myid]))
        opened_shares = set(await context.ShareArray(share_values).open())
        # The set of opened share should have exactly `k` values
        assert len(opened_shares) == k
        # All the values in the set of opened shares should be from the initial values
        for i in opened_shares:
            assert i.value in values

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()