def test_double_batch_pc_log_batch_differing_degrees(t):
    """batch_verify_eval must accept only the degree actually committed to."""
    pc = PolyCommitLog()
    n = 2 * t + 1
    polys = [polynomials_over(ZR).random(t) for _ in range(2)]
    r = ZR.random()
    commits = [pc.commit(p, r) for p in polys]
    witnesses = pc.double_batch_create_witness(polys, r, n=n)
    evals = [p(4) for p in polys]
    # Party 3's witness (0-indexed) opens the evaluations at point 4.
    assert pc.batch_verify_eval(commits, 4, evals, witnesses[3])
    # Explicitly passing the true degree also verifies.
    assert pc.batch_verify_eval(commits, 4, evals, witnesses[3], degree=t)
    # Any other claimed degree must be rejected.
    assert not pc.batch_verify_eval(commits, 4, evals, witnesses[3], degree=t + 1)
    assert not pc.batch_verify_eval(commits, 4, evals, witnesses[3], degree=t - 1)
def test_double_batch_pc_log_batch_prove_and_verify(t):
    """Batched witnesses verify only for the matching (commitments, point,
    evaluations, witness) tuple.

    Fix: removed the unused local `phi3`, which drew randomness without ever
    being committed to or verified.
    """
    pc = PolyCommitLog()
    phi1 = polynomials_over(ZR).random(t)
    phi2 = polynomials_over(ZR).random(t)
    r = ZR.random()
    c1 = pc.commit(phi1, r)
    c2 = pc.commit(phi2, r)
    witnesses = pc.double_batch_create_witness([phi1, phi2], r)
    # Correct tuple verifies.
    assert pc.batch_verify_eval([c1, c2], 4, [phi1(4), phi2(4)], witnesses[3])
    # Wrong evaluation point fails.
    assert not pc.batch_verify_eval([c1, c2], 3, [phi1(4), phi2(4)], witnesses[3])
    # Wrong party's witness fails.
    assert not pc.batch_verify_eval([c1, c2], 4, [phi1(4), phi2(4)], witnesses[2])
    # Wrong commitment list fails.
    assert not pc.batch_verify_eval([c1, c1], 4, [phi1(4), phi2(4)], witnesses[3])
async def test_hbavss_light_client_mode(test_router):
    """A dealing-only client (index n, no secret key) shares a value that the
    n servers can jointly reconstruct."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    # One extra channel pair for the dealing client.
    sends, recvs, _ = test_router(n + 1)
    crs = [g, h]
    value = ZR.random()
    dealer_id = n
    avss_tasks = [None] * (n + 1)
    hbavss_list = [None] * n
    with ExitStack() as stack:
        client = HbAvssLight(pks, None, crs, n, t, dealer_id,
                             sends[dealer_id], recvs[dealer_id])
        stack.enter_context(client)
        avss_tasks[n] = asyncio.create_task(
            client.avss(0, value=value, client_mode=True))
        for i in range(n):
            node = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = node
            stack.enter_context(node)
            task = asyncio.create_task(
                node.avss(0, dealer_id=dealer_id, client_mode=True))
            task.add_done_callback(print_exception_callback)
            avss_tasks[i] = task
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        for task in avss_tasks:
            task.cancel()
        shares = [item[2] for item in outputs]
        # Interpolating all n shares at 0 must recover the dealt value.
        assert polynomials_over(ZR).interpolate_at(
            zip(range(1, n + 1), shares)) == value
async def test_HbACSS1Recoverer(test_router):
    """All n recoverer players run to completion given valid shares/proofs."""
    t = 2
    n = 3 * t + 1
    poly = polynomials_over(ZR)
    secrets = [ZR.random() for _ in range(t + 1)]
    secretpolys = [poly.random(t, s) for s in secrets]
    crs = gen_pc_const_dl_crs(t)
    pc = PolyCommitConstDL(crs)
    commits = [pc.commit(phi) for phi in secretpolys]
    # witnesses[i][j] is the proof for party i's share of polynomial j.
    witnesses = [[pc.create_witness(phi, i) for phi in secretpolys]
                 for i in range(1, n + 1)]
    shares = [[phi(i) for phi in secretpolys] for i in range(1, n + 1)]
    sends, recvs, _ = test_router(n)
    loop = asyncio.get_event_loop()
    players = [
        HbACSS1Recoverer(crs, n, t, i, sends[i], recvs[i], shares[i], True,
                         commits, witnesses[i], pc=pc)
        for i in range(n)
    ]
    playertasks = [loop.create_task(p._run()) for p in players]
    for task in playertasks:
        task.add_done_callback(print_exception_callback)
    #loop.run_forever()
    await asyncio.gather(*playertasks)
def test_pc_log_batch(t):
    """A batch-created witness verifies a single evaluation."""
    pc = PolyCommitLog()
    poly = polynomials_over(ZR).random(t)
    blind = ZR.random()
    com = pc.commit(poly, blind)
    witnesses = pc.batch_create_witness(poly, blind)
    # Witnesses are 0-indexed by party; party 3 opens the evaluation at 4.
    assert pc.verify_eval(com, 4, poly(4), witnesses[3])
def __init__(self, public_keys, private_key, crs, n, t, my_id, send, recv,
             pc=None, field=ZR):  # (# noqa: E501)
    """Set up keys, channels, and the polynomial commitment scheme."""
    self.public_keys = public_keys
    self.private_key = private_key
    self.n = n
    self.t = t
    self.my_id = my_id
    self.g = crs[0]

    # Split the single `recv` channel into per-tag channels.
    self.subscribe_recv_task, self.subscribe_recv = subscribe_recv(recv)

    # Split the single `send` channel into per-tag channels.
    def _send(tag):
        return wrap_send(tag, send)

    self.get_send = _send

    # Shares are pushed here the moment they are produced, which is
    # especially helpful when running multiple AVSSes in parallel.
    self.output_queue = asyncio.Queue()

    self.field = field
    self.poly = polynomials_over(self.field)
    if pc is None:
        self.poly_commit = PolyCommitLin(crs, field=self.field)
        self.poly_commit.preprocess(5)
    else:
        self.poly_commit = pc
def test_benchmark_create_witness(benchmark, t):
    """Benchmark witness creation for the linear polynomial commitment."""
    pc = PolyCommitLin([G1.rand(), G1.rand()])
    phi_hat = polynomials_over(ZR).random(t)
    point = ZR.random()
    benchmark(pc.create_witness, phi_hat, point)
def test_benchmark_commit(benchmark, t):
    """Benchmark committing to a degree-t polynomial."""
    alpha = ZR.random()
    generator = G1.rand()
    blind_generator = G1.rand()
    crs = gen_pc_const_crs(t, alpha=alpha, g=generator, h=blind_generator)
    pc = PolyCommitConst(crs)
    phi = polynomials_over(ZR).random(t)
    benchmark(pc.commit, phi)
def test_poly_commit():
    """Commit/witness/verify round-trips for a random degree and point."""
    poly_commit = PolyCommitLin([G1.rand(), G1.rand()])
    degree = randint(10, 50)
    phi = polynomials_over(ZR).random(degree)
    cs, aux = poly_commit.commit(phi)
    point = randint(0, degree - 1)
    witness = poly_commit.create_witness(aux, point)
    assert poly_commit.verify_eval(cs, point, phi(point), witness)
def commit(self, phi):
    """Commit to `phi` coefficient-wise.

    Each c_i = g^phi_i * h^phihat_i, with a fresh random blinding
    polynomial of the same degree. Returns (commitment list, blinding poly).
    """
    degree = len(phi.coeffs) - 1
    phi_hat = polynomials_over(self.field).random(degree)
    cs = []
    for coeff, blind in zip(phi.coeffs, phi_hat.coeffs):
        cs.append(pow(self.g, coeff) * pow(self.h, blind))
    return cs, phi_hat
def test_hbacss2_size_benchmark_batch_creation(benchmark, t):
    """Benchmark creating batched witnesses for t random polynomials."""
    pc = PolyCommitLog(degree_max=t)
    pc.preprocess_prover(16)
    r = ZR.random()
    phis = [polynomials_over(ZR).random(t) for _ in range(t)]
    benchmark(pc.double_batch_create_witness, phis, r)
def create_witness(self, phi, i):
    """Build the evaluation witness for phi(i) via the quotient polynomial."""
    poly = polynomials_over(self.field)
    divisor = poly([-1 * i, 1])  # the monic linear factor (x - i)
    # psi = (phi(x) - phi(i)) / (x - i); division is exact by construction.
    psi = (phi - poly([phi(i)])) / divisor
    witness = G1.identity()
    # Multi-exponentiation of psi's coefficients against all but the
    # last CRS generator.
    for base, coeff in zip(self.gs[:-1], psi.coeffs):
        witness *= base ** coeff
    return witness
def test_pc_log(t):
    """A witness verifies only for its own point and commitment."""
    pc = PolyCommitLog()
    poly = polynomials_over(ZR).random(t)
    # ToDo: see if other polycommits return the commit randomness
    # rather than expecting it as arg
    blind = ZR.random()
    com = pc.commit(poly, blind)
    proof = pc.create_witness(poly, blind, 3)
    assert pc.verify_eval(com, 3, poly(3), proof)
    # Wrong evaluation point must fail.
    assert not pc.verify_eval(com, 4, poly(3), proof)
    # Wrong commitment must fail.
    assert not pc.verify_eval(G1.rand(), 3, poly(3), proof)
def test_benchmark_create_witness(benchmark, t):
    """Benchmark witness creation for the constant-size commitment."""
    alpha = ZR.random()
    crs = gen_pc_const_crs(t, alpha=alpha, g=G1.rand(), h=G1.rand())
    pc = PolyCommitConst(crs)
    phi = polynomials_over(ZR).random(t)
    c, phi_hat = pc.commit(phi)
    pc.preprocess_prover(10)
    point = ZR.random()
    benchmark(pc.create_witness, phi, phi_hat, point)
def __init__(self, ctx, field=Field, g=g, h=h):
    """Wire up networking and parameters from the execution context."""
    self.ctx = ctx
    self.send, self.recv = ctx.send, ctx.recv
    self.N, self.t, self.my_id = ctx.N, ctx.t, ctx.myid
    # Group generators used by the commitment scheme.
    self.g, self.h = g, h
    self.field = field
    self.vss_id = 0
    self.poly = polynomials_over(self.field)
def commit(self, phi):
    """Constant-size commitment to `phi` with a fresh blinding polynomial.

    Returns (c, phi_hat) where c multiplies every gs_i^phi_i and
    hs_i^phihat_i together.
    """
    phi_hat = polynomials_over(self.field).random(self.t)
    c = G1.one()
    for base, coeff in zip(self.gs, phi.coeffs):
        c *= base ** coeff
    for base, coeff in zip(self.hs, phi_hat.coeffs):
        c *= base ** coeff
    # c should equal g **(phi(alpha)) h **(phi_hat(alpha))
    return c, phi_hat
def test_pc_const():
    """Commitment opens to phi/phi_hat at the trapdoor and verifies at 3."""
    t = 3
    alpha = ZR.random()
    g, h = G1.rand(), G1.rand()
    crs = gen_pc_const_crs(t, alpha=alpha, g=g, h=h)
    pc = PolyCommitConst(crs)
    phi = polynomials_over(ZR).random(t)
    c, phi_hat = pc.commit(phi)
    witness = pc.create_witness(phi, phi_hat, 3)
    # The commitment equals both polynomials evaluated at the trapdoor.
    assert c == g ** phi(alpha) * h ** phi_hat(alpha)
    assert pc.verify_eval(c, 3, phi(3), phi_hat(3), witness)
    # Wrong evaluation point must fail.
    assert not pc.verify_eval(c, 4, phi(3), phi_hat(3), witness)
async def test_hbacss1(test_router):
    """Shares output by all n Hbacss1 nodes interpolate back to the dealt values."""
    from pypairing import G1, ZR
    #from honeybadgermpc.betterpairing import G1, ZR
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    #g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = gen_pc_const_dl_crs(t, g=g)
    pc = PolyCommitConstDL(crs)
    values = [ZR.random()] * 2 * (t + 1)
    dealer_id = randint(0, n - 1)
    avss_tasks = [None] * n
    with ExitStack() as stack:
        hbavss_list = [None] * n
        for i in range(n):
            node = Hbacss1(pks, sks[i], crs, n, t, i, sends[i], recvs[i], pc=pc)
            hbavss_list[i] = node
            stack.enter_context(node)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(node.avss(0, values=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    node.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
        # Transpose so each row holds every party's share of one value.
        transposed = list(map(list, zip(*shares)))
        recovered_values = [
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), row))
            for row in transposed
        ]
        assert recovered_values == values
async def _get_inputmask(self, idx):
    """Privately reconstruct input mask `idx` from every server's share."""
    contract_concise = ConciseContract(self.contract)
    n = contract_concise.n()
    poly = polynomials_over(field)
    eval_point = EvalPoint(field, n, use_omega_powers=False)
    # Request a share of the mask from each of the n servers concurrently.
    pending = [self.req_mask(i, idx) for i in range(n)]
    shares = await asyncio.gather(*pending)
    shares = [(eval_point(i), share) for i, share in enumerate(shares)]
    # Interpolate the shares at 0 to recover the mask.
    return poly.interpolate_at(shares, 0)
def gen_accumulator_tree(n):
    """Build a product tree over the n-th roots of unity.

    tree[k][j] is the j'th node in row k (bottom row is row 0). Leaves are
    the monic linear polynomials (x - omega_i), placed in bit-reversed
    order; every parent is the product of its two children.
    """
    omegas = get_all_roots_of_unity(n)
    numlevels = ceil(log(n, 2)) + 1
    # Row k has half as many nodes as row k-1.
    tree = [2 ** i * [None] for i in reversed(range(numlevels))]
    poly = polynomials_over(ZR)
    for i, omega in enumerate(omegas):
        tree[0][bit_reverse(i, numlevels - 1)] = poly([ZR(-1) * omega, ZR(1)])
    for level in range(1, numlevels):
        for j in range(len(tree[level])):
            tree[level][j] = tree[level - 1][2 * j] * tree[level - 1][2 * j + 1]
    return tree
def create_witness(self, phi, phi_hat, i):
    """Witness for the blinded opening (phi(i), phi_hat(i))."""
    poly = polynomials_over(self.field)
    divisor = poly([-1 * i, 1])  # the monic linear factor (x - i)
    # Quotients of phi and its blinding polynomial; division is exact.
    psi = (phi - poly([phi(i)])) / divisor
    psi_hat = (phi_hat - poly([phi_hat(i)])) / divisor
    witness = G1.one()
    # Fold both quotients' coefficients into one group element using the
    # two CRS generator vectors (all but the last entry of each).
    for base, coeff in zip(self.gs[:-1], psi.coeffs):
        witness *= base ** coeff
    for base, coeff in zip(self.hs[:-1], psi_hat.coeffs):
        witness *= base ** coeff
    return witness
async def robust_reconstruct(field_futures, field, n, t, point, degree):
    """Incrementally decode shares as they resolve; return (poly, errors).

    Returns (None, None) if the futures are exhausted before decoding
    succeeds.
    """
    algo = Algorithm.FFT if point.use_omega_powers else Algorithm.VANDERMONDE
    enc = EncoderFactory.get(point, algo)
    dec = DecoderFactory.get(point, algo)
    robust_dec = RobustDecoderFactory.get(t, point, algorithm=Algorithm.GAO)
    incremental_decoder = IncrementalDecoder(enc, dec, robust_dec, degree, 1, t)
    # Feed each share into the decoder as soon as it arrives.
    async for idx, share in fetch_one(field_futures):
        incremental_decoder.add(idx, [share.value])
        if incremental_decoder.done():
            polys, errors = incremental_decoder.get_results()
            return polynomials_over(field)(polys[0]), errors
    return None, None
async def test_hbavss_batch_batch_gf(test_router):
    """Batch AVSS over a GF field: 50 dealt values are all recoverable."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = gen_pc_const_crs(t, g=g, h=h)
    field = GF(Subgroup.BLS12_381)
    values = [field.random() for _ in range(50)]
    dealer_id = randint(0, n - 1)
    avss_tasks = [None] * n
    with ExitStack() as stack:
        hbavss_list = [None] * n
        for i in range(n):
            node = HbAvssBatch(pks, sks[i], crs, n, t, i, sends[i], recvs[i],
                               field=field)
            hbavss_list[i] = node
            stack.enter_context(node)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(node.avss(0, values=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    node.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
        # Transpose so each row holds every party's share of one value.
        transposed = list(map(list, zip(*shares)))
        recovered_values = [
            polynomials_over(field).interpolate_at(zip(range(1, n + 1), row))
            for row in transposed
        ]
        assert recovered_values == values
def test_hbacss2_size_benchmark_batch_verify(benchmark, t):
    """Benchmark batch verification of t openings at a single point."""
    pc = PolyCommitLog(degree_max=t)
    pc.preprocess_verifier(16)
    r = ZR.random()
    phis, cs = [], []
    for _ in range(t):
        phi = polynomials_over(ZR).random(t)
        phis.append(phi)
        cs.append(pc.commit(phi, r))
    witnesses = pc.double_batch_create_witness(phis, r)
    point = 4
    evals = [phi(point) for phi in phis]
    # Witnesses are 0-indexed by party, hence point - 1.
    benchmark(pc.batch_verify_eval, cs, point, evals, witnesses[point - 1])
async def test_hbavss_light_gf(test_router):
    """Light AVSS over a GF field: a random dealer's value is recoverable."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    field = GF(Subgroup.BLS12_381)
    value = field.random()
    dealer_id = randint(0, n - 1)
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    with ExitStack() as stack:
        for i in range(n):
            node = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i], recvs[i],
                               field=field)
            hbavss_list[i] = node
            stack.enter_context(node)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(node.avss(0, value=value))
            else:
                avss_tasks[i] = asyncio.create_task(
                    node.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        # shares = await asyncio.gather(*avss_tasks)
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        for task in avss_tasks:
            task.cancel()
        shares = [item[2] for item in outputs]
        assert polynomials_over(field).interpolate_at(
            zip(range(1, n + 1), shares)) == value
def __init__(self, crs, n, t, my_id, send, recv, shares, sharesvalid,
             commits, evalproofs, pc=None, field=ZR):
    """Store parameters and pre-distributed shares/proofs for recovery."""
    self.crs = crs
    self.n, self.t, self.my_id = n, t, my_id
    self.send, self.recv = send, recv
    self.pc = pc
    self.poly = polynomials_over(field)
    # Assume we already hold all shares and proofs and know whether
    # they are valid.
    self.commits = commits
    self.evalproofs = evalproofs
    self.shares = shares
    self.sharesvalid = sharesvalid
def test_benchmark_gao_robust_decode(benchmark, t, galois_field):
    """Benchmark Gao robust decoding with exactly t corrupted shares."""
    n = 3 * t + 1
    # NOTE(review): the `galois_field` fixture argument is immediately
    # overwritten here — confirm whether the fixture was meant to be used.
    galois_field = GF(Subgroup.BLS12_381)
    point = EvalPoint(galois_field, n)
    dec = GaoRobustDecoder(t, point)
    parties = [_ for _ in range(n)]
    truepoly = polynomials_over(galois_field).random(degree=t)
    # Pick t distinct parties to corrupt.
    faults = []
    while len(faults) < t:
        candidate = randint(0, n - 1)
        if candidate not in faults:
            faults.append(candidate)
    shares_with_faults = [
        int(galois_field.random()) if i in faults else int(truepoly(i + 1))
        for i in parties
    ]
    benchmark(dec.robust_decode, parties, shares_with_faults)
async def test_hbacss0(test_router):
    """Shares output by all n Hbacss0 nodes interpolate back to the dealt values."""
    from pypairing import G1, ZR
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params_pyp(n, t)
    sends, recvs, _ = test_router(n)
    # TODO: add configurable crs specifically for poly_commit_log
    crs = [g]
    values = [ZR.random()] * (t + 1)
    dealer_id = randint(0, n - 1)
    avss_tasks = [None] * n
    with ExitStack() as stack:
        hbavss_list = [None] * n
        for i in range(n):
            node = Hbacss0(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = node
            stack.enter_context(node)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(node.avss(0, values=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    node.avss(0, dealer_id=dealer_id))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
        # Transpose so each row holds every party's share of one value.
        transposed = list(map(list, zip(*shares)))
        recovered_values = [
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), row))
            for row in transposed
        ]
        assert recovered_values == values
def __init__(self, public_keys, private_key, crs, n, t, my_id, send, recv,
             pc=None, field=ZR):  # (# noqa: E501)
    """Set up keys, channels, queues, and the constant-size commitment."""
    self.public_keys, self.private_key = public_keys, private_key
    self.n, self.t, self.my_id = n, t, my_id
    # CRS sanity: three components, first holding t + 1 generators.
    assert len(crs) == 3
    assert len(crs[0]) == t + 1
    self.g = crs[0][0]

    # Split the single `recv` channel into per-tag channels.
    self.subscribe_recv_task, self.subscribe_recv = subscribe_recv(recv)

    # Split the single `send` channel into per-tag channels.
    def _send(tag):
        return wrap_send(tag, send)

    self.get_send = _send

    self.field = field
    self.poly = polynomials_over(self.field)
    if pc is not None:
        self.poly_commit = pc
    else:
        self.poly_commit = PolyCommitConst(crs, field=self.field)
        self.poly_commit.preprocess_prover()
        self.poly_commit.preprocess_verifier()

    self.avid_msg_queue = asyncio.Queue()
    self.tasks = []
    self.shares_future = asyncio.Future()
    self.output_queue = asyncio.Queue()
async def test_hbavss_batch_client_mode(test_router):
    """A dealing-only client (index n, no secret key) batch-shares t+1
    values that the n servers can jointly reconstruct."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n + 1, t)
    # One extra channel pair for the dealing client.
    sends, recvs, _ = test_router(n + 1)
    crs = gen_pc_const_crs(t, g=g, h=h)
    values = [ZR.random()] * (t + 1)
    dealer_id = n
    avss_tasks = [None] * (n + 1)
    hbavss_list = [None] * n
    with ExitStack() as stack:
        client = HbAvssBatch(pks, None, crs, n, t, dealer_id,
                             sends[dealer_id], recvs[dealer_id])
        stack.enter_context(client)
        avss_tasks[n] = asyncio.create_task(
            client.avss(0, values=values, client_mode=True))
        for i in range(n):
            node = HbAvssBatch(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = node
            stack.enter_context(node)
            avss_tasks[i] = asyncio.create_task(
                node.avss(0, dealer_id=dealer_id, client_mode=True))
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list])
        shares = [output[2] for output in outputs]
        for task in avss_tasks:
            task.cancel()
        # Transpose so each row holds every party's share of one value.
        transposed = list(map(list, zip(*shares)))
        recovered_values = [
            polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), row))
            for row in transposed
        ]
        assert recovered_values == values