def __enter__(self):
    """Start the AVSS machinery: the HbAvssLight instance, the value
    processor, and the background runner/extract tasks.

    NOTE(review): this is textually identical to
    ``PreProcessingBase.__enter__`` defined later in this file — consider
    deduplicating; confirm which copy is the live one.
    """
    n, t, my_id = self.n, self.t, self.my_id
    # Dedicated tagged channel for AVSS protocol messages.
    send, recv = self.get_send_recv(f"{self.tag}-AVSS")
    g, h, pks, sk = get_avss_params(n, t, my_id)
    crs = [g, h]  # common reference string: the two generators
    self.avss_instance = HbAvssLight(pks, sk, crs, n, t, my_id, send, recv)
    self.avss_instance.__enter__()
    # Background loop that keeps triggering AVSS batches.
    self.tasks.append(asyncio.create_task(self._runner()))
    # Separate tagged channel for ordering/processing the AVSSed values.
    send, recv = self.get_send_recv(f"{self.tag}-AVSS_VALUE_PROCESSOR")
    # Threshold keys for the processor; fixed seed => all nodes derive the
    # same key set deterministically.
    pk, sks = dealer(n, t + 1, seed=17)
    self.avss_value_processor = AvssValueProcessor(
        pk,
        sks[my_id],
        n,
        t,
        my_id,
        send,
        recv,
        # Values flow from the AVSS output queue into the processor.
        self.avss_instance.output_queue.get,
        self.avss_value_processor_chunk_size,
    )
    self.avss_value_processor.__enter__()
    # Subclass-provided task that moves processed values to output_queue.
    self.tasks.append(asyncio.create_task(self._extract()))
    return self
async def test_hbavss_light(test_router):
    """One dealer AVSSes a single value; the nodes' shares must
    interpolate back to that value."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    value = ZR.random()
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for node_id in range(n):
            node = HbAvssLight(
                pks, sks[node_id], crs, n, t, node_id, sends[node_id], recvs[node_id]
            )
            hbavss_list[node_id] = node
            stack.enter_context(node)
            # The dealer supplies the value; everyone else points at the dealer.
            if node_id == dealer_id:
                coro = node.avss(0, value=value)
            else:
                coro = node.avss(0, dealer_id=dealer_id)
            task = asyncio.create_task(coro)
            task.add_done_callback(print_exception_callback)
            avss_tasks[node_id] = task
        outputs = await asyncio.gather(
            *[node.output_queue.get() for node in hbavss_list]
        )
        for task in avss_tasks:
            task.cancel()
    # Each output tuple carries the share at index 2.
    shares = [item[2] for item in outputs]
    assert polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), shares)) == value
async def hbavss_light_batch_dealer(test_router, params):
    """Client-mode batch AVSS: a dedicated dealer node (id ``n``) deals
    ``values`` to the ``n`` server nodes.

    The dealer passes ``None`` as its secret key because in client mode it
    only deals and never receives shares.
    """
    (t, n, g, h, pks, sks, crs, pc, values, field) = params
    # n server endpoints plus one extra endpoint for the dealer.
    sends, recvs, _ = test_router(n + 1)
    dealer_id = n
    hbavss = HbAvssLight(
        pks,
        None,
        crs,
        n,
        t,
        dealer_id,
        sends[dealer_id],
        recvs[dealer_id],
        pc=pc,
        field=field,
    )
    # Fix: await the coroutine directly — wrapping it in a Task only to
    # await it immediately added no concurrency, just indirection.
    await hbavss.avss(0, value=values, client_mode=True)
async def hbavss_light_batch(test_router, params):
    """All n nodes run a batch AVSS with one randomly chosen dealer and
    wait for every node to produce an output."""
    (t, n, g, h, pks, sks, crs, pc, values, field) = params
    sends, recvs, _ = test_router(n)
    dealer_id = randint(0, n - 1)
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    with ExitStack() as stack:
        for idx in range(n):
            node = HbAvssLight(
                pks, sks[idx], crs, n, t, idx, sends[idx], recvs[idx], pc=pc, field=field
            )
            hbavss_list[idx] = node
            stack.enter_context(node)
            if idx == dealer_id:
                avss_tasks[idx] = asyncio.create_task(node.avss(0, value=values))
            else:
                avss_tasks[idx] = asyncio.create_task(
                    node.avss(0, dealer_id=dealer_id)
                )
        # Block until every node has produced its output, then tear down.
        await asyncio.gather(*[node.output_queue.get() for node in hbavss_list])
        for task in avss_tasks:
            task.cancel()
async def test_hbavss_light_share_open(test_router):
    """AVSS a single value, then open the resulting shares through an MPC
    program and check the opened value matches."""
    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    value = ZR.random()
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(hbavss.avss(0, value=value))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            # Consistency fix: sibling tests attach this callback so a
            # failing AVSS task surfaces its exception instead of silently
            # hanging the gather below; this test was missing it.
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # Collect each node's share (index 2 of its output tuple).
    shares = []
    for item in outputs:
        shares.append(item[2])

    async def _prog(context):
        # Each MPC party opens its own share; the reconstruction must
        # equal the originally dealt value.
        share_value = context.field(shares[context.myid])
        assert await context.Share(share_value).open() == value

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
class PreProcessingBase(ABC):
    """Abstract driver that keeps a pool of preprocessed values topped up.

    A background runner repeatedly AVSSes batches of inputs from every
    node, an ``AvssValueProcessor`` orders the shared values, and a
    subclass-provided ``_extract`` task moves finished values into
    ``output_queue`` for consumers to ``get()``.
    """

    # Seconds the runner sleeps between checks of the output queue level.
    PERIOD_IN_SECONDS = 3

    def __init__(
        self,
        n,
        t,
        my_id,
        send,
        recv,
        tag,
        batch_size=10,
        avss_value_processor_chunk_size=1,
    ):
        """n: node count; t: fault tolerance; my_id: this node's index;
        send/recv: network channel pair; tag: namespace for sub-channels."""
        self.n, self.t, self.my_id = n, t, my_id
        self.tag = tag
        self.avss_value_processor_chunk_size = avss_value_processor_chunk_size
        # Batch size of values to AVSS from a node
        self.batch_size = batch_size
        # Minimum number of values before triggering another set of AVSSes
        self.low_watermark = self.batch_size
        self.output_queue = asyncio.Queue()
        # Create a mechanism to split the `send` and `recv` channels based on `tag`
        subscribe_recv_task, subscribe = subscribe_recv(recv)
        self.tasks = [subscribe_recv_task]

        def _get_send_recv(tag):
            # Pair a tag-wrapped sender with the matching subscription.
            return wrap_send(tag, send), subscribe(tag)

        self.get_send_recv = _get_send_recv

    async def get(self):
        """Return the next preprocessed value, waiting if none is ready."""
        return await self.output_queue.get()

    @abstractmethod
    def _get_input_batch(self):
        """Return the list/tuple of values this node should AVSS next."""
        raise NotImplementedError

    async def _trigger_and_wait_for_avss(self, avss_id):
        """Run one round of parallel AVSSes: deal our own batch and
        participate in every other node's dealing; wait for all to finish."""
        inputs = self._get_input_batch()
        assert type(inputs) in [tuple, list]
        avss_tasks = []
        # This node acts as dealer for its own inputs.
        avss_tasks.append(
            asyncio.create_task(
                self.avss_instance.avss_parallel(
                    avss_id, len(inputs), values=inputs, dealer_id=self.my_id
                )
            )
        )
        # Participate as a recipient in every other node's parallel AVSS.
        for i in range(self.n):
            if i != self.my_id:
                avss_tasks.append(
                    asyncio.create_task(
                        self.avss_instance.avss_parallel(
                            avss_id, len(inputs), dealer_id=i
                        )
                    )
                )
        await asyncio.gather(*avss_tasks)

    async def _runner(self):
        """Loop forever, triggering a new AVSS batch whenever the output
        queue falls below the low watermark."""
        counter = 0
        logging.debug("[%d] Starting preprocessing runner: %s", self.my_id, self.tag)
        while True:
            # If the number of values in the output queue are below the lower
            # watermark then we want to trigger the next set of AVSSes.
            if self.output_queue.qsize() < self.low_watermark:
                logging.debug("[%d] Starting AVSS Batch: %d", self.my_id, counter)
                await self._trigger_and_wait_for_avss(counter)
                logging.debug("[%d] AVSS Batch Completed: %d", self.my_id, counter)
                counter += 1
            # Wait for sometime before checking again.
            await asyncio.sleep(PreProcessingBase.PERIOD_IN_SECONDS)

    async def _get_output_batch(self, group_size=1):
        """Yield ``batch_size`` batches of processed values.

        Each batch is read until the processor emits a ``None`` sentinel.
        Per batch, between ``n - t`` and ``n`` groups of values must have
        arrived (each dealer contributes ``group_size`` values).
        """
        for i in range(self.batch_size):
            batch = []
            while True:
                value = await self.avss_value_processor.get()
                # ``None`` marks the end of the current batch.
                if value is None:
                    break
                batch.append(value)
            assert len(batch) / group_size >= self.n - self.t
            assert len(batch) / group_size <= self.n
            yield batch

    async def _extract(self):
        # Subclasses must implement this to fill ``output_queue``.
        # NOTE(review): not decorated @abstractmethod like _get_input_batch;
        # failure surfaces only at runtime when the task raises.
        raise NotImplementedError

    def __enter__(self):
        """Start the AVSS instance, the value processor and the
        background runner/extract tasks; return self."""
        n, t, my_id = self.n, self.t, self.my_id
        # Dedicated tagged channel for AVSS protocol messages.
        send, recv = self.get_send_recv(f"{self.tag}-AVSS")
        g, h, pks, sk = get_avss_params(n, t, my_id)
        crs = [g, h]  # common reference string: the two generators
        self.avss_instance = HbAvssLight(pks, sk, crs, n, t, my_id, send, recv)
        self.avss_instance.__enter__()
        self.tasks.append(asyncio.create_task(self._runner()))
        # Separate tagged channel for the value processor.
        send, recv = self.get_send_recv(f"{self.tag}-AVSS_VALUE_PROCESSOR")
        # Fixed seed => all nodes derive the same threshold key set.
        pk, sks = dealer(n, t + 1, seed=17)
        self.avss_value_processor = AvssValueProcessor(
            pk,
            sks[my_id],
            n,
            t,
            my_id,
            send,
            recv,
            # Values flow from the AVSS output queue into the processor.
            self.avss_instance.output_queue.get,
            self.avss_value_processor_chunk_size,
        )
        self.avss_value_processor.__enter__()
        self.tasks.append(asyncio.create_task(self._extract()))
        return self

    def __exit__(self, *args):
        """Cancel background tasks and tear down the nested contexts."""
        for task in self.tasks:
            task.cancel()
        self.avss_instance.__exit__(*args)
        self.avss_value_processor.__exit__(*args)
async def test_hbavss_light_parallel_share_array_open(test_router):
    """Run k parallel AVSSes, then open each node's share array through an
    MPC program and check exactly the dealt values come back."""
    t = 2
    n = 3 * t + 1
    k = 4
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    values = [int(ZR.random()) for _ in range(k)]
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        avss_tasks = [None] * n
        hbavss_list = [None] * n
        for node_id in range(n):
            node = HbAvssLight(
                pks, sks[node_id], crs, n, t, node_id, sends[node_id], recvs[node_id]
            )
            hbavss_list[node_id] = node
            stack.enter_context(node)
            if node_id == dealer_id:
                v, d = values, None
            else:
                v, d = None, dealer_id
            task = asyncio.create_task(node.avss_parallel(0, k, v, d))
            task.add_done_callback(print_exception_callback)
            avss_tasks[node_id] = task
        outputs = [None] * k
        for round_idx in range(k):
            outputs[round_idx] = await asyncio.gather(
                *[node.output_queue.get() for node in hbavss_list]
            )
        for task in avss_tasks:
            task.cancel()
    # Sort the outputs incase they're out of order
    # (index 1 of each output tuple is its AVSS round id).
    round_outputs = [[[] for __ in range(n)] for _ in range(k)]
    for i in range(k):
        for j in range(n):
            out = outputs[i][j]
            round_outputs[out[1]][j] = out
    # shares[j] holds node j's k shares, ordered by round.
    shares = [
        [round_outputs[r][j][2] for r in range(k)] for j in range(n)
    ]

    async def _prog(context):
        share_values = list(map(context.field, shares[context.myid]))
        opened_shares = set(await context.ShareArray(share_values).open())
        # The set of opened share should have exactly `k` values
        assert len(opened_shares) == k
        # All the values in the set of opened shares should be from the initial values
        for i in opened_shares:
            assert i.value in values

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
async def test_hbavss_light_batch_share_fault(test_router):
    """Batch AVSS where the dealer corrupts one recipient's share.

    The bad dealer overwrites the share at batch index 20 (batch size is
    50) for one randomly chosen node.  The test asserts that every batch
    value is still recoverable by interpolating the nodes' outputs.
    """

    class BadDealer(HbAvssLight):
        # Identical to the honest dealer message except for one corrupted
        # share injected for a random recipient.
        def _get_dealer_msg(self, value):
            # Normalize so single values and batches share one code path.
            if type(value) in (list, tuple):
                valuelist = value
            else:
                valuelist = [value]
            philist, commitlist, auxlist = [], [], []
            fault_i = randint(0, self.n - 1)  # recipient that gets a bad share
            for val in valuelist:
                # One random degree-t polynomial per value, plus its commitment.
                phi = self.poly.random(self.t, val)
                philist.append(phi)
                commitment, aux_poly = self.poly_commit.commit(phi)
                commitlist.append(commitment)
                auxlist.append(aux_poly)
            ephemeral_secret_key = self.field.random()
            ephemeral_public_key = pow(self.g, ephemeral_secret_key)
            z = [None] * self.n
            for i in range(self.n):
                # DH-style shared key with recipient i for symmetric encryption.
                shared_key = pow(self.public_keys[i], ephemeral_secret_key)
                shares, witnesses = [], []
                for phi in philist:
                    shares.append(phi(i + 1))
                for aux in auxlist:
                    witnesses.append(
                        self.poly_commit.create_witness(aux, i + 1))
                if i == fault_i:
                    # Fault injection: replace one share with a random value.
                    shares[20] = ZR.random()
                z[i] = SymmetricCrypto.encrypt(
                    str(shared_key).encode(), (shares, witnesses))
            return dumps((commitlist, ephemeral_public_key, z))

    t = 2
    n = 3 * t + 1
    batchsize = 50
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    values = [int(ZR.random()) for _ in range(batchsize)]
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            # Only the dealer node runs the faulty implementation.
            if i == dealer_id:
                hbavss = BadDealer(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            else:
                hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, value=values))
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
            # Surface task exceptions instead of hanging the gather below.
            avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # shares[j] collects every node's share of batch value j.
    shares = [[] for _ in range(batchsize)]
    for i in range(n):
        for j in range(batchsize):
            shares[j].append(outputs[i][2][j])
    # Despite the injected fault, every batch value must interpolate back.
    for j in range(batchsize):
        assert (polynomials_over(ZR).interpolate_at(
            zip(range(1, n + 1), shares[j])) == values[j])
async def test_hbavss_light_encryption_fault(test_router):
    # Injects one undecryptable ciphertext
    """AVSS where the dealer encrypts one recipient's payload under a
    random key instead of the shared key, making it undecryptable.

    The test asserts the shares still interpolate to the dealt value.
    """

    class BadDealer(HbAvssLight):
        # Identical to the honest dealer message except one recipient's
        # ciphertext is encrypted under a random key.
        def _get_dealer_msg(self, value):
            fault_i = randint(0, self.n - 1)  # recipient that gets junk
            phi = self.poly.random(self.t, value)
            commitment, aux_poly = self.poly_commit.commit(phi)
            ephemeral_secret_key = self.field.random()
            ephemeral_public_key = pow(self.g, ephemeral_secret_key)
            z = [None] * self.n
            for i in range(self.n):
                witness = self.poly_commit.create_witness(aux_poly, i + 1)
                # DH-style shared key with recipient i.
                shared_key = pow(self.public_keys[i], ephemeral_secret_key)
                if i == fault_i:
                    # Fault injection: encrypt under a random key so the
                    # recipient cannot decrypt its share.
                    z[i] = SymmetricCrypto.encrypt(
                        str(ZR.random()).encode(), ([phi(i + 1)], [witness]))
                else:
                    z[i] = SymmetricCrypto.encrypt(
                        str(shared_key).encode(), ([phi(i + 1)], [witness]))
            return dumps(([commitment], ephemeral_public_key, z))

    t = 2
    n = 3 * t + 1
    g, h, pks, sks = get_avss_params(n, t)
    sends, recvs, _ = test_router(n)
    crs = [g, h]
    value = ZR.random()
    avss_tasks = [None] * n
    hbavss_list = [None] * n
    dealer_id = randint(0, n - 1)
    with ExitStack() as stack:
        for i in range(n):
            # Only the dealer node runs the faulty implementation.
            if i == dealer_id:
                hbavss = BadDealer(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            else:
                hbavss = HbAvssLight(pks, sks[i], crs, n, t, i, sends[i], recvs[i])
            hbavss_list[i] = hbavss
            stack.enter_context(hbavss)
            if i == dealer_id:
                avss_tasks[i] = asyncio.create_task(hbavss.avss(0, value=value))
                avss_tasks[i].add_done_callback(print_exception_callback)
            else:
                avss_tasks[i] = asyncio.create_task(
                    hbavss.avss(0, dealer_id=dealer_id))
                avss_tasks[i].add_done_callback(print_exception_callback)
        outputs = await asyncio.gather(
            *[hbavss_list[i].output_queue.get() for i in range(n)])
        for task in avss_tasks:
            task.cancel()
    # Collect each node's share (index 2 of its output tuple).
    shares = []
    for item in outputs:
        shares.append(item[2])
    # Despite one undecryptable ciphertext, interpolation must succeed.
    assert polynomials_over(ZR).interpolate_at(zip(range(1, n + 1), shares)) == value