async def test_phase1(galois_field):
    """Run phase 1 of power mixing on n parties and verify each party's
    generated input file: modulus header, masked share, count, and the
    k powers of the random base element."""
    field = galois_field
    n, t, k = 5, 2, 1
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, 1)
    pp_elements.generate_rands(k, n, t)

    async def verify_phase1(ctx, **kwargs):
        k_ = kwargs["k"]
        # b_ is the base of the precomputed power sequence (powers[0] = b).
        b_ = await ctx.preproc.get_powers(ctx, 0)[0].open()
        file_prefixes = [uuid4().hex]
        # Fix: use the kwarg k_ (was the closure variable k) so the inner
        # program depends only on what the runner passed in.
        await pm.all_secrets_phase1(ctx, k=k_, file_prefixes=file_prefixes)
        file_name = f"{file_prefixes[0]}-{ctx.myid}.input"
        file_path = f"{pp_elements.data_directory}{file_name}"
        with open(file_path, "r") as f:
            # Line 1: field modulus.
            assert int(f.readline()) == field.modulus
            # next line is a random share, which should open successfully
            a_ = await ctx.Share(int(f.readline())).open()
            # Line 3: the masked value a - b.
            assert int(f.readline()) == (a_ - b_).value
            # Line 4: the power count k.
            assert int(f.readline()) == k_
            # Lines 5..4+k: shares of b^1 .. b^k.
            for i in range(1, k_ + 1):
                assert (await ctx.Share(int(f.readline())).open()).value == b_ ** i

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(verify_phase1, k=k)
    await program_runner.join()
def get_powermixing_setup_commands(max_k, runid, s3manager, instance_ids):
    """Generate per-instance shell command lists that set up AWS VMs for a
    power-mixing run.

    Creates the preprocessing files locally, uploads each instance's share
    files to S3, writes a ``{runid}-{i}-links`` manifest of the URLs, and
    returns ``[[instance_id, commands], ...]`` for the deployment driver.

    :param max_k: override for the mix size k; falls back to the AWS config.
    :param runid: identifier used to name the per-instance link manifests.
    :param s3manager: uploader exposing ``upload_file``/``upload_files``.
    :param instance_ids: VM instance ids, one per MPC party.
    """
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    k = max_k if max_k else AwsConfig.MPC_CONFIG.K

    logging.info("Starting to create preprocessing files.")
    stime = time()
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, k)
    pp_elements.generate_rands(k, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    setup_commands = []
    total_time = 0
    logging.info("Uploading input files to AWS S3.")
    for i, instance_id in enumerate(instance_ids):
        url = s3manager.upload_file("aws/download_input.sh")
        commands = [
            # Uniform f-strings (was a mix of %-formatting and f-strings).
            f"sudo docker pull {AwsConfig.DOCKER_IMAGE_PATH}",
            f"curl -sSO {url}",
            "mkdir -p sharedata",
            "cp download_input.sh sharedata/download_input.sh ",
            "mkdir -p benchmark-logs",
            "ulimit -n 10000",
        ]
        # Party i needs its share of each of the k power sequences plus
        # its random-share file.
        file_names = []
        for j in range(k):
            prefix1 = f"{pp_elements.mixins[Constants.POWERS].file_prefix}_{j}"
            file_names.append(
                pp_elements.mixins[Constants.POWERS].build_filename(
                    n, t, i, prefix=prefix1
                )
            )
        file_names.append(
            pp_elements.mixins[Constants.RANDS].build_filename(n, t, i)
        )
        stime = time()
        urls = s3manager.upload_files(file_names)
        total_time += time() - stime
        # Build the manifest name once (was duplicated via %-formatting and
        # an f-string) and upload it so the VM can fetch its input files.
        fname = f"{runid}-{i}-links"
        with open(fname, "w") as f:
            for url in urls:
                print(url, file=f)
        url = s3manager.upload_file(fname)
        commands.append(
            f"cd sharedata; curl -sSO {url}; bash download_input.sh {fname}"
        )
        setup_commands.append([instance_id, commands])
    logging.info(f"Upload completed in {total_time} seconds.")

    return setup_commands
async def test_get_powers():
    """Check that each preprocessed power sequence opens to consecutive
    powers x, x^2, x^3, ... of its base element."""
    n, t = 4, 1
    pp_elements = PreProcessedElements()
    nums, num_powers = 2, 3
    pp_elements.generate_powers(num_powers, n, t, nums)

    async def _prog(ctx):
        for i in range(nums):
            powers = ctx.preproc.get_powers(ctx, i)
            x = await powers[0].open()
            # Fix: use a distinct index (was `i`, shadowing the outer loop
            # variable). powers[j-1] should open to x**j for j >= 2.
            for j, power in enumerate(powers[1:], start=2):
                assert await power.open() == pow(x, j)

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
async def test_asynchronous_mixing():
    """End-to-end check of async power mixing: the mixed output must be a
    permutation of the parties' original inputs."""
    import asyncio
    import apps.asynchromix.powermixing as pm
    from honeybadgermpc.mpc import TaskProgramRunner

    n, t, k = 3, 1, 4
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, k)
    pp_elements.generate_rands(1000, n, t)

    async def verify_output(context, **kwargs):
        result, input_shares = kwargs["result"], kwargs["input_shares"]
        my_shares = input_shares[context.myid]
        assert len(result) == len(my_shares)

        # Open every original input share and compare as multisets:
        # mixing permutes the inputs but must not change them.
        open_tasks = [context.Share(share.v, t).open() for share in my_shares]
        opened = await asyncio.gather(*open_tasks)
        assert sorted(value.value for value in opened) == sorted(result)

    result, input_shares = await pm.async_mixing(n, t, k)
    program_runner = TaskProgramRunner(n, t)
    program_runner.add(verify_output, result=result, input_shares=input_shares)
    await program_runner.join()
# Script entry: set up a fresh event loop, coordinate preprocessing between
# parties (party 0 generates, the others wait), then run the mixing protocol
# in processes. Relies on `k`, `run_id`, `HbmpcConfig`, `GF`, `Subgroup`,
# and `async_mixing_in_processes` being defined earlier in the file.
pp_elements = PreProcessedElements()
pp_elements.clear_preprocessing()  # start from a clean sharedata directory
asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
try:
    if not HbmpcConfig.skip_preprocessing:
        # Need to keep these fixed when running on processes.
        field = GF(Subgroup.BLS12_381)
        # NOTE(review): `a_s` (and `field`) are not referenced later in this
        # visible span — presumably kept for determinism across process
        # runs per the comment above; confirm before removing.
        a_s = [field(i) for i in range(1000 + k, 1000, -1)]

        if HbmpcConfig.my_id == 0:
            # Party 0 generates all preprocessing material and signals done.
            pp_elements.generate_rands(k, HbmpcConfig.N, HbmpcConfig.t)
            pp_elements.generate_powers(k, HbmpcConfig.N, HbmpcConfig.t, k)
            pp_elements.preprocessing_done()
        else:
            # Other parties block until party 0 finishes preprocessing.
            loop.run_until_complete(pp_elements.wait_for_preprocessing())

    loop.run_until_complete(
        async_mixing_in_processes(
            HbmpcConfig.peers,
            HbmpcConfig.N,
            HbmpcConfig.t,
            k,
            run_id,
            HbmpcConfig.my_id,
        )
    )
finally:
    loop.close()