def get_ipc_setup_commands(s3manager, instance_ids):
    """Prepare preprocessed elements for the IPC benchmark and build the
    per-instance setup command lists.

    Generates zero and triple shares for all parties, uploads each party's
    share files to S3, and returns, for every instance, the shell commands
    that pull the docker image and download that party's share files.

    :param s3manager: object exposing ``upload_files(paths) -> urls``.
    :param instance_ids: iterable of AWS instance ids, one per party.
    :return: list of ``[instance_id, [command, ...]]`` pairs.
    """
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    num_triples = AwsConfig.MPC_CONFIG.NUM_TRIPLES

    pp_elements = PreProcessedElements()
    pp_elements.generate_zeros(num_triples, n, t)
    pp_elements.generate_triples(num_triples, n, t)

    # One share file per party; upload all of them and keep the URLs
    # aligned with the party index.
    triple_urls = s3manager.upload_files(
        [pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, party)
         for party in range(n)])
    zero_urls = s3manager.upload_files(
        [pp_elements.mixins[Constants.ZEROS]._build_file_name(n, t, party)
         for party in range(n)])

    return [
        [
            instance_id,
            [
                "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
                "mkdir -p sharedata",
                "cd sharedata; curl -sSO %s" % (triple_urls[idx]),
                "cd sharedata; curl -sSO %s" % (zero_urls[idx]),
                "mkdir -p benchmark-logs",
            ],
        ]
        for idx, instance_id in enumerate(instance_ids)
    ]
async def tutorial_fixedpoint():
    """Run the fixed-point tutorial program `_prog` on a local 4-party
    (t=1) asyncio test network and return the per-party results.

    Generates the fake preprocessed elements (zeros, triples, bits) the
    program consumes before launching the runner.
    """
    n, t = 4, 1

    elements = FakePreProcessedElements()
    elements.generate_zeros(100, n, t)
    elements.generate_triples(1000, n, t)
    elements.generate_bits(1000, n, t)

    runner = TaskProgramRunner(n, t, config)
    runner.add(_prog)
    return await runner.join()
async def tutorial_1():
    """Run the tutorial program `prog` on a test network of 4 nodes
    (no sockets, just asyncio tasks) and return the per-party results.

    Fake preprocessed elements (zeros, triples, bits) are generated
    up front so the program has material to consume.
    """
    n, t = 4, 1

    elements = FakePreProcessedElements()
    elements.generate_zeros(100, n, t)
    elements.generate_triples(100, n, t)
    elements.generate_bits(100, n, t)

    runner = TaskProgramRunner(n, t, config)
    runner.add(prog)
    return await runner.join()
async def prog():
    """Generate the fake preprocessed elements needed by `laesa_test_1`
    and run it on a local 4-party (t=1) asyncio test network.

    Returns the per-party results from the runner.
    """
    n, t = 4, 1

    elements = FakePreProcessedElements()
    elements.generate_zeros(1000, n, t)
    elements.generate_triples(120000, n, t)
    elements.generate_share_bits(1000, n, t)
    elements.generate_bits(3000, n, t)
    elements.generate_rands(10000, n, t)

    runner = TaskProgramRunner(n, t, config)
    runner.add(laesa_test_1)
    return await runner.join()
async def test_get_zero():
    """Check that preprocessed zero-shares open to 0 on a 4-party network."""
    n, t = 4, 1
    num_zeros = 2

    pp_elements = PreProcessedElements()
    pp_elements.generate_zeros(1000, n, t)

    async def _prog(ctx):
        # Each party draws `num_zeros` zero-shares and opens them;
        # every opened value must be exactly 0.
        for _ in range(num_zeros):
            share = ctx.preproc.get_zero(ctx)
            assert await share.open() == 0

    runner = TaskProgramRunner(n, t)
    runner.add(_prog)
    await runner.join()
def get_bit_dec_setup_commands(s3manager, instance_ids):
    """Prepare preprocessed elements for the bit-decomposition benchmark and
    build the per-instance setup command lists.

    Generates triples, random shares, bits and zeros for all parties,
    uploads each party's share files to S3, and returns, for every
    instance, the shell commands that pull the docker image and download
    that party's share files.

    :param s3manager: object exposing ``upload_files(paths) -> urls``.
    :param instance_ids: iterable of AWS instance ids, one per party.
    :return: list of ``[instance_id, [command, ...]]`` pairs.
    """
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T

    logging.info("Starting to create preprocessing files.")
    stime = time()
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(150000, n, t)
    pp_elements.generate_rands(66000, n, t)
    pp_elements.generate_bits(10000, n, t)
    pp_elements.generate_zeros(200, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    # Fixed: removed dead locals (`setup_commands = []` was immediately
    # overwritten below; `total_time = 0` was never read) and the
    # placeholder-free f-string on the upload log message.
    logging.info("Uploading input files to AWS S3.")
    stime = time()
    triple_urls = s3manager.upload_files(
        [build_file_name_triple(n, t, i) for i in range(n)])
    rands_urls = s3manager.upload_files(
        [build_file_name_rand(n, t, i) for i in range(n)])
    zeros_urls = s3manager.upload_files(
        [build_file_name_zero(n, t, i) for i in range(n)])
    bits_urls = s3manager.upload_files(
        [build_file_name_bit(n, t, i) for i in range(n)])
    logging.info(f"Inputs successfully uploaded in {time()-stime} seconds.")

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (rands_urls[i]),
            "cd sharedata; curl -sSO %s" % (zeros_urls[i]),
            "cd sharedata; curl -sSO %s" % (bits_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]
    return setup_commands
async def test_mpc_programs(peers, n, t, my_id):
    """Run the MPC smoke-test programs over a process-based network.

    Party 0 generates the preprocessed zeros/triples (unless preprocessing
    is skipped in the config); every other party waits until that is done.
    Then `test_prog2` and `test_batchopening` are executed concurrently
    and their gathered results returned.
    """
    from honeybadgermpc.mpc import test_prog1, test_prog2, test_batchopening
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import wait_for_preprocessing, preprocessing_done

    if not HbmpcConfig.skip_preprocessing:
        # Only one party needs to generate the preprocessed elements for testing
        if HbmpcConfig.my_id == 0:
            elements = PreProcessedElements()
            elements.generate_zeros(1000, HbmpcConfig.N, HbmpcConfig.t)
            elements.generate_triples(1000, HbmpcConfig.N, HbmpcConfig.t)
            preprocessing_done()
        else:
            await wait_for_preprocessing()

    async with ProcessProgramRunner(peers, n, t, my_id) as runner:
        # Deliberate no-op reference: keeps the `test_prog1` import "used"
        # while its execution stays disabled (see commented line below).
        test_prog1
        # r1 = runner.execute("0", test_prog1)
        task2 = runner.execute("1", test_prog2)
        task3 = runner.execute("2", test_batchopening)
        return await asyncio.gather(task2, task3)
async def test_open_shares():
    """Open 100 zero-shares per party on a 3-party (t=1) network and
    verify every opened value is 0."""
    n, t = 3, 1
    number_of_secrets = 100

    pp_elements = PreProcessedElements()
    pp_elements.generate_zeros(1000, n, t)

    async def _prog(context):
        opened = []
        for _ in range(number_of_secrets):
            value = await context.preproc.get_zero(context).open()
            assert value == 0
            opened.append(value)
        print("[%d] Finished" % (context.myid, ))
        return opened

    runner = TaskProgramRunner(n, t)
    runner.add(_prog)
    results = await runner.join()

    # Every party reports, every party opened the full batch, all zeros.
    assert len(results) == n
    assert all(len(secrets) == number_of_secrets for secrets in results)
    assert all(secret == 0 for secrets in results for secret in secrets)
def _preprocess(n, t, k, to_generate):
    """Generate `k` preprocessed elements of each requested kind for an
    (n, t) network.

    :param n: number of parties.
    :param t: fault-tolerance threshold.
    :param k: number of elements to generate per kind.
    :param to_generate: iterable of kind names (e.g. "triples", "bits").
    :raises ValueError: if a kind has no automatic generator.
    """
    from honeybadgermpc.preprocessing import PreProcessedElements

    pp_elements = PreProcessedElements()
    # Dispatch table: kind name -> generator bound method.
    generators = {
        "triples": pp_elements.generate_triples,
        "cubes": pp_elements.generate_cubes,
        "zeros": pp_elements.generate_zeros,
        "rands": pp_elements.generate_rands,
        "bits": pp_elements.generate_bits,
        "one_minus_one": pp_elements.generate_one_minus_ones,
        "double_shares": pp_elements.generate_double_shares,
        "share_bits": pp_elements.generate_share_bits,
    }
    for kind in to_generate:
        generate = generators.get(kind)
        if generate is None:
            raise ValueError(f"{kind} must be manually preprocessed")
        generate(k, n, t)