async def test_butterfly_network():
    n, t, k, delta = 3, 1, 32, -9999

    # Generate enough preprocessed shares for the test run.
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)
    pp_elements.generate_one_minus_ones(1000, n, t)
    pp_elements.generate_triples(1500, n, t)

    async def verify_output(ctx, **kwargs):
        k, delta = kwargs["k"], kwargs["delta"]
        inputs = [ctx.preproc.get_rand(ctx) for _ in range(k)]
        sorted_input = sorted(
            await ctx.ShareArray(inputs).open(), key=lambda x: x.value
        )

        share_arr = await butterfly.butterfly_network_helper(
            ctx, k=k, delta=delta, inputs=inputs
        )
        outputs = await share_arr.open()

        # The shuffled output must be a permutation of the input:
        # after sorting both, they should match element by element.
        assert len(sorted_input) == len(outputs)
        sorted_output = sorted(outputs, key=lambda x: x.value)
        for i, j in zip(sorted_input, sorted_output):
            assert i == j

    program_runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShareArray: BeaverMultiplyArrays()}
    )
    program_runner.add(verify_output, k=k, delta=delta)
    await program_runner.join()
def get_butterfly_network_setup_commands(max_k, s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    k = max_k if max_k else AwsConfig.MPC_CONFIG.K

    logging.info("Starting to create preprocessing files.")
    stime = time()
    # Budget k * log2(k)^2 switches; the code below allocates two Beaver
    # triples and one (+1/-1) share per switch, plus k random input shares.
    num_switches = k * int(log(k, 2)) ** 2
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(2 * num_switches, n, t)
    pp_elements.generate_one_minus_ones(num_switches, n, t)
    pp_elements.generate_rands(k, n, t)
    logging.info(f"Preprocessing files created in {time() - stime} seconds.")

    logging.info("Uploading inputs to AWS S3.")
    stime = time()
    triple_urls = s3manager.upload_files(
        [
            pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, i)
            for i in range(n)
        ]
    )
    input_urls = s3manager.upload_files(
        [
            pp_elements.mixins[Constants.RANDS]._build_file_name(n, t, i)
            for i in range(n)
        ]
    )
    rand_share_urls = s3manager.upload_files(
        [
            pp_elements.mixins[Constants.ONE_MINUS_ONE]._build_file_name(n, t, i)
            for i in range(n)
        ]
    )
    logging.info(f"Inputs successfully uploaded in {time() - stime} seconds.")

    # Each instance pulls the Docker image and downloads only its own share files.
    setup_commands = [
        [
            instance_id,
            [
                "sudo docker pull %s" % AwsConfig.DOCKER_IMAGE_PATH,
                "mkdir -p sharedata",
                "cd sharedata; curl -sSO %s" % triple_urls[i],
                "cd sharedata; curl -sSO %s" % rand_share_urls[i],
                "cd sharedata; curl -sSO %s" % input_urls[i],
                "mkdir -p benchmark-logs",
            ],
        ]
        for i, instance_id in enumerate(instance_ids)
    ]
    return setup_commands
def _preprocess(n, t, k, to_generate):
    from honeybadgermpc.preprocessing import PreProcessedElements

    pp_elements = PreProcessedElements()
    for kind in to_generate:
        if kind == "triples":
            pp_elements.generate_triples(k, n, t)
        elif kind == "cubes":
            pp_elements.generate_cubes(k, n, t)
        elif kind == "zeros":
            pp_elements.generate_zeros(k, n, t)
        elif kind == "rands":
            pp_elements.generate_rands(k, n, t)
        elif kind == "bits":
            pp_elements.generate_bits(k, n, t)
        elif kind == "one_minus_one":
            pp_elements.generate_one_minus_ones(k, n, t)
        elif kind == "double_shares":
            pp_elements.generate_double_shares(k, n, t)
        elif kind == "share_bits":
            pp_elements.generate_share_bits(k, n, t)
        else:
            raise ValueError(f"{kind} must be manually preprocessed")
HbmpcConfig.load_config()

k = int(HbmpcConfig.extras["k"])

pp_elements = PreProcessedElements()
pp_elements.clear_preprocessing()

asyncio.set_event_loop(asyncio.new_event_loop())
loop = asyncio.get_event_loop()
loop.set_debug(True)
try:
    if not HbmpcConfig.skip_preprocessing:
        # Only the first party generates the preprocessed shares; the
        # others block until it signals that preprocessing is done.
        if HbmpcConfig.my_id == 0:
            NUM_SWITCHES = k * int(log(k, 2)) ** 2
            pp_elements.generate_one_minus_ones(
                NUM_SWITCHES, HbmpcConfig.N, HbmpcConfig.t
            )
            pp_elements.generate_triples(
                2 * NUM_SWITCHES, HbmpcConfig.N, HbmpcConfig.t
            )
            pp_elements.generate_rands(k, HbmpcConfig.N, HbmpcConfig.t)
            pp_elements.preprocessing_done()
        else:
            loop.run_until_complete(pp_elements.wait_for_preprocessing())

    loop.run_until_complete(
        _run(HbmpcConfig.peers, HbmpcConfig.N, HbmpcConfig.t, HbmpcConfig.my_id)
    )
finally:
    loop.close()
    pp_elements.clear_preprocessing()