Example 1
def get_dkg_setup_commands(s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T

    logging.info("Starting to create preprocessing files.")
    stime = time()
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(600, n, t)
    pp_elements.generate_rands(600, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    logging.info("Uploading input files to AWS S3.")
    stime = time()

    triple_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, i)
        for i in range(n)
    ])
    input_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.RANDS]._build_file_name(n, t, i)
        for i in range(n)
    ])
    logging.info(f"Inputs successfully uploaded in {time()-stime} seconds.")

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (input_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]
    return setup_commands
Example 2
def get_ipc_setup_commands(s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T

    num_triples = AwsConfig.MPC_CONFIG.NUM_TRIPLES
    pp_elements = PreProcessedElements()

    pp_elements.generate_zeros(num_triples, n, t)
    pp_elements.generate_triples(num_triples, n, t)

    triple_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, i)
        for i in range(n)
    ])
    zero_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.ZEROS]._build_file_name(n, t, i)
        for i in range(n)
    ])

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (zero_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]

    return setup_commands
Example 3
async def test_open_future_shares():
    n, t = 4, 1
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)
    pp_elements.generate_triples(1000, n, t)

    async def _prog(context):
        e1_, e2_ = [context.preproc.get_rand(context) for _ in range(2)]
        e1, e2 = await asyncio.gather(*[e1_.open(), e2_.open()],
                                      return_exceptions=True)

        s_prod_f = e1_ * e2_
        s_prod_f2 = s_prod_f * e1_
        final_prod = s_prod_f2 + e1_ + e2_
        final_prod_2 = final_prod * e1_
        wrapped_final_prod_2 = context.Share(final_prod_2.open())

        assert await s_prod_f2.open() == e1 * e1 * e2
        assert await final_prod.open() == e1 * e1 * e2 + e1 + e2
        assert await final_prod_2.open() == (e1 * e1 * e2 + e1 + e2) * e1
        assert await wrapped_final_prod_2.open() == await final_prod_2.open()

    program_runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShare: BeaverMultiply()})
    program_runner.add(_prog)
    await program_runner.join()
Example 4
async def test_butterfly_network():
    n, t, k, delta = 3, 1, 32, -9999
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)
    pp_elements.generate_one_minus_ones(1000, n, t)
    pp_elements.generate_triples(1500, n, t)

    async def verify_output(ctx, **kwargs):
        k, delta = kwargs["k"], kwargs["delta"]
        inputs = [ctx.preproc.get_rand(ctx) for _ in range(k)]
        sorted_input = sorted(
            await ctx.ShareArray(inputs).open(), key=lambda x: x.value
        )

        share_arr = await butterfly.butterfly_network_helper(
            ctx, k=k, delta=delta, inputs=inputs
        )
        outputs = await share_arr.open()

        assert len(sorted_input) == len(outputs)
        sorted_output = sorted(outputs, key=lambda x: x.value)
        for i, j in zip(sorted_input, sorted_output):
            assert i == j

    program_runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShareArray: BeaverMultiplyArrays()}
    )
    program_runner.add(verify_output, k=k, delta=delta)
    await program_runner.join()
Example 5
async def tutorial_fixedpoint():
    n, t = 4, 1
    pp = FakePreProcessedElements()
    pp.generate_zeros(100, n, t)
    pp.generate_triples(1000, n, t)
    pp.generate_bits(1000, n, t)
    program_runner = TaskProgramRunner(n, t, config)
    program_runner.add(_prog)
    results = await program_runner.join()
    return results
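
tutorial_fixedpoint relies on a module-level config and _prog that are not shown in this excerpt. A minimal sketch of what they could look like, following the pattern of Examples 3 and 8 (the definitions below are illustrative assumptions, not the tutorial's actual code; MixinConstants and BeaverMultiply are assumed to be imported as in Example 3):

config = {MixinConstants.MultiplyShare: BeaverMultiply()}

async def _prog(ctx):
    # Illustrative program: take one preprocessed Beaver triple, multiply
    # its two factors via the BeaverMultiply mixin configured above, and
    # check the opened product against the opened triple value.
    a, b, ab = ctx.preproc.get_triples(ctx)
    product = await (a * b).open()
    assert product == await ab.open()
    return product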
Example 6
async def tutorial_1():
    # Create a test network of 4 nodes (no sockets, just asyncio tasks)
    n, t = 4, 1
    pp = FakePreProcessedElements()
    pp.generate_zeros(100, n, t)
    pp.generate_triples(100, n, t)
    pp.generate_bits(100, n, t)
    program_runner = TaskProgramRunner(n, t, config)
    program_runner.add(prog)
    results = await program_runner.join()
    return results
Example 7
async def prog():
    n, t = 4, 1
    pp = FakePreProcessedElements()
    pp.generate_zeros(1000, n, t)
    pp.generate_triples(120000, n, t)
    pp.generate_share_bits(1000, n, t)
    pp.generate_bits(3000, n, t)
    pp.generate_rands(10000, n, t)
    program_runner = TaskProgramRunner(n, t, config)
    program_runner.add(laesa_test_1)
    results = await program_runner.join()
    return results
Example 8
async def test_get_triple():
    n, t = 4, 1
    num_triples = 2
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(1000, n, t)

    async def _prog(ctx):
        for _ in range(num_triples):
            a_sh, b_sh, ab_sh = ctx.preproc.get_triples(ctx)
            a, b, ab = await a_sh.open(), await b_sh.open(), await ab_sh.open()
            assert a * b == ab

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
Example 9
def get_butterfly_network_setup_commands(max_k, s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    k = max_k if max_k else AwsConfig.MPC_CONFIG.K

    logging.info("Starting to create preprocessing files.")
    stime = time()
    num_switches = k * int(log(k, 2))**2
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(2 * num_switches, n, t)
    pp_elements.generate_one_minus_ones(num_switches, n, t)
    pp_elements.generate_rands(k, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    logging.info("Uploading inputs to AWS S3.")
    stime = time()
    triple_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, i)
        for i in range(n)
    ])
    input_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.RANDS]._build_file_name(n, t, i)
        for i in range(n)
    ])
    rand_share_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.ONE_MINUS_ONE]._build_file_name(n, t, i)
        for i in range(n)
    ])
    logging.info(f"Inputs successfully uploaded in {time()-stime} seconds.")

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (rand_share_urls[i]),
            "cd sharedata; curl -sSO %s" % (input_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]

    return setup_commands
Example 10
async def test_mpc_programs(peers, n, t, my_id):
    from honeybadgermpc.mpc import test_prog1, test_prog2, test_batchopening
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import wait_for_preprocessing, preprocessing_done

    if not HbmpcConfig.skip_preprocessing:
        # Only one party needs to generate the preprocessed elements for testing
        if HbmpcConfig.my_id == 0:
            pp_elements = PreProcessedElements()
            pp_elements.generate_zeros(1000, HbmpcConfig.N, HbmpcConfig.t)
            pp_elements.generate_triples(1000, HbmpcConfig.N, HbmpcConfig.t)
            preprocessing_done()
        else:
            await wait_for_preprocessing()

    async with ProcessProgramRunner(peers, n, t, my_id) as runner:
        test_prog1  # referenced but not executed; r1 = runner.execute("0", test_prog1)
        r2 = runner.execute("1", test_prog2)
        r3 = runner.execute("2", test_batchopening)
        results = await asyncio.gather(*[r2, r3])
        return results
Example 11
def _preprocess(n, t, k, to_generate):
    from honeybadgermpc.preprocessing import PreProcessedElements

    pp_elements = PreProcessedElements()
    for kind in to_generate:
        if kind == "triples":
            pp_elements.generate_triples(k, n, t)
        elif kind == "cubes":
            pp_elements.generate_cubes(k, n, t)
        elif kind == "zeros":
            pp_elements.generate_zeros(k, n, t)
        elif kind == "rands":
            pp_elements.generate_rands(k, n, t)
        elif kind == "bits":
            pp_elements.generate_bits(k, n, t)
        elif kind == "one_minus_one":
            pp_elements.generate_one_minus_ones(k, n, t)
        elif kind == "double_shares":
            pp_elements.generate_double_shares(k, n, t)
        elif kind == "share_bits":
            pp_elements.generate_share_bits(k, n, t)
        else:
            raise ValueError(f"{kind} must be manually preprocessed")
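
For reference, a hypothetical call to _preprocess (the parameter values below are illustrative, not taken from the source):

# Illustrative only: generate 1000 elements of each listed kind for a
# 4-party, t=1 setting, using kinds handled by _preprocess above.
_preprocess(n=4, t=1, k=1000, to_generate=["triples", "rands", "bits"])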
Example 12

if __name__ == "__main__":
    from honeybadgermpc.config import HbmpcConfig

    HbmpcConfig.load_config()

    k = int(HbmpcConfig.extras["k"])

    pp_elements = PreProcessedElements()
    pp_elements.clear_preprocessing()

    asyncio.set_event_loop(asyncio.new_event_loop())
    loop = asyncio.get_event_loop()
    loop.set_debug(True)
    try:
        if not HbmpcConfig.skip_preprocessing:
            if HbmpcConfig.my_id == 0:
                NUM_SWITCHES = k * int(log(k, 2))**2
                pp_elements.generate_one_minus_ones(NUM_SWITCHES,
                                                    HbmpcConfig.N,
                                                    HbmpcConfig.t)
                pp_elements.generate_triples(2 * NUM_SWITCHES, HbmpcConfig.N,
                                             HbmpcConfig.t)
                pp_elements.generate_rands(k, HbmpcConfig.N, HbmpcConfig.t)
                pp_elements.preprocessing_done()
            else:
                loop.run_until_complete(pp_elements.wait_for_preprocessing())

        loop.run_until_complete(
            _run(HbmpcConfig.peers, HbmpcConfig.N, HbmpcConfig.t,
                 HbmpcConfig.my_id))
    finally:
        loop.close()
        pp_elements.clear_preprocessing()
Example 13

if __name__ == "__main__":
    from honeybadgermpc.config import HbmpcConfig
    import sys

    if not HbmpcConfig.peers:
        print(f"WARNING: the $CONFIG_PATH environment variable wasn't set. "
              f"Please run this file with `scripts/launch-tmuxlocal.sh "
              f"apps/tutorial/hbmpc-tutorial-2.py conf/mpc/local`")
        sys.exit(1)

    asyncio.set_event_loop(asyncio.new_event_loop())
    loop = asyncio.get_event_loop()
    loop.set_debug(True)
    try:
        pp_elements = FakePreProcessedElements()
        if HbmpcConfig.my_id == 0:
            k = 100  # How many of each kind of preproc
            pp_elements.generate_bits(k, HbmpcConfig.N, HbmpcConfig.t)
            pp_elements.generate_triples(k, HbmpcConfig.N, HbmpcConfig.t)
            pp_elements.preprocessing_done()
        else:
            loop.run_until_complete(pp_elements.wait_for_preprocessing())

        loop.run_until_complete(
            _run(HbmpcConfig.peers, HbmpcConfig.N, HbmpcConfig.t,
                 HbmpcConfig.my_id))
    finally:
        loop.close()
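
Both __main__ blocks above hand off to a _run coroutine that is defined elsewhere in their modules and is not shown in this excerpt. A minimal sketch of its likely shape, reusing the ProcessProgramRunner pattern from Example 10 (some_prog and the session id "0" are illustrative placeholders, and ProcessProgramRunner is assumed to be imported as in Example 10):

async def some_prog(ctx):
    # Placeholder program: open one preprocessed random share.
    return await ctx.preproc.get_rand(ctx).open()

async def _run(peers, n, t, my_id):
    # Open a process-based runner over the configured peers and execute
    # the placeholder program; "0" is an arbitrary session id.
    async with ProcessProgramRunner(peers, n, t, my_id) as runner:
        return await runner.execute("0", some_prog)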