async def test_cant_multiply_shares_from_different_contexts():
    from honeybadgermpc.mpc import TaskProgramRunner
    import asyncio

    n, t, k = 9, 2, 2000

    pp_elements = PreProcessedElements()
    pp_elements.generate_double_shares(k, n, t)
    pp_elements.generate_rands(k, n, t)

    async def _prog(context):
        share = context.Share(1)
        return share

    test_runner_1 = TaskProgramRunner(n, t)
    test_runner_2 = TaskProgramRunner(n, t)

    test_runner_1.add(_prog)
    test_runner_2.add(_prog)

    s1, s2 = await asyncio.gather(test_runner_1.join(), test_runner_2.join())

    async def _prog2(context):
        with raises(TypeError):
            await s1[0] * s2[0]

    test_runner_3 = TaskProgramRunner(
        n, t, {DoubleSharingMultiply.name: DoubleSharingMultiply()})
    test_runner_3.add(_prog2)
    await test_runner_3.join()
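
For contrast with the failing cross-context multiply above, a hedged sketch of the allowed case: both operands created inside the same context. It reuses the patterns of the other tests in this listing and assumes their module-level imports (PreProcessedElements, MixinConstants, BeaverMultiply).

async def sketch_multiply_shares_from_same_context():
    from honeybadgermpc.mpc import TaskProgramRunner
    import asyncio

    n, t = 9, 2
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(100, n, t)
    pp_elements.generate_rands(100, n, t)

    async def _prog(context):
        a_, b_ = [context.preproc.get_rand(context) for _ in range(2)]
        a, b = await asyncio.gather(a_.open(), b_.open())
        # Both operands come from the same context, so the multiply succeeds.
        assert await (a_ * b_).open() == a * b

    runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShare: BeaverMultiply()})
    runner.add(_prog)
    await runner.join()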
Example #2
async def test_phase1(galois_field):
    field = galois_field
    n, t, k = 5, 2, 1
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, 1)
    pp_elements.generate_rands(k, n, t)

    async def verify_phase1(ctx, **kwargs):
        k_ = kwargs["k"]
        b_ = await ctx.preproc.get_powers(ctx, 0)[0].open()
        file_prefixes = [uuid4().hex]
        await pm.all_secrets_phase1(ctx, k=k_, file_prefixes=file_prefixes)
        file_name = f"{file_prefixes[0]}-{ctx.myid}.input"
        file_path = f"{pp_elements.data_directory}{file_name}"
        with open(file_path, "r") as f:
            assert int(f.readline()) == field.modulus
            # next line is a random share, which should open successfully
            a_ = await ctx.Share(int(f.readline())).open()
            assert int(f.readline()) == (a_ - b_).value
            assert int(f.readline()) == k_
            for i in range(1, k_ + 1):
                opened = await ctx.Share(int(f.readline())).open()
                assert opened.value == b_ ** i

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(verify_phase1, k=k)
    await program_runner.join()
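
The asserts above pin down the phase-1 input file layout: one integer per line, in the order modulus, share of a random mask, public difference, power count, then the power shares. A hypothetical standalone reader under that assumption:

def read_phase1_file(path):
    # Layout inferred from the asserts in test_phase1; hypothetical helper.
    with open(path) as f:
        modulus = int(f.readline())       # field modulus
        mask_share = int(f.readline())    # share of a random mask a
        a_minus_b = int(f.readline())     # public value (a - b)
        k = int(f.readline())             # number of power shares that follow
        power_shares = [int(f.readline()) for _ in range(k)]  # shares of b**i
    return modulus, mask_share, a_minus_b, k, power_shares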
Example #3
async def test_open_future_shares():
    n, t = 4, 1
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)
    pp_elements.generate_triples(1000, n, t)

    async def _prog(context):
        e1_, e2_ = [context.preproc.get_rand(context) for _ in range(2)]
        e1, e2 = await asyncio.gather(*[e1_.open(), e2_.open()],
                                      return_exceptions=True)

        s_prod_f = e1_ * e2_
        s_prod_f2 = s_prod_f * e1_
        final_prod = s_prod_f2 + e1_ + e2_
        final_prod_2 = final_prod * e1_
        # Wrap the not-yet-awaited opening; opening the wrapper below should
        # yield the same value as opening final_prod_2 directly.
        wrapped_final_prod_2 = context.Share(final_prod_2.open())

        assert await s_prod_f2.open() == e1 * e1 * e2
        assert await final_prod.open() == e1 * e1 * e2 + e1 + e2
        assert await final_prod_2.open() == (e1 * e1 * e2 + e1 + e2) * e1
        assert await wrapped_final_prod_2.open() == await final_prod_2.open()

    program_runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShare: BeaverMultiply()})
    program_runner.add(_prog)
    await program_runner.join()
Example #4
async def test_butterfly_network():
    n, t, k, delta = 3, 1, 32, -9999
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)
    pp_elements.generate_one_minus_ones(1000, n, t)
    pp_elements.generate_triples(1500, n, t)

    async def verify_output(ctx, **kwargs):
        k, delta = kwargs["k"], kwargs["delta"]
        inputs = [ctx.preproc.get_rand(ctx) for _ in range(k)]
        sorted_input = sorted(await ctx.ShareArray(inputs).open(),
                              key=lambda x: x.value)

        share_arr = await butterfly.butterfly_network_helper(ctx,
                                                             k=k,
                                                             delta=delta,
                                                             inputs=inputs)
        outputs = await share_arr.open()

        assert len(sorted_input) == len(outputs)
        sorted_output = sorted(outputs, key=lambda x: x.value)
        for i, j in zip(sorted_input, sorted_output):
            assert i == j

    program_runner = TaskProgramRunner(
        n, t, {MixinConstants.MultiplyShareArray: BeaverMultiplyArrays()})
    program_runner.add(verify_output, k=k, delta=delta)
    await program_runner.join()
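
The generate_one_minus_ones call above supplies the shared ±1 switch bits the network consumes, one per 2×2 switch along with one multiplication. Below is a cleartext toy of the standard switch gadget; whether butterfly_network_helper uses exactly this form internally is an assumption.

p = 2**31 - 1                # a prime modulus for the toy field
inv2 = pow(2, p - 2, p)      # 1/2 mod p via Fermat's little theorem

def switch(x, y, s):
    # s == 1 keeps the order, s == -1 swaps it; in MPC, s is a shared value
    # and computing m costs one multiplication.
    m = s * (x - y) % p
    return ((x + y + m) * inv2 % p, (x + y - m) * inv2 % p)

assert switch(3, 7, 1) == (3, 7)
assert switch(3, 7, -1) == (7, 3)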
Example #5
def get_dkg_setup_commands(s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T

    logging.info("Starting to create preprocessing files.")
    stime = time()
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(600, n, t)
    pp_elements.generate_rands(600, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    logging.info("Uploading input files to AWS S3.")
    stime = time()

    triple_urls = s3manager.upload_files(
        [build_file_name_triple(n, t, i) for i in range(n)])
    input_urls = s3manager.upload_files(
        [build_file_name_rand(n, t, i) for i in range(n)])
    logging.info(f"Inputs successfully uploaded in {time()-stime} seconds.")

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (input_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]
    return setup_commands
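
The docker-pull/mkdir/curl block built here reappears almost verbatim in the butterfly-network setup in Example #9 below; a hypothetical helper that factors the pattern out (only the AwsConfig fields and the URL lists are names from this listing):

def build_party_commands(docker_image, i, *url_lists):
    # One curl per uploaded file for party i, framed by the fixed setup steps.
    commands = ["sudo docker pull %s" % docker_image, "mkdir -p sharedata"]
    commands += ["cd sharedata; curl -sSO %s" % urls[i] for urls in url_lists]
    commands.append("mkdir -p benchmark-logs")
    return commands

# Usage sketch, mirroring the list comprehension above:
# setup_commands = [
#     [instance_id,
#      build_party_commands(AwsConfig.DOCKER_IMAGE_PATH, i,
#                           triple_urls, input_urls)]
#     for i, instance_id in enumerate(instance_ids)]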
Example #6
async def prog():
    n, t = 4, 1
    pp = FakePreProcessedElements()
    pp.generate_zeros(1000, n, t)
    pp.generate_triples(120000, n, t)
    pp.generate_share_bits(1000, n, t)
    pp.generate_bits(3000, n, t)
    pp.generate_rands(10000, n, t)
    program_runner = TaskProgramRunner(n, t, config)
    program_runner.add(laesa_test_1)
    results = await program_runner.join()
    return results
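
This snippet depends on config and laesa_test_1 defined elsewhere in its module. A plausible stand-in for config, mirroring the mixin dictionaries of the other examples here (an assumption, not the snippet's actual value):

config = {
    MixinConstants.MultiplyShare: BeaverMultiply(),
    MixinConstants.MultiplyShareArray: BeaverMultiplyArrays(),
}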
Example #7
def get_powermixing_setup_commands(max_k, runid, s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    k = max_k if max_k else AwsConfig.MPC_CONFIG.K

    logging.info("Starting to create preprocessing files.")
    stime = time()
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, k)
    pp_elements.generate_rands(k, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    setup_commands = []
    total_time = 0
    logging.info(f"Uploading input files to AWS S3.")

    for i, instance_id in enumerate(instance_ids):
        url = s3manager.upload_file("aws/download_input.sh")
        commands = [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            f"curl -sSO {url}",
            "mkdir -p sharedata",
            "cp download_input.sh sharedata/download_input.sh ",
            "mkdir -p benchmark-logs",
            "ulimit -n 10000",
        ]
        file_names = []
        for j in range(k):
            prefix1 = f"{pp_elements.mixins[Constants.POWERS].file_prefix}_{j}"
            file_names.append(
                pp_elements.mixins[Constants.POWERS].build_filename(
                    n, t, i, prefix=prefix1))

        # The rands file is per-party, not per-power, so append it once
        # outside the loop rather than uploading it k times.
        file_names.append(
            pp_elements.mixins[Constants.RANDS].build_filename(n, t, i))

        stime = time()
        urls = s3manager.upload_files(file_names)
        total_time += time() - stime
        with open("%s-%d-links" % (runid, i), "w") as f:
            for url in urls:
                print(url, file=f)
        fname = f"{runid}-{i}-links"
        url = s3manager.upload_file(fname)
        commands.append(
            f"cd sharedata; curl -sSO {url}; bash download_input.sh {fname}")
        setup_commands.append([instance_id, commands])

    logging.info(f"Upload completed in {total_time} seconds.")

    return setup_commands
Example #8
async def test_get_rand():
    n, t = 4, 1
    num_rands = 2
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, t)

    async def _prog(ctx):
        for _ in range(num_rands):
            # Nothing to assert here; just check that the required
            # number of rands can be drawn without error.
            ctx.preproc.get_rand(ctx)

    program_runner = TaskProgramRunner(n, t)
    program_runner.add(_prog)
    await program_runner.join()
Example #9
def get_butterfly_network_setup_commands(max_k, s3manager, instance_ids):
    from honeybadgermpc.preprocessing import PreProcessedElements
    from honeybadgermpc.preprocessing import PreProcessingConstants as Constants

    n, t = AwsConfig.TOTAL_VM_COUNT, AwsConfig.MPC_CONFIG.T
    k = max_k if max_k else AwsConfig.MPC_CONFIG.K

    logging.info("Starting to create preprocessing files.")
    stime = time()
    num_switches = k * int(log(k, 2))**2
    pp_elements = PreProcessedElements()
    pp_elements.generate_triples(2 * num_switches, n, t)
    pp_elements.generate_one_minus_ones(num_switches, n, t)
    pp_elements.generate_rands(k, n, t)
    logging.info(f"Preprocessing files created in {time()-stime}")

    logging.info("Uploading inputs to AWS S3.")
    stime = time()
    triple_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.TRIPLES]._build_file_name(n, t, i)
        for i in range(n)
    ])
    input_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.RANDS]._build_file_name(n, t, i)
        for i in range(n)
    ])
    rand_share_urls = s3manager.upload_files([
        pp_elements.mixins[Constants.ONE_MINUS_ONE]._build_file_name(n, t, i)
        for i in range(n)
    ])
    logging.info(f"Inputs successfully uploaded in {time()-stime} seconds.")

    setup_commands = [[
        instance_id,
        [
            "sudo docker pull %s" % (AwsConfig.DOCKER_IMAGE_PATH),
            "mkdir -p sharedata",
            "cd sharedata; curl -sSO %s" % (triple_urls[i]),
            "cd sharedata; curl -sSO %s" % (rand_share_urls[i]),
            "cd sharedata; curl -sSO %s" % (input_urls[i]),
            "mkdir -p benchmark-logs",
        ],
    ] for i, instance_id in enumerate(instance_ids)]

    return setup_commands
Example #10
async def test_degree_reduction_share_array(test_runner):
    n, t = 7, 2
    pp_elements = PreProcessedElements()
    pp_elements.generate_rands(1000, n, 2 * t)
    pp_elements.generate_double_shares(1000, n, t)

    async def _prog(context):
        shares = [context.preproc.get_rand(context, 2 * t) for _ in range(10)]
        sh_x_2t = context.ShareArray(shares, 2 * t)
        x_actual = await (
            await DoubleSharingMultiplyArrays.reduce_degree_share_array(
                context, sh_x_2t)).open()

        x_expected = await sh_x_2t.open()
        for a, b in zip(x_actual, x_expected):
            assert a == b

    await run_test_program(_prog, test_runner, n, t)
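
For background on why the rands above are drawn at degree 2t: multiplying two degree-t Shamir sharings pointwise gives a degree-2t sharing, and degree reduction maps it back to t without changing the secret. A self-contained cleartext illustration (a toy, not honeybadgermpc code):

import random

p = 10007  # a small prime field for the toy

def share(secret, deg, xs):
    # Evaluate a random degree-`deg` polynomial with constant term `secret`.
    coeffs = [secret] + [random.randrange(p) for _ in range(deg)]
    return [sum(c * pow(x, j, p) for j, c in enumerate(coeffs)) % p
            for x in xs]

def interpolate_at_zero(xs, ys):
    # Lagrange interpolation at x = 0 over GF(p).
    total = 0
    for xi, yi in zip(xs, ys):
        num, den = 1, 1
        for xj in xs:
            if xj != xi:
                num = num * -xj % p
                den = den * (xi - xj) % p
        total = (total + yi * num * pow(den, p - 2, p)) % p
    return total

t, n = 2, 7
xs = list(range(1, n + 1))
a, b = 1234, 5678
# Pointwise products of the two share vectors form a degree-2t sharing...
prod = [x * y % p for x, y in zip(share(a, t, xs), share(b, t, xs))]
# ...so 2t + 1 points already determine a*b; degree reduction re-shares
# this same secret at degree t.
assert interpolate_at_zero(xs[:2 * t + 1], prod[:2 * t + 1]) == a * b % p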
Example #11
async def test_asynchronous_mixing():
    import asyncio
    import apps.asynchromix.powermixing as pm
    from honeybadgermpc.mpc import TaskProgramRunner

    n, t, k = 3, 1, 4
    pp_elements = PreProcessedElements()
    pp_elements.generate_powers(k, n, t, k)
    pp_elements.generate_rands(1000, n, t)

    async def verify_output(context, **kwargs):
        result, input_shares = kwargs["result"], kwargs["input_shares"]
        my_shares = input_shares[context.myid]
        assert len(result) == len(my_shares)

        inputs = await asyncio.gather(
            *[context.Share(sh.v, t).open() for sh in my_shares])
        assert sorted(map(lambda x: x.value, inputs)) == sorted(result)

    result, input_shares = await pm.async_mixing(n, t, k)
    program_runner = TaskProgramRunner(n, t)
    program_runner.add(verify_output, result=result, input_shares=input_shares)
    await program_runner.join()
Example #12
def _preprocess(n, t, k, to_generate):
    from honeybadgermpc.preprocessing import PreProcessedElements

    pp_elements = PreProcessedElements()
    for kind in to_generate:
        if kind == "triples":
            pp_elements.generate_triples(k, n, t)
        elif kind == "cubes":
            pp_elements.generate_cubes(k, n, t)
        elif kind == "zeros":
            pp_elements.generate_zeros(k, n, t)
        elif kind == "rands":
            pp_elements.generate_rands(k, n, t)
        elif kind == "bits":
            pp_elements.generate_bits(k, n, t)
        elif kind == "one_minus_one":
            pp_elements.generate_one_minus_ones(k, n, t)
        elif kind == "double_shares":
            pp_elements.generate_double_shares(k, n, t)
        elif kind == "share_bits":
            pp_elements.generate_share_bits(k, n, t)
        else:
            raise ValueError(f"{kind} must be manually preprocessed")
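
A minimal driver for the dispatcher above; parameter values are illustrative only:

# Hypothetical usage; values chosen only for illustration.
_preprocess(n=4, t=1, k=1000, to_generate=["rands", "triples", "bits"])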
Example #13
    k = int(HbmpcConfig.extras["k"])

    pp_elements = PreProcessedElements()
    pp_elements.clear_preprocessing()

    asyncio.set_event_loop(asyncio.new_event_loop())
    loop = asyncio.get_event_loop()

    try:
        if not HbmpcConfig.skip_preprocessing:
            # Need to keep these fixed when running on processes.
            field = GF(Subgroup.BLS12_381)
            a_s = [field(i) for i in range(1000 + k, 1000, -1)]

            if HbmpcConfig.my_id == 0:
                pp_elements.generate_rands(k, HbmpcConfig.N, HbmpcConfig.t)
                pp_elements.generate_powers(k, HbmpcConfig.N, HbmpcConfig.t, k)
                pp_elements.preprocessing_done()
            else:
                loop.run_until_complete(pp_elements.wait_for_preprocessing())

        loop.run_until_complete(
            async_mixing_in_processes(
                HbmpcConfig.peers,
                HbmpcConfig.N,
                HbmpcConfig.t,
                k,
                run_id,
                HbmpcConfig.my_id,
            ))
    finally:
        loop.close()