Example #1
import itertools
import time

def test_data_uploaded(requests, valid_project_params):
    new_project_data = requests.post(url + '/projects',
                                     headers={
                                         'Authorization': 'invalid'
                                     },
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
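    # Note: project creation requires no authentication, so the 'invalid'
    # Authorization header above is ignored; the rest of this test relies on
    # the creation having succeeded.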
    description_1 = get_project_description(requests, new_project_data)
    expected_number_parties = get_expected_number_parties(valid_project_params)
    assert description_1['number_parties'] == expected_number_parties
    assert description_1['parties_contributed'] == 0

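    # Generate one dataset of 100 CLKs per party, with an 80% overlap
    # between the datasets.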
    datasets = generate_overlapping_clk_data([100] * expected_number_parties,
                                             overlap=0.8)
    for i, dataset, update_token in zip(itertools.count(1), datasets,
                                        new_project_data['update_tokens']):
        r = requests.post(
            url + '/projects/{}/clks'.format(new_project_data['project_id']),
            headers={'Authorization': update_token},
            json={'clks': dataset})
        assert r.status_code == 201
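        # Give the service a moment to register the upload before polling
        # the project description.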
        time.sleep(0.5)
        description_2 = get_project_description(requests, new_project_data)
        assert description_2['number_parties'] == expected_number_parties
        assert description_2['parties_contributed'] == i
Example #2
def _check_new_project_response_fields(new_project_data, project_params):
    assert 'project_id' in new_project_data
    assert 'update_tokens' in new_project_data
    assert 'result_token' in new_project_data
    actual_number_parties = len(new_project_data['update_tokens'])
    expected_number_parties = get_expected_number_parties(project_params)
    assert actual_number_parties == expected_number_parties
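For context, this helper is meant to be called on the response of a project-creation request. Here is a minimal usage sketch, assuming the same `url`, `requests` fixture, and `valid_project_params` used throughout these examples (the test name itself is illustrative):

def test_new_project_response(requests, valid_project_params):
    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    _check_new_project_response_fields(new_project_data, valid_project_params)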
Example #3
def test_create_then_describe_valid_auth(requests, valid_project_params):
    project_response = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    r = requests.get(
        url + '/projects/{}'.format(project_response['project_id']),
        headers={'Authorization': project_response['result_token']})
    assert r.status_code == 200
    project_description = r.json()

    assert 'project_id' in project_description
    assert 'name' in project_description
    assert 'notes' in project_description
    assert 'schema' in project_description
    assert 'error' in project_description
    assert not project_description['error']
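    # Cryptographic material (the Paillier public key and context) must not
    # be exposed through the description endpoint.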
    assert 'public_key' not in project_description
    assert 'paillier_context' not in project_description

    assert valid_project_params['result_type'] == project_description['result_type']
    expected_number_parties = get_expected_number_parties(valid_project_params)
    assert expected_number_parties == project_description['number_parties']
    assert '' == project_description['name'], 'default name should be blank'
    assert '' == project_description['notes'], 'default notes should be blank'
Example #4
import os

def test_project_upload_invalid_clks_then_valid_clks_same_authentication(requests, valid_project_params):
    """
    Test that a token can be re-used to upload clks after the upload failed.
    So first, create a project, upload clks with a token (which should NOT work with a 400 error),
    and then re-upload clks using the same token which should work.
    """
    expected_number_parties = get_expected_number_parties(valid_project_params)

    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']

    assert len(update_tokens) == expected_number_parties

    small_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/clks_128B_1k.bin')
    token_to_reuse = update_tokens[0]
    # This should fail because the declared count (2000) does not match the
    # 1000 CLKs actually in the file.
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 2000, expected_status_code=400)

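    # The 400 failure must not consume the token: the same token now uploads
    # the file with the correct count of 1000.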
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000)
Example #5
import os

def test_project_upload_using_twice_same_authentication(requests, valid_project_params):
    """
    Test that a token cannot be re-used to upload clks.
    So first, create a project, upload clks with a token (which should work), and then re-upload clks using the same
    token which should return a 403 error.
    """
    expected_number_parties = get_expected_number_parties(valid_project_params)
    if expected_number_parties < 2:
        # This test only makes sense with at least two parties.
        return

    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']

    assert len(update_tokens) == expected_number_parties

    small_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/clks_128B_1k.bin')
    token_to_reuse = update_tokens[0]
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000)

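    # Re-using the already-spent token must be rejected with a 403.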
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000, expected_status_code=403)
Example #6
def test_project_binary_data_uploaded(requests, valid_project_params, binary_test_file_path):
    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']
    expected_number_parties = get_expected_number_parties(valid_project_params)
    assert len(update_tokens) == expected_number_parties

    for token in update_tokens:
        upload_binary_data_from_file(
            requests,
            binary_test_file_path, new_project_data['project_id'], token, 1000)

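    # Start a run at a 0.99 similarity threshold and wait for its result.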
    run_id = post_run(requests, new_project_data, 0.99)
    result = get_run_result(requests, new_project_data, run_id, wait=True)

    if valid_project_params['result_type'] == 'groups':
        assert 'groups' in result
        groups = result['groups']
        assert len(groups) == 1000
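        # Every group should contain one entry per party, all pointing at
        # the same record index.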
        for group in groups:
            dataset_indices = {di for di, _ in group}
            record_indices = {ri for _, ri in group}
            assert len(record_indices) == 1
            assert dataset_indices == set(range(expected_number_parties))
        # Check every record is represented
        all_record_indices = {next(iter(group))[1] for group in groups}
        assert all_record_indices == set(range(1000))
Example #7
def test_project_binary_data_upload_with_different_encoded_size(
        requests, encoding_size, valid_project_params):
    expected_number_parties = get_expected_number_parties(valid_project_params)
    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()

    common = next(
        binary_pack_for_upload(generate_clks(1, encoding_size), encoding_size))
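    # One shared CLK that every party will include in its upload,
    # guaranteeing at least one cross-party match.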

    data = []
    for i in range(expected_number_parties):
        generated_clks = generate_clks(499, encoding_size)
        packed_clks = binary_pack_for_upload(generated_clks, encoding_size)
        packed_joined = b''.join(packed_clks)
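        # Party 0 appends the shared CLK (record index 499); every other
        # party prepends it (record index 0).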
        packed_with_common = (packed_joined + common if i == 0
                              else common + packed_joined)
        data.append(packed_with_common)

    project_id = new_project_data['project_id']
    for d, token in zip(data, new_project_data['update_tokens']):
        assert len(d) == 500 * encoding_size
        upload_binary_data(requests,
                           d,
                           project_id,
                           token,
                           500,
                           size=encoding_size)

    run_id = post_run(requests, new_project_data, 0.99)
    result = get_run_result(requests,
                            new_project_data,
                            run_id,
                            wait=True,
                            timeout=240)
    if valid_project_params['result_type'] == 'groups':
        assert 'groups' in result
        groups = result['groups']
        groups_set = {frozenset(map(tuple, group)) for group in groups}
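        # The shared CLK should surface as one group: record 499 of party 0
        # matched with record 0 of every other party.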
        common_set = frozenset(
            (i, 499 if i == 0 else 0) for i in range(expected_number_parties))
        assert common_set in groups_set