Example #1
def test_project_upload_invalid_clks_then_valid_clks_same_authentication(requests, valid_project_params):
    """
    Test that a token can be re-used to upload clks after the upload failed.
    So first, create a project, upload clks with a token (which should NOT work with a 400 error),
    and then re-upload clks using the same token which should work.
    """
    expected_number_parties = get_expected_number_parties(valid_project_params)

    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']

    assert len(update_tokens) == expected_number_parties

    small_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/clks_128B_1k.bin')
    token_to_reuse = update_tokens[0]
    # This should fail: the declared count (2000) does not match the 1000 clks actually in the file.
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 2000, expected_status_code=400)

    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000)
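
These tests all call an upload_binary_data_from_file helper defined elsewhere in the suite. A minimal sketch of what it could look like follows, reusing the module-level url and the requests fixture seen above; the endpoint path and the Authorization/Hash-Count header names are illustrative assumptions, not confirmed by these examples.

def upload_binary_data_from_file(
        requests, file_path, project_id, token, count,
        expected_status_code=201):
    # Hypothetical sketch only: the endpoint path and header names
    # are assumptions, not taken from the real service.
    with open(file_path, 'rb') as f:
        response = requests.post(
            url + '/projects/{}/clks'.format(project_id),
            headers={
                'Authorization': token,
                'Content-Type': 'application/octet-stream',
                'Hash-Count': str(count),
            },
            data=f)
    assert response.status_code == expected_status_code
    return response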
Example #2
def test_project_upload_using_twice_same_authentication(requests, valid_project_params):
    """
    Test that a token cannot be re-used to upload clks.
    So first, create a project, upload clks with a token (which should work), and then re-upload clks using the same
    token which should return a 403 error.
    """
    expected_number_parties = get_expected_number_parties(valid_project_params)
    if expected_number_parties < 2:
        # This test is only meaningful with at least two parties.
        return

    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']

    assert len(update_tokens) == expected_number_parties

    small_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/clks_128B_1k.bin')
    token_to_reuse = update_tokens[0]
    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000)

    upload_binary_data_from_file(
        requests,
        small_file_path, new_project_data['project_id'], token_to_reuse, 1000, expected_status_code=403)
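
The get_expected_number_parties helper is likewise assumed rather than shown. A plausible one-liner, assuming the valid_project_params fixture optionally carries a number_parties key that defaults to the two-party case (the key name is an assumption):

def get_expected_number_parties(project_params):
    # Hypothetical sketch: 'number_parties' is an assumed key name,
    # defaulting to two parties.
    return project_params.get('number_parties', 2)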
Example #3
def test_project_binary_data_uploaded(requests, valid_project_params, binary_test_file_path):
    """
    Test that every party can upload binary clk data with its own token and
    that a run over the uploaded data completes with the expected result.
    """
    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    update_tokens = new_project_data['update_tokens']
    expected_number_parties = get_expected_number_parties(valid_project_params)
    assert len(update_tokens) == expected_number_parties

    for token in update_tokens:
        upload_binary_data_from_file(
            requests,
            binary_test_file_path, new_project_data['project_id'], token, 1000)

    run_id = post_run(requests, new_project_data, 0.99)
    result = get_run_result(requests, new_project_data, run_id, wait=True)

    if valid_project_params['result_type'] == 'groups':
        assert 'groups' in result
        groups = result['groups']
        assert len(groups) == 1000
        for group in groups:
            dataset_indices = {di for di, _ in group}
            record_indices = {ri for _, ri in group}
            assert len(record_indices) == 1
            assert dataset_indices == set(range(expected_number_parties))
        # Check every record index is represented exactly once across the groups.
        # Each group holds a single record index, so any element of the group will do.
        all_record_indices = {next(iter(group))[1] for group in groups}
        assert all_record_indices == set(range(1000))
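
Example #3 also relies on post_run and get_run_result helpers to start a run at a similarity threshold and wait for its result. A rough sketch, under the assumption that runs are created at /projects/<id>/runs and that the project's result_token authorises reading the result (both the endpoint and the token key are assumptions):

import time

def post_run(requests, project_data, threshold):
    # Hypothetical sketch: starts a run at the given similarity
    # threshold. Endpoint path and token key are assumptions.
    response = requests.post(
        url + '/projects/{}/runs'.format(project_data['project_id']),
        headers={'Authorization': project_data['result_token']},
        json={'threshold': threshold})
    assert response.status_code == 201
    return response.json()['run_id']


def get_run_result(requests, project_data, run_id, wait=True, timeout=60):
    # Hypothetical sketch: polls the result endpoint until the run
    # finishes (HTTP 200), then returns the parsed JSON result.
    result_url = url + '/projects/{}/runs/{}/result'.format(
        project_data['project_id'], run_id)
    headers = {'Authorization': project_data['result_token']}
    deadline = time.monotonic() + timeout
    while True:
        response = requests.get(result_url, headers=headers)
        if response.status_code == 200 or not wait:
            return response.json()
        assert time.monotonic() < deadline, 'run did not finish in time'
        time.sleep(1)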
Example #4
def test_project_binary_data_invalid_buffer_size(
        requests, valid_project_params):
    """
    Test that uploads whose declared hash count is negative or does not match
    the actual file size are rejected with a 400 error, while the minimum
    upload of a single clk is accepted.
    """
    new_project_data = requests.post(url + '/projects',
                                     json={
                                         'schema': {},
                                         **valid_project_params
                                     }).json()
    pid = new_project_data['project_id']
    file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/clks_128B_1k.bin')

    # A negative count must be rejected outright.
    upload_binary_data_from_file(requests, file_path, pid, new_project_data['update_tokens'][0], -1, expected_status_code=400)

    # Now try uploads where the hash count is valid on its own but doesn't match the actual file size:
    upload_binary_data_from_file(requests, file_path, pid, new_project_data['update_tokens'][0], 1000000, expected_status_code=400)
    upload_binary_data_from_file(requests, file_path, pid, new_project_data['update_tokens'][0], 3, expected_status_code=400)

    # Now try the minimum upload size (1 clk)
    file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'testdata/single_clk.bin')
    upload_binary_data_from_file(requests, file_path, pid, new_project_data['update_tokens'][0], 1, expected_status_code=201)
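
The binary fixtures referenced throughout (testdata/clks_128B_1k.bin with 1000 clks of 128 bytes each, and testdata/single_clk.bin with one) could be regenerated with a short script, assuming the file format is simply the raw concatenation of fixed-size clks with no header; that format is an assumption, not confirmed by these examples.

import os

def write_random_clks(path, count, clk_size=128):
    # Hypothetical sketch: assumes the upload format is the raw
    # concatenation of fixed-size clks with no header or framing.
    with open(path, 'wb') as f:
        f.write(os.urandom(count * clk_size))

write_random_clks('testdata/clks_128B_1k.bin', 1000)
write_random_clks('testdata/single_clk.bin', 1)

Random contents are fine for Example #3's assertions because every party uploads the same file, so each record matches itself across all datasets at the 0.99 threshold.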