def test_case_0_prove_of_concept(config: ConfigParser, **kwargs: Any) -> None:
    """
    1. Spawn MiddleMan and SigningService processes.
    2. Client sends GolemMessageFrame with correct TransactionSigningRequest to MiddleMan
       and receives response to prove that connection works.
    """
    # Initialize to None so the `finally` clause cannot raise an
    # UnboundLocalError (masking the real failure) if spawning either
    # process raises before the variable is assigned.
    middleman_process = None
    signing_service_process = None
    try:
        middleman_process = run_middleman()
        signing_service_process = run_signing_service()

        # Waiting for MiddleMan and SigningService to start.
        sleep(SLEEP_TIME_AFTER_SPAWNING_PROCESS)

        # Create GolemMessageFrame with correct TransactionSigningRequest.
        golem_message_frame = create_golem_message_frame_with_correct_transaction_signing_request(
            request_id=get_current_utc_timestamp(),
        )

        # Send message through wrapper and receive deserialized response.
        response = send_message_to_middleman_and_receive_response(
            message=golem_message_frame,
            config=config,
            concent_private_key=CONCENT_PRIVATE_KEY,
            concent_public_key=CONCENT_PUBLIC_KEY,
        )

        # Check response: type, payload type and echoed request_id.
        assert_condition(
            type(response),
            GolemMessageFrame,
            f'Deserialized response type is {type(response)} instead of GolemMessageFrame.'
        )
        assert_condition(
            type(response.payload),
            SignedTransaction,
            f'Deserialized response payload type is {type(response.payload)} instead of SignedTransaction.'
        )
        assert_condition(
            response.request_id,
            golem_message_frame.request_id,
            f'Deserialized response request_id is {response.request_id} instead of {golem_message_frame.request_id}.'
        )

    finally:
        # Kill only the processes that were actually spawned.
        if middleman_process is not None:
            middleman_process.kill()
        if signing_service_process is not None:
            signing_service_process.kill()
# Beispiel #2
# 0
def test_case_1_middleman_recovery(config: ConfigParser,
                                   **kwargs: Any) -> None:
    """
    1. Spawn MiddleMan and SigningService processes.
    2. Client sends GolemMessageFrame with correct TransactionSigningRequest to MiddleMan.
    3. Middleman is restarted. The connection and latest message is lost.
    4. Client sends GolemMessageFrame with correct TransactionSigningRequest to MiddleMan.
    5. Client receives response for latest message.
    """
    # Initialize to None so the `finally` clause cannot raise an
    # UnboundLocalError (masking the real failure) if any setup step
    # raises before these variables are assigned.
    middleman_process = None
    signing_service_process = None
    client_socket = None
    try:
        middleman_process = run_middleman()
        signing_service_process = run_signing_service()

        # Waiting for MiddleMan and SigningService to start.
        sleep(SLEEP_TIME_AFTER_SPAWNING_PROCESS)

        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        client_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)

        # Create GolemMessageFrame with correct TransactionSigningRequest.
        golem_message_frame = create_golem_message_frame_with_correct_transaction_signing_request(
            request_id=get_current_utc_timestamp(), )

        client_socket.connect((
            config.get(Components.MIDDLEMAN.value, 'host'),
            int(config.get(Components.MIDDLEMAN.value, 'port')),
        ))
        send_over_stream(
            connection=client_socket,
            raw_message=golem_message_frame,
            private_key=CONCENT_PRIVATE_KEY,
        )

        middleman_process.kill()

        # Waiting for MiddleMan to finish.
        sleep(SLEEP_TIME_AFTER_KILLING_PROCESS)

        middleman_process = run_middleman()

        # Waiting for MiddleMan to start.
        sleep(SLEEP_TIME_AFTER_SPAWNING_PROCESS)

        # Reading from the old connection should fail: the restarted
        # MiddleMan no longer knows about it.
        receive_frame_generator = unescape_stream(connection=client_socket)
        try:
            next(receive_frame_generator)
        except socket.error as exception:
            assert_condition(
                exception.args[0], socket.errno.ECONNRESET,
                'Connection should be reset by peer.')  # type: ignore

        # Create GolemMessageFrame with correct TransactionSigningRequest.
        golem_message_frame = create_golem_message_frame_with_correct_transaction_signing_request(
            request_id=get_current_utc_timestamp(), )

        # Send message through wrapper and receive deserialized response.
        response = send_message_to_middleman_and_receive_response(
            message=golem_message_frame,
            config=config,
            concent_private_key=CONCENT_PRIVATE_KEY,
            concent_public_key=CONCENT_PUBLIC_KEY,
        )

        # Check response: type, payload type and echoed request_id.
        assert_condition(
            type(response), GolemMessageFrame,
            f'Deserialized response type is {type(response)} instead of GolemMessageFrame.'
        )
        assert_condition(
            type(response.payload), SignedTransaction,
            f'Deserialized response payload type is {type(response.payload)} instead of SignedTransaction.'
        )
        assert_condition(
            response.request_id, golem_message_frame.request_id,
            f'Deserialized response request_id is {response.request_id} instead of {golem_message_frame.request_id}.'
        )

    finally:
        # Close the client socket and kill only the processes that were
        # actually spawned.
        if client_socket is not None:
            client_socket.close()
        if middleman_process is not None:
            middleman_process.kill()
        if signing_service_process is not None:
            signing_service_process.kill()
# Beispiel #3
# 0
def test_case_1_test_for_existing_file(cluster_consts: ProtocolConstants, cluster_url: str) -> None:
    """Case 1: ForceGetTaskResult flow where the result file is uploaded to storage."""
    timestamp = get_current_utc_timestamp()

    uploaded_content = 'test'
    uploaded_size = len(uploaded_content)
    uploaded_checksum = 'sha1:' + hashlib.sha1(uploaded_content.encode()).hexdigest()

    force_get_task_result = get_force_get_task_result(
        timestamp,
        size=uploaded_size,
        package_hash=uploaded_checksum,
    )

    storage_path = get_storage_result_file_path(
        task_id=force_get_task_result.task_id,
        subtask_id=force_get_task_result.subtask_id,
    )

    # Case 1 - test for existing file
    # Requestor forces the task result; Concent acknowledges it.
    api_request(
        cluster_url,
        'send',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        force_get_task_result,
        expected_status=200,
        expected_message_type=message.concents.AckForceGetTaskResult,
        expected_content_type='application/octet-stream',
    )

    # Provider receives the upload request carrying a file transfer token.
    force_get_task_result_upload = api_request(
        cluster_url,
        'receive',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(PROVIDER_PRIVATE_KEY, PROVIDER_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultUpload,
        expected_content_type='application/octet-stream',
    )

    upload_response = upload_file_to_storage_cluster(
        uploaded_content,
        storage_path,
        force_get_task_result_upload.file_transfer_token,  # type: ignore
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )

    assert_condition(upload_response.status_code, 200, 'File has not been stored on cluster')
    print('\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'.format(
        force_get_task_result.task_id,
        uploaded_checksum,
        uploaded_size
    ))
    time.sleep(0.5)

    # Requestor receives the download message for the stored file.
    api_request(
        cluster_url,
        'receive',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(REQUESTOR_PRIVATE_KEY, REQUESTOR_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultDownload,
        expected_content_type='application/octet-stream',
    )
def test_case_1_test_for_positive_case(cluster_consts: ProtocolConstants, cluster_url: str) -> None:  # pylint: disable=unused-argument
    """
    Positive additional-verification case:
    1. Provider sends SubtaskResultsVerify for a rejected subtask; Concent acknowledges it.
    2. Provider uploads the result and source packages to the storage cluster.
    3. After the verification deadline both parties receive SubtaskResultsSettled.
    4. Provider's GNTB balance grows by the price; requestor's deposit shrinks by it.
    """
    receive_pending_messages_for_requestor_and_provider(
        cluster_url,
        sci_base,
        CONCENT_PUBLIC_KEY
    )
    current_time = get_current_utc_timestamp()
    # Balances before verification, used to verify the final settlement.
    provider_deposit_value = sci_base.get_provider_gntb_balance()
    requestor_deposit_value = sci_base.get_requestor_deposit_value()

    current_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(current_dir, 'tests_resources', 'source.zip'), 'rb') as archive:
        source_file_content = archive.read()
    with open(os.path.join(current_dir, 'tests_resources', 'result.zip'), 'rb') as archive:
        result_file_content = archive.read()

    result_file_size = len(result_file_content)
    source_file_size = len(source_file_content)
    result_file_checksum = 'sha1:' + hashlib.sha1(result_file_content).hexdigest()
    source_file_checksum = 'sha1:' + hashlib.sha1(source_file_content).hexdigest()

    subtask_results_verify = get_subtask_results_verify(
        current_time,
        reason=message.tasks.SubtaskResultsRejected.REASON.VerificationNegative,
        report_computed_task_size=result_file_size,
        report_computed_task_package_hash=result_file_checksum,
        task_to_compute_size=source_file_size,
        task_to_compute_package_hash=source_file_checksum,
        price=10000,
        render_parameters=get_render_params()
    )

    ack_subtask_results_verify = api_request(
        cluster_url,
        'send',
        sci_base.provider_private_key,
        CONCENT_PUBLIC_KEY,
        subtask_results_verify,
        expected_status=200,
        expected_message_type=message.concents.AckSubtaskResultsVerify,
        expected_content_type='application/octet-stream',
    )

    response = upload_file_to_storage_cluster(
        result_file_content,
        ack_subtask_results_verify.file_transfer_token.files[0]['path'],  # type: ignore
        ack_subtask_results_verify.file_transfer_token,  # type: ignore
        sci_base.provider_private_key,
        sci_base.provider_public_key,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(response.status_code, 200, 'File has not been stored on cluster')
    print('\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'.format(
        subtask_results_verify.task_id,
        result_file_checksum,
        result_file_size
    ))

    response = upload_file_to_storage_cluster(
        source_file_content,
        ack_subtask_results_verify.file_transfer_token.files[1]['path'],  # type: ignore
        ack_subtask_results_verify.file_transfer_token,  # type: ignore
        sci_base.provider_private_key,
        sci_base.provider_public_key,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(response.status_code, 200, 'File has not been stored on cluster')
    print('\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'.format(
        subtask_results_verify.task_id,
        source_file_checksum,
        source_file_size
    ))

    # Adding calculated number of seconds to time sleep makes us sure that subtask is after deadline.
    # Clamped to zero because time.sleep() raises ValueError for negative
    # values (the deadline may already have passed once the uploads finish).
    sleep_time = max(
        0,
        calculate_verification_deadline(
            subtask_results_verify.subtask_results_rejected.timestamp,
            cluster_consts.additional_verification_call_time,
            subtask_results_verify.subtask_results_rejected.report_computed_task.size,
            cluster_consts.minimum_upload_rate,
        ) - current_time,
    )
    print(f"Going to sleep for {sleep_time} secs...")
    time.sleep(
        sleep_time
    )

    api_request(
        cluster_url,
        'receive',
        sci_base.requestor_private_key,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(sci_base.requestor_private_key, sci_base.requestor_public_key, CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled,
        expected_content_type='application/octet-stream',
    )

    api_request(
        cluster_url,
        'receive',
        sci_base.provider_private_key,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(sci_base.provider_private_key, sci_base.provider_public_key, CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled,
        expected_content_type='application/octet-stream',
    )
    sci_base.ensure_that_provider_has_specific_gntb_balance(value=provider_deposit_value + 10000)
    sci_base.ensure_that_requestor_has_specific_deposit_balance(value=requestor_deposit_value - 10000)
# Beispiel #5
# 0
def test_case_1_test_for_existing_file(cluster_consts, cluster_url, test_id):
    """Case 1: ForceGetTaskResult flow where the result file is uploaded to storage."""
    now = get_current_utc_timestamp()
    (subtask_id, task_id) = get_task_id_and_subtask_id(test_id, 'existing_file')

    payload = task_id
    payload_size = len(payload)
    payload_checksum = 'sha1:' + hashlib.sha1(payload.encode()).hexdigest()
    storage_path = get_storage_result_file_path(
        task_id=task_id,
        subtask_id=subtask_id,
    )

    # Case 1 - test for existing file
    # Requestor forces the task result; Concent acknowledges it.
    api_request(
        cluster_url,
        'send',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        get_force_get_task_result(
            task_id,
            subtask_id,
            now,
            cluster_consts,
            size=payload_size,
            package_hash=payload_checksum,
        ),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.AckForceGetTaskResult.TYPE,
        expected_content_type='application/octet-stream',
    )

    # Provider receives the upload request carrying a file transfer token.
    force_get_task_result_upload = api_request(
        cluster_url,
        'receive',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(PROVIDER_PRIVATE_KEY, PROVIDER_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultUpload.TYPE,
        expected_content_type='application/octet-stream',
    )

    upload_response = upload_file_to_storage_cluster(
        payload,
        storage_path,
        force_get_task_result_upload.file_transfer_token,
    )

    assert_condition(upload_response.status_code, 200, 'File has not been stored on cluster')
    print(
        '\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'
        .format(task_id, payload_checksum, payload_size))

    # Requestor receives the download message for the stored file.
    api_request(
        cluster_url,
        'receive',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(REQUESTOR_PRIVATE_KEY, REQUESTOR_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultDownload.TYPE,
        expected_content_type='application/octet-stream',
    )
def test_case_1_test_for_positive_case(cluster_consts, cluster_url, test_id):  # pylint: disable=unused-argument
    """
    Positive additional-verification case:
    1. Provider sends SubtaskResultsVerify for a rejected subtask; Concent acknowledges it.
    2. Provider uploads the result and source packages to the storage cluster.
    3. After the verification deadline both parties receive SubtaskResultsSettled.
    """
    current_time = get_current_utc_timestamp()
    (subtask_id, task_id) = get_task_id_and_subtask_id(test_id,
                                                       'existing_file')

    current_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(current_dir, 'tests_resources', 'source.zip'),
              'rb') as archive:
        source_file_content = archive.read()
    with open(os.path.join(current_dir, 'tests_resources', 'result.zip'),
              'rb') as archive:
        result_file_content = archive.read()

    result_file_size = len(result_file_content)
    source_file_size = len(source_file_content)
    result_file_checksum = 'sha1:' + hashlib.sha1(
        result_file_content).hexdigest()
    source_file_checksum = 'sha1:' + hashlib.sha1(
        source_file_content).hexdigest()

    # NOTE(review): the `script_src` literal below was split across two
    # physical lines in the original file, which is a syntax error for a
    # single-quoted string; it has been rejoined into one literal here.
    ack_subtask_results_verify = api_request(
        cluster_url,
        'send',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        get_subtask_results_verify(
            task_id,
            subtask_id,
            current_time,
            reason=message.tasks.SubtaskResultsRejected.REASON.VerificationNegative,
            report_computed_task_size=result_file_size,
            report_computed_task_package_hash=result_file_checksum,
            task_to_compute_size=source_file_size,
            task_to_compute_package_hash=source_file_checksum,
            script_src='# This template is rendered by\n# apps.blender.resources.scenefileeditor.generate_blender_crop_file(),\n# written to tempfile and passed as arg to blender.\nimport bpy\n\nclass EngineWarning(bpy.types.Operator):\n    bl_idname = "wm.engine_warning"\n    bl_label = "Inform about not supported rendering engine"\n\n    def execute(self, context):\n        self.report({"ERROR"}, "Engine " + bpy.context.scene.render.engine + \\\n                               " not supported by Golem")\n        return {"FINISHED"}\n\nclass ShowInformation(bpy.types.Operator):\n    bl_idname = "wm.scene_information"\n    bl_label = "Inform user about scene settings"\n\n\n    def execute(self, context):\n        self.report({"INFO"}, "Resolution: " +\n                              str(bpy.context.scene.render.resolution_x) +\n                               " x " +\n                               str(bpy.context.scene.render.resolution_y))\n        self.report({"INFO"}, "File format: " +\n                               str(bpy.context.scene.render.file_extension))\n        self.report({"INFO"}, "Filepath: " +\n                              str(bpy.context.scene.render.filepath))\n        self.report({"INFO"}, "Frames: " +\n                              str(bpy.context.scene.frame_start) + "-" +\n                              str(bpy.context.scene.frame_end) + ";" +\n                              str(bpy.context.scene.frame_step))\n\n        return {"FINISHED"}\n\n\nbpy.utils.register_class(EngineWarning)\nengine = bpy.context.scene.render.engine\nif engine not in ("BLENDER_RENDER", "CYCLES"):\n    bpy.ops.wm.engine_warning()\n\nbpy.utils.register_class(ShowInformation)\nbpy.ops.wm.scene_information()\n\n\nfor scene in bpy.data.scenes:\n\n    scene.render.tile_x = 0\n    scene.render.tile_y = 0\n    scene.render.resolution_x = 1024\n    scene.render.resolution_y = 768\n    scene.render.resolution_percentage = 100\n    scene.render.use_border = True\n    scene.render.use_crop_to_border = True\n    scene.render.border_max_x = 1.0\n    scene.render.border_min_x = 0.0\n    scene.render.border_min_y = 0.0\n    scene.render.border_max_y = 1.0\n    scene.render.use_compositing = bool(False)\n\n#and check if additional files aren\'t missing\nbpy.ops.file.report_missing_files()\n',
        ),
        headers={
            'Content-Type': 'application/octet-stream',
        },
        expected_status=200,
        expected_message_type=message.concents.AckSubtaskResultsVerify.TYPE,
        expected_content_type='application/octet-stream',
    )

    response = upload_file_to_storage_cluster(
        result_file_content,
        ack_subtask_results_verify.file_transfer_token.files[0]['path'],
        ack_subtask_results_verify.file_transfer_token,
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(response.status_code, 200,
                     'File has not been stored on cluster')
    print(
        '\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'
        .format(task_id, result_file_checksum, result_file_size))

    response = upload_file_to_storage_cluster(
        source_file_content,
        ack_subtask_results_verify.file_transfer_token.files[1]['path'],
        ack_subtask_results_verify.file_transfer_token,
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(response.status_code, 200,
                     'File has not been stored on cluster')
    print(
        '\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'
        .format(task_id, source_file_checksum, source_file_size))

    # Adding 10 seconds to time sleep makes us sure that subtask is after deadline.
    time.sleep(CALCULATED_VERIFICATION_TIME *
               (ADDITIONAL_VERIFICATION_TIME_MULTIPLIER / BLENDER_THREADS))

    api_request(
        cluster_url,
        'receive-out-of-band',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(REQUESTOR_PRIVATE_KEY, REQUESTOR_PUBLIC_KEY,
                                   CONCENT_PUBLIC_KEY),
        headers={
            'Content-Type': 'application/octet-stream',
        },
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled.TYPE,
        expected_content_type='application/octet-stream',
    )

    api_request(
        cluster_url,
        'receive-out-of-band',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(PROVIDER_PRIVATE_KEY, PROVIDER_PUBLIC_KEY,
                                   CONCENT_PUBLIC_KEY),
        headers={
            'Content-Type': 'application/octet-stream',
        },
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled.TYPE,
        expected_content_type='application/octet-stream',
    )
def test_case_6_test_without_script_src_in(cluster_consts, cluster_url,
                                           test_id):  # pylint: disable=unused-argument
    """
    Case 6: additional-verification flow for a SubtaskResultsVerify message
    that carries no script_src; both parties end up with SubtaskResultsSettled.
    """
    now = get_current_utc_timestamp()
    (subtask_id, task_id) = get_task_id_and_subtask_id(test_id, 'without_script_src')

    resources_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(resources_dir, 'tests_resources', 'source.zip'), 'rb') as archive:
        source_package = archive.read()
    with open(os.path.join(resources_dir, 'tests_resources', 'result.zip'), 'rb') as archive:
        result_package = archive.read()

    result_package_size = len(result_package)
    source_package_size = len(source_package)
    result_package_checksum = 'sha1:' + hashlib.sha1(result_package).hexdigest()
    source_package_checksum = 'sha1:' + hashlib.sha1(source_package).hexdigest()

    # Provider requests additional verification; Concent acknowledges it.
    ack_subtask_results_verify = api_request(
        cluster_url,
        'send',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        get_subtask_results_verify(
            task_id,
            subtask_id,
            now,
            reason=message.tasks.SubtaskResultsRejected.REASON.VerificationNegative,
            report_computed_task_size=result_package_size,
            report_computed_task_package_hash=result_package_checksum,
            task_to_compute_size=source_package_size,
            task_to_compute_package_hash=source_package_checksum,
        ),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.AckSubtaskResultsVerify.TYPE,
        expected_content_type='application/octet-stream',
    )

    # Upload the result package to the path given by the transfer token.
    upload_response = upload_file_to_storage_cluster(
        result_package,
        ack_subtask_results_verify.file_transfer_token.files[0]['path'],
        ack_subtask_results_verify.file_transfer_token,
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(upload_response.status_code, 200,
                     'File has not been stored on cluster')
    print(
        '\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'
        .format(task_id, result_package_checksum, result_package_size))

    # Upload the source package as well.
    upload_response = upload_file_to_storage_cluster(
        source_package,
        ack_subtask_results_verify.file_transfer_token.files[1]['path'],
        ack_subtask_results_verify.file_transfer_token,
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(upload_response.status_code, 200,
                     'File has not been stored on cluster')
    print(
        '\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'
        .format(task_id, source_package_checksum, source_package_size))

    # Adding 10 seconds to time sleep makes us sure that subtask is after deadline.
    time.sleep(CALCULATED_VERIFICATION_TIME *
               (ADDITIONAL_VERIFICATION_TIME_MULTIPLIER / BLENDER_THREADS))

    # Both parties collect their out-of-band SubtaskResultsSettled messages.
    api_request(
        cluster_url,
        'receive-out-of-band',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(REQUESTOR_PRIVATE_KEY, REQUESTOR_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled.TYPE,
        expected_content_type='application/octet-stream',
    )

    api_request(
        cluster_url,
        'receive-out-of-band',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(PROVIDER_PRIVATE_KEY, PROVIDER_PUBLIC_KEY, CONCENT_PUBLIC_KEY),
        headers={'Content-Type': 'application/octet-stream'},
        expected_status=200,
        expected_message_type=message.concents.SubtaskResultsSettled.TYPE,
        expected_content_type='application/octet-stream',
    )