def test_that_upload_acknowledged_task_should_change_upload_finished_on_existing_related_verification_request_to_true_and_call_blender_verification_order_task(self):
    store_subtask(
        task_id=self.report_computed_task.task_to_compute.compute_task_def['task_id'],
        subtask_id=self.report_computed_task.task_to_compute.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
        next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
    )

    with mock.patch('conductor.tasks.blender_verification_order.delay') as mock_blender_verification_order:
        upload_acknowledged(
            subtask_id=self.report_computed_task.subtask_id,
            source_file_size=self.report_computed_task.task_to_compute.size,
            source_package_hash=self.report_computed_task.task_to_compute.package_hash,
            result_file_size=self.report_computed_task.size,
            result_package_hash=self.report_computed_task.package_hash,
        )

    self.verification_request.refresh_from_db()

    self.assertTrue(self.verification_request.upload_acknowledged)
    mock_blender_verification_order.assert_called_once_with(
        subtask_id=self.verification_request.subtask_id,
        source_package_path=self.verification_request.source_package_path,
        source_size=self.report_computed_task.task_to_compute.size,
        source_package_hash=self.report_computed_task.task_to_compute.package_hash,
        result_package_path=self.verification_request.result_package_path,
        result_size=self.report_computed_task.size,
        result_package_hash=self.report_computed_task.package_hash,
        output_format=self.verification_request.blender_subtask_definition.output_format,
        scene_file=self.verification_request.blender_subtask_definition.scene_file,
    )
def test_api_view_should_rollback_changes_on_400_error(self):
    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=hex_to_bytes_convert(self.report_computed_task.task_to_compute.provider_public_key),
        requestor_public_key=hex_to_bytes_convert(self.report_computed_task.task_to_compute.requestor_public_key),
        state=Subtask.SubtaskState.FORCING_RESULT_TRANSFER,
        next_deadline=(get_current_utc_timestamp() - 10),
        task_to_compute=self.task_to_compute,
        report_computed_task=self.report_computed_task,
        force_get_task_result=self.force_get_task_result,
    )

    self.assertEqual(Client.objects.count(), 2)

    with mock.patch(
        'core.subtask_helpers.verify_file_status',
        side_effect=_create_client_and_raise_http400_error_mock,
    ) as _create_client_and_raise_error_mock_function:
        self.client.post(
            reverse('core:send'),
            data=dump(self.force_report_computed_task, PROVIDER_PRIVATE_KEY, CONCENT_PUBLIC_KEY),
            content_type='application/octet-stream',
            HTTP_X_GOLEM_MESSAGES=settings.GOLEM_MESSAGES_VERSION,
        )

    _create_client_and_raise_error_mock_function.assert_called()
    self.assertEqual(Client.objects.count(), 2)
def test_that_send_should_refuse_request_if_all_stored_messages_have_incompatible_protocol_version(self):
    with override_settings(GOLEM_MESSAGES_VERSION='1.11.0'):
        store_subtask(
            task_id=self.task_to_compute.compute_task_def['task_id'],
            subtask_id=self.task_to_compute.compute_task_def['subtask_id'],
            provider_public_key=self.provider_public_key,
            requestor_public_key=self.requestor_public_key,
            state=Subtask.SubtaskState.FORCING_REPORT,
            next_deadline=int(self.task_to_compute.compute_task_def['deadline']) + settings.CONCENT_MESSAGING_TIME,
            task_to_compute=self.task_to_compute,
            report_computed_task=self.report_computed_task,
        )

    with mock.patch('core.views.logging.log_message_received') as log_not_called_mock, \
            mock.patch('core.subtask_helpers.log') as log_called_mock:
        response = self.send_request(
            url='core:send',
            data=dump(self.force_report_computed_task, self.PROVIDER_PRIVATE_KEY, CONCENT_PUBLIC_KEY),
        )

    self._test_response(
        response,
        status=200,
        key=self.PROVIDER_PRIVATE_KEY,
        message_type=message.concents.ServiceRefused,
        fields={
            'reason': message.concents.ServiceRefused.REASON.UnsupportedProtocolVersion,
        }
    )
    log_called_mock.assert_called()
    log_not_called_mock.assert_not_called()
    self.assertIn(
        f'Version stored in database is 1.11.0, Concent version is {settings.GOLEM_MESSAGES_VERSION}',
        str(log_called_mock.call_args)
    )
def setUp(self):
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def
    self.blender_crop_script_parameters = dict(
        resolution=self.compute_task_def['extra_data']['resolution'],
        samples=self.compute_task_def['extra_data']['samples'],
        use_compositing=self.compute_task_def['extra_data']['use_compositing'],
        borders_x=self.compute_task_def['extra_data']['crops'][0]['borders_x'],
        borders_y=self.compute_task_def['extra_data']['crops'][0]['borders_y'],
    )
    self.source_package_path = get_storage_source_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )

    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.REPORTED,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
        next_deadline=None,
    )
def test_that_storing_subtask_with_task_to_compute_nested_in_another_messages_will_not_raise_exception_when_messages_are_equal(
    self,
    task_to_compute,
    report_computed_task,
    ack_report_computed_task,
    reject_report_computed_task,
    force_get_task_result,
    subtask_results_rejected,
):
    try:
        store_subtask(
            task_id=task_to_compute.task_id,
            subtask_id=task_to_compute.subtask_id,
            provider_public_key=hex_to_bytes_convert(task_to_compute.provider_public_key),
            requestor_public_key=hex_to_bytes_convert(task_to_compute.requestor_public_key),
            state=Subtask.SubtaskState.ACCEPTED,
            task_to_compute=task_to_compute,
            report_computed_task=report_computed_task,
            next_deadline=None,
            ack_report_computed_task=ack_report_computed_task,
            reject_report_computed_task=reject_report_computed_task,
            force_get_task_result=force_get_task_result,
            subtask_results_rejected=subtask_results_rejected,
        )
        Subtask.objects.get(subtask_id=task_to_compute.subtask_id).delete()
    except Exception:  # pylint: disable=broad-except
        pytest.fail()
def test_that_upload_acknowledged_task_should_change_upload_finished_on_existing_related_verification_request_to_true_and_call_blender_verification_order_task(self):
    with freeze_time():
        store_subtask(
            task_id=self.report_computed_task.task_to_compute.compute_task_def['task_id'],
            subtask_id=self.report_computed_task.task_to_compute.compute_task_def['subtask_id'],
            provider_public_key=self.PROVIDER_PUBLIC_KEY,
            requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
            state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
            next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
            task_to_compute=self.report_computed_task.task_to_compute,
            report_computed_task=self.report_computed_task,
        )

        with mock.patch('conductor.tasks.transaction.on_commit') as transaction_on_commit, \
                mock.patch('conductor.tasks.filter_frames_by_blender_subtask_definition', return_value=[1]) as mock_frames_filtering:  # noqa: E125
            upload_acknowledged(
                subtask_id=self.report_computed_task.subtask_id,
                source_file_size=self.report_computed_task.task_to_compute.size,
                source_package_hash=self.report_computed_task.task_to_compute.package_hash,
                result_file_size=self.report_computed_task.size,
                result_package_hash=self.report_computed_task.package_hash,
            )

        self.verification_request.refresh_from_db()

        self.assertTrue(self.verification_request.upload_acknowledged)
        transaction_on_commit.assert_called_once()
        mock_frames_filtering.assert_called_once()
def create_n_deposits_with_subtasks(self, n=1, amount=2):
    for _ in range(n):
        task_to_compute = self._get_deserialized_task_to_compute(
            provider_public_key=self._get_provider_hex_public_key(),
            requestor_public_key=self._get_requestor_hex_public_key(),
            price=amount,
        )
        store_subtask(
            task_id=task_to_compute.task_id,
            subtask_id=task_to_compute.subtask_id,
            provider_public_key=self.PROVIDER_PUBLIC_KEY,
            requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
            state=Subtask.SubtaskState.ACCEPTED,
            next_deadline=None,
            task_to_compute=task_to_compute,
            report_computed_task=factories.tasks.ReportComputedTaskFactory(task_to_compute=task_to_compute),
        )
        deposit_claim = DepositClaim()
        deposit_claim.subtask_id = task_to_compute.subtask_id
        deposit_claim.payer_deposit_account = self.deposit_account
        deposit_claim.payee_ethereum_address = task_to_compute.provider_ethereum_address
        deposit_claim.concent_use_case = ConcentUseCase.FORCED_ACCEPTANCE
        deposit_claim.amount = amount
        deposit_claim.clean()
        deposit_claim.save()
    # Only the deposit claim and task created in the last loop iteration are returned.
    return deposit_claim, task_to_compute
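# Hypothetical companion test (a minimal sketch, not part of the original suite). It assumes the
# helper above lives on the same test case that defines self.deposit_account (as in the setUp
# shown further below) and only asserts on the objects the helper itself creates and returns.
def test_create_n_deposits_with_subtasks_returns_last_created_claim_and_task(self):
    deposit_claim, task_to_compute = self.create_n_deposits_with_subtasks(n=3, amount=5)

    # The helper returns the DepositClaim and TaskToCompute from the last loop iteration,
    # so both should reference the same subtask and carry the requested amount and use case.
    self.assertEqual(deposit_claim.subtask_id, task_to_compute.subtask_id)
    self.assertEqual(deposit_claim.amount, 5)
    self.assertEqual(deposit_claim.concent_use_case, ConcentUseCase.FORCED_ACCEPTANCE)
    self.assertTrue(Subtask.objects.filter(subtask_id=task_to_compute.subtask_id).exists())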
def store_report_computed_task_as_subtask(report_computed_task):
    store_subtask(
        task_id=report_computed_task.task_to_compute.task_id,
        subtask_id=report_computed_task.task_to_compute.subtask_id,
        provider_public_key=hex_to_bytes_convert(report_computed_task.task_to_compute.provider_public_key),
        requestor_public_key=hex_to_bytes_convert(report_computed_task.task_to_compute.requestor_public_key),
        state=Subtask.SubtaskState.FORCING_REPORT,
        next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=report_computed_task.task_to_compute,
        report_computed_task=report_computed_task,
    )
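# Hypothetical companion test (a minimal sketch, not part of the original suite). It assumes the
# same factories, Subtask model and Django test database that the surrounding tests use; the
# pytest.mark.django_db marker is an assumption about how this module runs its tests.
@pytest.mark.django_db
def test_store_report_computed_task_as_subtask_stores_subtask_in_forcing_report_state():
    report_computed_task = factories.tasks.ReportComputedTaskFactory()

    store_report_computed_task_as_subtask(report_computed_task)

    # The helper should have persisted the subtask in the FORCING_REPORT state.
    subtask = Subtask.objects.get(subtask_id=report_computed_task.task_to_compute.subtask_id)
    assert subtask.state_enum == Subtask.SubtaskState.FORCING_REPORT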
def test_that_concent_responds_with_service_refused_when_verification_for_this_subtask_is_duplicated(self):
    """
    Provider -> Concent: SubtaskResultsVerify
    Concent -> Provider: ServiceRefused (DuplicateRequest)
    """
    # given
    (serialized_subtask_results_verify, subtask_results_verify_time_str) = \
        self._create_serialized_subtask_results_verify()

    store_subtask(
        task_id=self.task_to_compute.task_id,
        subtask_id=self.task_to_compute.subtask_id,
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
        next_deadline=parse_iso_date_to_timestamp(subtask_results_verify_time_str) + (
            self.compute_task_def['deadline'] - self.task_to_compute.timestamp
        ),
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
    )
    self._assert_stored_message_counter_increased(3)

    # when
    with mock.patch("core.message_handlers.bankster.claim_deposit", side_effect=self.claim_deposit_true_mock):
        with freeze_time(subtask_results_verify_time_str):
            response = self.send_request(
                url='core:send',
                data=serialized_subtask_results_verify,
                HTTP_CONCENT_CLIENT_PUBLIC_KEY=self._get_encoded_provider_public_key(),
                HTTP_CONCENT_OTHER_PARTY_PUBLIC_KEY=self._get_encoded_requestor_public_key(),
            )

    # then
    self._test_response(
        response,
        status=200,
        key=self.PROVIDER_PRIVATE_KEY,
        message_type=message.concents.ServiceRefused,
        fields={
            'reason': message.concents.ServiceRefused.REASON.DuplicateRequest,
        }
    )
    self._assert_stored_message_counter_not_increased()
def test_that_update_subtask_passes_validations_from_passive_to_active_state(self):
    subtask = store_subtask(
        task_id=self.task_to_compute.compute_task_def['task_id'],
        subtask_id=self.task_to_compute.compute_task_def['subtask_id'],
        provider_public_key=self.provider_public_key,
        requestor_public_key=self.requestor_public_key,
        state=Subtask.SubtaskState.REPORTED,
        next_deadline=None,
        task_to_compute=self.task_to_compute,
        report_computed_task=self.report_computed_task,
    )

    subtask_state = Subtask.objects.get(
        subtask_id=self.task_to_compute.compute_task_def['subtask_id']
    ).state
    self.assertEqual(subtask_state, Subtask.SubtaskState.REPORTED.name)  # pylint: disable=no-member
    self.assertEqual(subtask.next_deadline, None)

    next_deadline = int(self.task_to_compute.compute_task_def['deadline']) + settings.CONCENT_MESSAGING_TIME

    update_and_return_updated_subtask(
        subtask=subtask,
        state=Subtask.SubtaskState.FORCING_RESULT_TRANSFER,
        next_deadline=next_deadline,
        force_get_task_result=self.force_get_task_result,
        set_next_deadline=True,
    )

    subtask_state = Subtask.objects.get(
        subtask_id=self.task_to_compute.compute_task_def['subtask_id']
    ).state
    self.assertEqual(subtask_state, Subtask.SubtaskState.FORCING_RESULT_TRANSFER.name)  # pylint: disable=no-member
    self.assertEqual(subtask.next_deadline, parse_timestamp_to_utc_datetime(next_deadline))
def setUp(self):
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.subtask = store_subtask(
        task_id=self.task_to_compute.task_id,
        subtask_id=self.task_to_compute.subtask_id,
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.ACCEPTED,
        next_deadline=None,
        task_to_compute=self.task_to_compute,
        report_computed_task=factories.tasks.ReportComputedTaskFactory(task_to_compute=self.task_to_compute),
    )
    self.subtask.full_clean()
    self.subtask.save()

    self.deposit_account = DepositAccount()
    self.deposit_account.client = self.subtask.requestor
    self.deposit_account.ethereum_address = self.task_to_compute.requestor_ethereum_address
    self.deposit_account.clean()
    self.deposit_account.save()

    self.deposit_claim = DepositClaim()
    self.deposit_claim.subtask_id = self.task_to_compute.subtask_id
    self.deposit_claim.payer_deposit_account = self.deposit_account
    self.deposit_claim.payee_ethereum_address = self.task_to_compute.provider_ethereum_address
    self.deposit_claim.concent_use_case = ConcentUseCase.FORCED_ACCEPTANCE
    self.deposit_claim.amount = 2
    self.deposit_claim.clean()
    self.deposit_claim.save()
def setUp(self):
    super().setUp()
    self.source_package_path = 'blender/source/ef0dc1/ef0dc1.zzz523.zip'
    self.result_package_path = 'blender/result/ef0dc1/ef0dc1.zzz523.zip'
    self.scene_file = 'blender/scene/ef0dc1/ef0dc1.zzz523.zip'
    self.compute_task_def = message.ComputeTaskDef()
    self.compute_task_def['task_id'] = 'ef0dc1'
    self.compute_task_def['subtask_id'] = 'zzz523'
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self._get_deserialized_task_to_compute(compute_task_def=self.compute_task_def)
    )

    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.REPORTED,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
        next_deadline=None,
    )
def store_report_computed_task_as_subtask(self, current_time, task_id, deadline, next_deadline, subtask_state):  # pylint: disable=no-self-use
    subtask_id = task_id + '1'
    file_content = '1'
    file_size = len(file_content)
    file_check_sum = 'sha1:' + hashlib.sha1(file_content.encode()).hexdigest()

    task_to_compute = self._get_deserialized_task_to_compute(
        task_id=task_id,
        subtask_id=subtask_id,
        deadline=deadline,
        price=0,
        timestamp=parse_timestamp_to_utc_datetime(current_time),
        signer_private_key=self.REQUESTOR_PRIVATE_KEY,
    )
    report_computed_task = ReportComputedTaskFactory(
        task_to_compute=task_to_compute,
        size=file_size,
        package_hash=file_check_sum,
        subtask_id=subtask_id,
        sign__privkey=self.PROVIDER_PRIVATE_KEY,
    )
    force_get_task_result = ForceGetTaskResult(
        report_computed_task=report_computed_task,
    )

    store_subtask(
        task_id=report_computed_task.task_to_compute.task_id,
        subtask_id=report_computed_task.task_to_compute.subtask_id,
        provider_public_key=hex_to_bytes_convert(report_computed_task.task_to_compute.provider_public_key),
        requestor_public_key=hex_to_bytes_convert(report_computed_task.task_to_compute.requestor_public_key),
        state=subtask_state,
        next_deadline=next_deadline,
        task_to_compute=report_computed_task.task_to_compute,
        report_computed_task=report_computed_task,
        force_get_task_result=force_get_task_result,
    )
def setUp(self):
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def
    self.source_package_path = get_storage_source_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )

    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.REPORTED,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
        next_deadline=None,
    )
def test_that_storing_subtask_with_task_to_compute_nested_in_another_messages_will_raise_exception_when_it_is_different_from_original_task_to_compute(
    self,
    task_to_compute,
    report_computed_task,
    ack_report_computed_task,
    reject_report_computed_task,
    force_get_task_result,
    subtask_results_rejected,
):
    with pytest.raises(ValidationError):
        store_subtask(
            task_id=task_to_compute.task_id,
            subtask_id=task_to_compute.subtask_id,
            provider_public_key=hex_to_bytes_convert(task_to_compute.provider_public_key),
            requestor_public_key=hex_to_bytes_convert(task_to_compute.requestor_public_key),
            state=Subtask.SubtaskState.ACCEPTED,
            task_to_compute=task_to_compute,
            report_computed_task=report_computed_task,
            next_deadline=None,
            ack_report_computed_task=ack_report_computed_task,
            reject_report_computed_task=reject_report_computed_task,
            force_get_task_result=force_get_task_result,
            subtask_results_rejected=subtask_results_rejected,
        )
def test_that_incorrect_version_of_golem_messages_in_stored_message_should_raise_validation_error(self):
    with override_settings(GOLEM_MESSAGES_VERSION=self.second_communication_protocol_version):
        task_to_compute = tasks.TaskToComputeFactory()
        report_computed_task = tasks.ReportComputedTaskFactory(task_to_compute=task_to_compute)
        with self.assertRaises(ValidationError) as error:
            with mock.patch('core.message_handlers.store_message', side_effect=self.store_message_with_custom_protocol_version):
                store_subtask(
                    task_id=task_to_compute.task_id,
                    subtask_id=task_to_compute.subtask_id,
                    task_to_compute=task_to_compute,
                    provider_public_key=hex_to_bytes_convert(task_to_compute.provider_public_key),
                    requestor_public_key=hex_to_bytes_convert(task_to_compute.requestor_public_key),
                    report_computed_task=report_computed_task,
                    state=Subtask.SubtaskState.REPORTED,
                    next_deadline=None,
                )

    self.assertIn(
        f"Unsupported Golem Message version. Version in: `task_to_compute` is {self.first_communication_protocol_version}, "
        f"Version in Concent is {self.second_communication_protocol_version}",
        str(error.exception)
    )
def setUp(self):
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.subtask = store_subtask(
        task_id=self.task_to_compute.task_id,
        subtask_id=self.task_to_compute.subtask_id,
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
        next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=self.task_to_compute,
        report_computed_task=self._get_deserialized_report_computed_task(
            task_to_compute=self.task_to_compute,
        ),
    )
def test_that_messages_are_stored_with_correct_timestamps(self):
    subtask = store_subtask(
        task_id=self.task_to_compute.compute_task_def['task_id'],
        subtask_id=self.task_to_compute.compute_task_def['subtask_id'],
        provider_public_key=self.provider_public_key,
        requestor_public_key=self.requestor_public_key,
        state=Subtask.SubtaskState.FORCING_REPORT,
        next_deadline=int(self.task_to_compute.compute_task_def['deadline']) + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=self.task_to_compute,
        report_computed_task=self.report_computed_task,
    )

    self.assertEqual(
        parse_iso_date_to_timestamp(subtask.task_to_compute.timestamp.isoformat()),
        parse_iso_date_to_timestamp(self.task_to_compute_timestamp)
    )
    self.assertEqual(
        parse_iso_date_to_timestamp(subtask.report_computed_task.timestamp.isoformat()),
        parse_iso_date_to_timestamp(self.report_computed_task_timestamp)
    )
def test_that_result_upload_finished_should_raise_exception_for_subtask_with_failed_state(self):
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.subtask = store_subtask(
        task_id=self.task_to_compute.task_id,
        subtask_id=self.task_to_compute.subtask_id,
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.FAILED,
        next_deadline=None,
        task_to_compute=self.task_to_compute,
        report_computed_task=self._get_deserialized_report_computed_task(
            task_to_compute=self.task_to_compute,
        ),
    )

    with mock.patch('core.tasks.logging.log') as log_info:
        result_upload_finished(self.subtask.subtask_id)  # pylint: disable=no-value-for-parameter

    self.assertIn(
        'result_upload_finished called for Subtask, but it has status FAILED',
        str(log_info.call_args)
    )
def setUp(self):
    super().setUp()
    (self.PROVIDER_PRIVATE_KEY, self.PROVIDER_PUBLIC_KEY) = generate_ecc_key_pair()
    (self.REQUESTOR_PRIVATE_KEY, self.REQUESTOR_PUBLIC_KEY) = generate_ecc_key_pair()
    self.task_to_compute = TaskToComputeFactory()
    self.subtask = store_subtask(
        task_id=self.task_to_compute.task_id,
        subtask_id=self.task_to_compute.subtask_id,
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
        next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=self.task_to_compute,
        report_computed_task=ReportComputedTaskFactory(
            task_to_compute=self.task_to_compute,
        ),
    )
def store_report_computed_task_as_subtask():
    task_to_compute = factories.tasks.TaskToComputeFactory()
    report_computed_task = factories.tasks.ReportComputedTaskFactory(task_to_compute=task_to_compute)
    ack_report_computed_task = factories.tasks.AckReportComputedTaskFactory(report_computed_task=report_computed_task)
    force_get_task_result = factories.concents.ForceGetTaskResultFactory(report_computed_task=report_computed_task)
    subtask_results_rejected = factories.tasks.SubtaskResultsRejectedFactory(report_computed_task=report_computed_task)

    subtask = store_subtask(
        task_id=task_to_compute.task_id,
        subtask_id=task_to_compute.subtask_id,
        provider_public_key=hex_to_bytes_convert(task_to_compute.provider_public_key),
        requestor_public_key=hex_to_bytes_convert(task_to_compute.requestor_public_key),
        state=Subtask.SubtaskState.ACCEPTED,
        task_to_compute=task_to_compute,
        report_computed_task=report_computed_task,
        next_deadline=None,
        ack_report_computed_task=ack_report_computed_task,
        reject_report_computed_task=None,
        force_get_task_result=force_get_task_result,
        subtask_results_rejected=subtask_results_rejected,
    )
    return subtask
def setUp(self) -> None:
    super().setUp()
    self.compute_task_def = self._get_deserialized_compute_task_def()
    self.task_to_compute = self._get_deserialized_task_to_compute(
        compute_task_def=self.compute_task_def,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )
    self.subtask = store_subtask(
        task_id=self.task_to_compute.compute_task_def['task_id'],
        subtask_id=self.task_to_compute.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.FORCING_REPORT,
        next_deadline=int(self.task_to_compute.compute_task_def['deadline']) + settings.CONCENT_MESSAGING_TIME,
        task_to_compute=self.task_to_compute,
        report_computed_task=self.report_computed_task,
    )
def test_that_concent_should_change_subtask_state_if_verification_is_after_deadline(self):
    """
    Tests that Concent changes the subtask state if verification happens after the deadline.
    To trigger the state change handled by the work queue mechanism, a duplicate SubtaskResultsVerify is sent.

    Provider -> Concent:   SubtaskResultsVerify
    Concent  -> Provider:  SubtaskResultsSettled
    Concent  -> Requestor: SubtaskResultsSettled
    """
    with freeze_time("2018-04-01 10:30:00"):
        subtask = store_subtask(
            task_id=self.task_to_compute.task_id,
            subtask_id=self.task_to_compute.subtask_id,
            provider_public_key=self.PROVIDER_PUBLIC_KEY,
            requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
            state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
            next_deadline=get_current_utc_timestamp() + (
                self.compute_task_def['deadline'] - self.task_to_compute.timestamp
            ),
            task_to_compute=self.report_computed_task.task_to_compute,
            report_computed_task=self.report_computed_task,
        )
        self._assert_stored_message_counter_increased(3)

        subtask_results_rejected = self._get_deserialized_subtask_results_rejected(
            reason=message.tasks.SubtaskResultsRejected.REASON.VerificationNegative,
            report_computed_task=self.report_computed_task,
        )

    with freeze_time(parse_timestamp_to_utc_datetime(parse_datetime_to_timestamp(subtask.next_deadline) + 1)):
        serialized_subtask_results_verify = self._get_serialized_subtask_results_verify(
            subtask_results_verify=self._get_deserialized_subtask_results_verify(
                subtask_results_rejected=subtask_results_rejected,
            )
        )

        response = self.send_request(
            url='core:send',
            data=serialized_subtask_results_verify,
        )
        assert response.status_code == 200

        subtask.refresh_from_db()
        self.assertEqual(subtask.state_enum, Subtask.SubtaskState.ACCEPTED)
        self.assertEqual(subtask.next_deadline, None)
        self._test_undelivered_pending_responses(
            subtask_id=subtask.subtask_id,
            client_public_key=self._get_encoded_provider_public_key(),
            client_public_key_out_of_band=self._get_encoded_provider_public_key(),
            expected_pending_responses_receive_out_of_band=[
                PendingResponse.ResponseType.SubtaskResultsSettled,
            ]
        )
        self._test_undelivered_pending_responses(
            subtask_id=subtask.subtask_id,
            client_public_key=self._get_encoded_requestor_public_key(),
            client_public_key_out_of_band=self._get_encoded_requestor_public_key(),
            expected_pending_responses_receive_out_of_band=[
                PendingResponse.ResponseType.SubtaskResultsSettled,
            ]
        )

        response_2 = self.send_request(
            url='core:receive',
            data=self._create_requestor_auth_message(),
        )
        self._test_response(
            response_2,
            status=200,
            key=self.REQUESTOR_PRIVATE_KEY,
            message_type=message.concents.SubtaskResultsSettled,
            fields={
                'origin': message.concents.SubtaskResultsSettled.Origin.ResultsRejected,
                'task_to_compute': self.report_computed_task.task_to_compute,
            }
        )

        response_3 = self.send_request(
            url='core:receive',
            data=self._create_provider_auth_message(),
        )
        self._test_response(
            response_3,
            status=200,
            key=self.PROVIDER_PRIVATE_KEY,
            message_type=message.concents.SubtaskResultsSettled,
            fields={
                'origin': message.concents.SubtaskResultsSettled.Origin.ResultsRejected,
                'task_to_compute': self.report_computed_task.task_to_compute,
            }
        )
def test_in_soft_shutdown_mode_concent_should_accept_messages_that_would_cause_transition_to_passive_state(self):
    """
    Tests that, in soft shutdown mode, Concent still accepts new messages which move subtasks into a passive
    state. It also checks that an email to admins is sent once all subtasks have been turned into passive states.
    """
    compute_task_def = self._get_deserialized_compute_task_def(
        task_id='1',
        subtask_id='8',
        deadline="2017-12-01 11:00:00",
    )
    task_to_compute = self._get_deserialized_task_to_compute(
        timestamp="2017-12-01 10:00:00",
        compute_task_def=compute_task_def,
    )
    report_computed_task = self._get_deserialized_report_computed_task(
        timestamp="2017-12-01 10:59:00",
        task_to_compute=task_to_compute,
    )

    with freeze_time("2017-12-01 11:00:00"):
        config.SOFT_SHUTDOWN_MODE = False
        store_subtask(
            task_id='1',
            subtask_id='8',
            provider_public_key=self.PROVIDER_PUBLIC_KEY,
            requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
            state=Subtask.SubtaskState.FORCING_REPORT,
            next_deadline=get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME,
            task_to_compute=task_to_compute,
            report_computed_task=report_computed_task,
        )
        config.SOFT_SHUTDOWN_MODE = True
        self.stored_message_counter = 2

    serialized_ack_report_computed_task = self._get_serialized_ack_report_computed_task(
        timestamp="2017-12-01 11:00:05",
        ack_report_computed_task=self._get_deserialized_ack_report_computed_task(
            timestamp="2017-12-01 11:00:05",
            subtask_id='8',
            report_computed_task=report_computed_task,
        ),
        requestor_private_key=self.REQUESTOR_PRIVATE_KEY,
    )

    with freeze_time("2017-12-01 11:00:05"):
        response = self.client.post(
            reverse('core:send'),
            data=serialized_ack_report_computed_task,
            content_type='application/octet-stream',
            HTTP_CONCENT_CLIENT_PUBLIC_KEY=self._get_encoded_requestor_public_key(),
        )

    self.assertEqual(response.status_code, 202)
    self.assertEqual(len(response.content), 0)
    self._assert_stored_message_counter_increased(increased_by=1)
    self._test_subtask_state(
        task_id='1',
        subtask_id='8',
        subtask_state=Subtask.SubtaskState.REPORTED,
        provider_key=self._get_encoded_provider_public_key(),
        requestor_key=self._get_encoded_requestor_public_key(),
        expected_nested_messages={'task_to_compute', 'report_computed_task', 'ack_report_computed_task'},
    )
    self._test_last_stored_messages(
        expected_messages=[
            message.AckReportComputedTask,
        ],
        task_id='1',
        subtask_id='8',
        timestamp="2017-12-01 11:00:05",
    )
    self._test_undelivered_pending_responses(
        subtask_id='8',
        client_public_key=self._get_encoded_provider_public_key(),
        expected_pending_responses_receive=[
            PendingResponse.ResponseType.ForceReportComputedTaskResponse,
        ]
    )
    self.assertEqual(len(mail.outbox), len(settings.ADMINS))