def setUp(self):
    """Build a fixture ReportComputedTask whose computation deadline falls shortly after a fixed moment."""
    self.time = datetime.datetime.strptime("2017-11-17 10:00:00", "%Y-%m-%d %H:%M:%S")
    self.deadline = 10
    self.authorized_client_public_key = b'7' * 64
    # Deadline is expressed as a unix timestamp: the fixed moment plus the offset above.
    deadline_timestamp = parse_datetime_to_timestamp(self.time) + self.deadline
    self.report_computed_task = ReportComputedTaskFactory(
        task_to_compute__compute_task_def__deadline=deadline_timestamp,
    )
def test_that_download_deadline_query_gives_correct_value_without_protocol_custom_times(self):
    """With CUSTOM_PROTOCOL_TIMES disabled, download_deadline must equal
    computation_deadline (as a timestamp) plus subtask_verification_time."""
    with override_settings(CUSTOM_PROTOCOL_TIMES=False):
        # Fetch the annotated subtask once instead of re-running the identical
        # query three times as the original assertion did.
        subtask = Subtask.objects_with_timing_columns.get(
            subtask_id=self.report_computed_task.task_to_compute.subtask_id
        )
        assert_that(subtask.download_deadline).is_equal_to(
            parse_datetime_to_timestamp(subtask.computation_deadline) +
            subtask.subtask_verification_time
        )
def test_that_verification_result_after_deadline_should_add_pending_messages_subtask_results_settled_and_change_subtask_state_to_accepted(self):
    """A MATCH verification result arriving past the deadline must settle the
    subtask as ACCEPTED, clear its deadline, and queue SubtaskResultsSettled
    for both the provider and the requestor."""
    moment_after_deadline = parse_timestamp_to_utc_datetime(
        parse_datetime_to_timestamp(self.subtask.next_deadline) + 1
    )
    with freeze_time(moment_after_deadline):
        verification_result(  # pylint: disable=no-value-for-parameter
            self.subtask.subtask_id,
            VerificationResult.MATCH.name,
        )
    self.subtask.refresh_from_db()
    self.assertEqual(self.subtask.state_enum, Subtask.SubtaskState.ACCEPTED)
    self.assertIsNone(self.subtask.next_deadline)
    self.assertEqual(PendingResponse.objects.count(), 2)
    # One pending response each for the provider and the requestor.
    for client in (self.subtask.provider, self.subtask.requestor):
        self.assertTrue(PendingResponse.objects.filter(client=client).exists())
def test_that_scheduling_task_for_subtask_after_deadline_should_process_timeout(self):
    """upload_finished past the deadline must mark the subtask FAILED, queue
    SubtaskResultsSettled for both clients and force a payment to the provider."""
    # Renamed from `datetime`, which shadowed the datetime module inside this test.
    frozen_datetime = parse_timestamp_to_utc_datetime(
        get_current_utc_timestamp() + settings.CONCENT_MESSAGING_TIME + 1
    )
    with freeze_time(frozen_datetime):
        with mock.patch('core.tasks.payments_service.make_force_payment_to_provider', autospec=True) as payment_function_mock:
            upload_finished(self.subtask.subtask_id)  # pylint: disable=no-value-for-parameter
    self.subtask.refresh_from_db()
    self.assertEqual(self.subtask.state_enum, Subtask.SubtaskState.FAILED)
    self.assertEqual(self.subtask.next_deadline, None)
    self.assertEqual(PendingResponse.objects.count(), 2)
    self.assertTrue(PendingResponse.objects.filter(client=self.subtask.provider).exists())
    self.assertTrue(PendingResponse.objects.filter(client=self.subtask.requestor).exists())
    payment_function_mock.assert_called_once_with(
        requestor_eth_address=self.task_to_compute.requestor_ethereum_address,
        provider_eth_address=self.task_to_compute.provider_ethereum_address,
        value=self.task_to_compute.price,
        payment_ts=parse_datetime_to_timestamp(frozen_datetime),
    )
def test_that_download_deadline_query_gives_correct_value(
        self, minimum_upload_rate, download_leadin_time, concent_messaging_time, custom_protocol_times):
    """For any combination of protocol-time settings, download_deadline must
    equal computation_deadline (as a timestamp) plus subtask_verification_time."""
    with override_settings(
        MINIMUM_UPLOAD_RATE=minimum_upload_rate,
        DOWNLOAD_LEADIN_TIME=download_leadin_time,
        CONCENT_MESSAGING_TIME=concent_messaging_time,
        CUSTOM_PROTOCOL_TIMES=custom_protocol_times,
    ):
        # Fetch the annotated subtask once instead of re-running the identical
        # query three times as the original assertion did.
        subtask = Subtask.objects_with_timing_columns.get(
            subtask_id=self.report_computed_task.task_to_compute.subtask_id
        )
        assert_that(subtask.download_deadline).is_equal_to(
            parse_datetime_to_timestamp(subtask.computation_deadline) +
            subtask.subtask_verification_time
        )
def call_blender_verification_order() -> None:
    """Enqueue a blender_verification_order task built from the enclosing
    verification request and file metadata (closure variables)."""
    subtask_definition = verification_request.blender_subtask_definition
    crop_script_parameters_dict = parse_blender_crop_script_parameters_to_dict_from_query(
        subtask_definition.blender_crop_script_parameters
    )
    blender_verification_order.delay(
        subtask_id=verification_request.subtask_id,
        source_package_path=verification_request.source_package_path,
        source_size=source_file_size,
        source_package_hash=source_package_hash,
        result_package_path=verification_request.result_package_path,
        result_size=result_file_size,
        result_package_hash=result_package_hash,
        output_format=subtask_definition.output_format,
        scene_file=subtask_definition.scene_file,
        verification_deadline=parse_datetime_to_timestamp(verification_request.verification_deadline),
        frames=frames,
        blender_crop_script_parameters=crop_script_parameters_dict,
    )
def test_that_scheduling_task_for_subtask_before_deadline_should_change_subtask_state_and_schedule_upload_acknowledged_task(self):
    """upload_finished before the deadline must move the subtask to
    ADDITIONAL_VERIFICATION and schedule the upload_acknowledged task."""
    moment_before_deadline = parse_timestamp_to_utc_datetime(
        parse_datetime_to_timestamp(self.subtask.next_deadline) - 1
    )
    with freeze_time(moment_before_deadline):
        with mock.patch('core.tasks.tasks.upload_acknowledged.delay') as upload_acknowledged_delay_mock:
            upload_finished(self.subtask.subtask_id)  # pylint: disable=no-value-for-parameter
    self.subtask.refresh_from_db()
    self.assertEqual(self.subtask.state_enum, Subtask.SubtaskState.ADDITIONAL_VERIFICATION)
    upload_acknowledged_delay_mock.assert_called_once_with(
        subtask_id=self.subtask.subtask_id,
        source_file_size=self.report_computed_task.task_to_compute.size,
        source_package_hash=self.report_computed_task.task_to_compute.package_hash,
        result_file_size=self.report_computed_task.size,
        result_package_hash=self.report_computed_task.package_hash,
    )
def test_that_concent_should_change_subtask_state_if_verification_is_after_deadline(self):
    """
    Tests that Concent should change subtask state if verification is after deadline.
    To achieve changing state by working queue mechanism, a duplicated SubtaskResultsVerify is being sent.

    Provider  -> Concent:    SubtaskResultsVerify
    Concent   -> Provider:   SubtaskResultsSettled
    Concent   -> Requestor:  SubtaskResultsSettled
    """
    # Store a subtask already in ADDITIONAL_VERIFICATION with a deadline in the future
    # relative to the frozen clock.
    with freeze_time("2018-04-01 10:30:00"):
        subtask = store_subtask(
            task_id=self.task_to_compute.task_id,
            subtask_id=self.task_to_compute.subtask_id,
            provider_public_key=self.PROVIDER_PUBLIC_KEY,
            requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
            state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
            next_deadline=get_current_utc_timestamp() + (self.compute_task_def['deadline'] - self.task_to_compute.timestamp),
            task_to_compute=self.report_computed_task.task_to_compute,
            report_computed_task=self.report_computed_task,
        )
    self._assert_stored_message_counter_increased(3)
    subtask_results_rejected = self._get_deserialized_subtask_results_rejected(
        reason=message.tasks.SubtaskResultsRejected.REASON.VerificationNegative,
        report_computed_task=self.report_computed_task,
    )
    # Send a (duplicate) SubtaskResultsVerify one second AFTER the subtask's deadline.
    with freeze_time(
            parse_timestamp_to_utc_datetime(
                parse_datetime_to_timestamp(subtask.next_deadline) + 1)):
        serialized_subtask_results_verify = self._get_serialized_subtask_results_verify(
            subtask_results_verify=self._get_deserialized_subtask_results_verify(
                subtask_results_rejected=subtask_results_rejected))
        response = self.send_request(
            url='core:send',
            data=serialized_subtask_results_verify,
        )
    assert response.status_code == 200
    # The late verification must settle the subtask: state ACCEPTED, deadline cleared.
    subtask.refresh_from_db()
    self.assertEqual(subtask.state_enum, Subtask.SubtaskState.ACCEPTED)
    self.assertEqual(subtask.next_deadline, None)
    # Both clients must have an out-of-band SubtaskResultsSettled waiting.
    self._test_undelivered_pending_responses(
        subtask_id=subtask.subtask_id,
        client_public_key=self._get_encoded_provider_public_key(),
        client_public_key_out_of_band=self._get_encoded_provider_public_key(),
        expected_pending_responses_receive_out_of_band=[
            PendingResponse.ResponseType.SubtaskResultsSettled,
        ])
    self._test_undelivered_pending_responses(
        subtask_id=subtask.subtask_id,
        client_public_key=self._get_encoded_requestor_public_key(),
        client_public_key_out_of_band=self._get_encoded_requestor_public_key(),
        expected_pending_responses_receive_out_of_band=[
            PendingResponse.ResponseType.SubtaskResultsSettled,
        ])
    # Requestor receives SubtaskResultsSettled on `receive`.
    response_2 = self.send_request(
        url='core:receive',
        data=self._create_requestor_auth_message(),
    )
    self._test_response(
        response_2,
        status=200,
        key=self.REQUESTOR_PRIVATE_KEY,
        message_type=message.concents.SubtaskResultsSettled,
        fields={
            'origin': message.concents.SubtaskResultsSettled.Origin.ResultsRejected,
            'task_to_compute': self.report_computed_task.task_to_compute,
        })
    # Provider receives SubtaskResultsSettled on `receive` as well.
    response_3 = self.send_request(
        url='core:receive',
        data=self._create_provider_auth_message(),
    )
    self._test_response(
        response_3,
        status=200,
        key=self.PROVIDER_PRIVATE_KEY,
        message_type=message.concents.SubtaskResultsSettled,
        fields={
            'origin': message.concents.SubtaskResultsSettled.Origin.ResultsRejected,
            'task_to_compute': self.report_computed_task.task_to_compute,
        })
from middleman_protocol.message import AuthenticationResponseFrame from middleman_protocol.message import ErrorFrame from middleman_protocol.message import GolemMessageFrame from middleman_protocol.message import HeartbeatFrame from middleman_protocol.registry import create_middleman_protocol_message from middleman_protocol.stream import append_frame_separator from middleman_protocol.stream import escape_encode_raw_message from middleman_protocol.tests.utils import async_stream_actor_mock from middleman_protocol.tests.utils import prepare_mocked_reader from middleman_protocol.tests.utils import prepare_mocked_writer (CONCENT_PRIVATE_KEY, CONCENT_PUBLIC_KEY) = generate_ecc_key_pair() (SIGNING_SERVICE_PRIVATE_KEY, SIGNING_SERVICE_PUBLIC_KEY) = generate_ecc_key_pair() (WRONG_SIGNING_SERVICE_PRIVATE_KEY, WRONG_SIGNING_SERVICE_PUBLIC_KEY) = generate_ecc_key_pair() FROZEN_DATE_AND_TIME = "2012-01-14 12:00:01" FROZEN_TIMESTAMP = parse_datetime_to_timestamp(datetime.datetime.strptime(FROZEN_DATE_AND_TIME, "%Y-%m-%d %H:%M:%S")) async def get_item(queue): await sleep(0.000001) item = await queue.get() queue.task_done() return item def _get_mocked_reader(message, request_id, sign_as, **kwargs): protocol_message = create_middleman_protocol_message(PayloadType.GOLEM_MESSAGE, message, request_id) data_to_send = append_frame_separator( escape_encode_raw_message( protocol_message.serialize(sign_as) )
def upload_finished(subtask_id: str) -> None:
    """
    Handle a notification that all files for the given subtask have been uploaded.

    Behaviour depends on the subtask's current state:
    * VERIFICATION_FILE_TRANSFER and past the deadline: finalize deposit claims
      for both parties, mark the subtask FAILED and queue SubtaskResultsSettled
      for the provider and the requestor (out-of-band).
    * VERIFICATION_FILE_TRANSFER and on time: move to ADDITIONAL_VERIFICATION,
      extend the deadline by the Concent verification time and schedule the
      upload_acknowledged task.
    * ADDITIONAL_VERIFICATION / ACCEPTED / FAILED: duplicate notification —
      log a warning and ignore it.
    * Any other state: unexpected — log an error.
    """
    try:
        subtask = Subtask.objects.select_for_update().get(subtask_id=subtask_id)
    except Subtask.DoesNotExist:
        # Was an f-string with no placeholders (pylint W1309); plain literal, same text.
        logging.log(
            logger,
            'Task `upload_finished` tried to get Subtask object, but it does not exist.',
            subtask_id=subtask_id,
            logging_level=logging.LoggingLevel.ERROR,
        )
        return

    report_computed_task = deserialize_message(subtask.report_computed_task.data.tobytes())

    # Check subtask state, if it's VERIFICATION FILE TRANSFER, proceed with the task.
    if subtask.state_enum == Subtask.SubtaskState.VERIFICATION_FILE_TRANSFER:
        # If subtask is past the deadline, processes the timeout.
        if parse_datetime_to_timestamp(subtask.next_deadline) < get_current_utc_timestamp():
            # Worker makes a payment from requestor's deposit just like in the forced acceptance use case.
            def finalize_claims() -> None:
                # Settle the additional-verification claims against both parties' deposits.
                finalize_deposit_claim(
                    subtask_id=subtask_id,
                    concent_use_case=ConcentUseCase.ADDITIONAL_VERIFICATION,
                    ethereum_address=report_computed_task.task_to_compute.requestor_ethereum_address,
                )
                finalize_deposit_claim(
                    subtask_id=subtask_id,
                    concent_use_case=ConcentUseCase.ADDITIONAL_VERIFICATION,
                    ethereum_address=report_computed_task.task_to_compute.provider_ethereum_address,
                )

            # Defer the claim finalization until the surrounding DB transaction commits.
            transaction.on_commit(
                finalize_claims,
                using='control',
            )

            update_subtask_state(
                subtask=subtask,
                state=Subtask.SubtaskState.FAILED.name,  # pylint: disable=no-member
            )

            # Worker adds SubtaskResultsSettled to provider's and requestor's receive queues (both out-of-band)
            for public_key in [subtask.provider.public_key_bytes, subtask.requestor.public_key_bytes]:
                store_pending_message(
                    response_type=PendingResponse.ResponseType.SubtaskResultsSettled,
                    client_public_key=public_key,
                    queue=PendingResponse.Queue.ReceiveOutOfBand,
                    subtask=subtask,
                )
            return

        # Change subtask state to ADDITIONAL VERIFICATION.
        update_subtask_state(
            subtask=subtask,
            state=Subtask.SubtaskState.ADDITIONAL_VERIFICATION.name,  # pylint: disable=no-member
            next_deadline=(
                parse_datetime_to_timestamp(subtask.next_deadline) +
                calculate_concent_verification_time(report_computed_task.task_to_compute)
            )
        )

        # Add upload_acknowledged task to the work queue.
        tasks.upload_acknowledged.delay(
            subtask_id=subtask_id,
            source_file_size=report_computed_task.task_to_compute.size,
            source_package_hash=report_computed_task.task_to_compute.package_hash,
            result_file_size=report_computed_task.size,
            result_package_hash=report_computed_task.package_hash,
        )
    # If it's ADDITIONAL VERIFICATION, ACCEPTED or FAILED, log a warning and ignore the notification.
    # Processing ends here. This means that it's a duplicate notification.
    elif subtask.state_enum in [
        Subtask.SubtaskState.ADDITIONAL_VERIFICATION,
        Subtask.SubtaskState.ACCEPTED,
        Subtask.SubtaskState.FAILED
    ]:
        logging.log(
            logger,
            f'Subtask is expected to be in `VERIFICATION_FILE_TRANSFER` state, but was in {subtask.state}.',
            subtask_id=subtask_id,
            logging_level=logging.LoggingLevel.WARNING,
        )
    # If it's one of the states that can precede verification, report an error. Processing ends here.
    else:
        logging.log(
            logger,
            f'Subtask is expected to be in `VERIFICATION_FILE_TRANSFER` state, but was in {subtask.state}.',
            subtask_id=subtask_id,
            logging_level=logging.LoggingLevel.ERROR,
        )
def clean(self) -> None:
    """Validate subtask state, deadline invariants, state transitions and related stored messages."""
    super().clean()

    # Concent should not accept anything that cause a transition to an active state in soft shutdown mode.
    if config.SOFT_SHUTDOWN_MODE is True and self.state_enum in self.ACTIVE_STATES:
        raise ConcentInSoftShutdownMode

    # next_deadline must be datetime only for active states
    if (not self._state.adding and
            not isinstance(self.next_deadline, datetime.datetime) and
            self.state_enum in self.ACTIVE_STATES):
        raise ValidationError({
            'next_deadline': 'next_deadline must be datetime for active state.'
        })

    # next_deadline must be None in passive states
    if not self._state.adding and self.next_deadline is not None and self.state_enum in self.PASSIVE_STATES:
        raise ValidationError({
            'next_deadline': 'next_deadline must be None for passive state.'
        })

    # State transition can happen only by defined rule
    # but not when we create new object
    # and not when state is not being changed
    if (not self._state.adding and
            self._current_state_enum != self.state_enum and
            self._current_state_enum not in self.POSSIBLE_TRANSITIONS_TO[self.state_enum]  # type: ignore
    ):
        raise ValidationError({
            'state': 'Subtask cannot change its state from {} to {}.'.format(
                self._current_state_name,
                self.state,
            )
        })
    else:
        # The transition is allowed (or no transition happened): remember the
        # accepted state name for later comparisons.
        self._current_state_name = self.state

    # Both ack_report_computed_task and reject_report_computed_task cannot set at the same time.
    if self.ack_report_computed_task is not None and self.reject_report_computed_task is not None:
        raise ValidationError(
            'Both ack_report_computed_task and reject_report_computed_task cannot be set at the same time.'
        )

    # Requestor and provider cannot be the same clients
    # NOTE(review): "provided" in the message below looks like a typo for
    # "provider" — confirm no test asserts on the exact text before fixing.
    if self.requestor_id == self.provider_id:
        raise ValidationError(
            'Requestor and provided are the same client.')

    # Check if all required related messages are not None in current state.
    for stored_message_name, states in Subtask.REQUIRED_RELATED_MESSAGES_IN_STATES.items():
        if self.state_enum in states and getattr(self, stored_message_name) is None:
            raise ValidationError({
                stored_message_name: '{} cannot be None in state {}.'.format(
                    stored_message_name,
                    self.state,
                )
            })

    # Check if all related messages which must be None are None in current state.
    for stored_message_name, states in Subtask.UNSET_RELATED_MESSAGES_IN_STATES.items():
        if self.state_enum in states and getattr(self, stored_message_name) is not None:
            raise ValidationError({
                stored_message_name: '{} must be None in state {}.'.format(
                    stored_message_name,
                    self.state,
                )
            })

    # The denormalized result_package_size column must agree with the stored ReportComputedTask.
    deserialized_report_computed_task = self._deserialize_database_message(
        self.report_computed_task)
    if not self.result_package_size == deserialized_report_computed_task.size:
        raise ValidationError(
            {'result_package_size': "ReportComputedTask size mismatch"})

    # The denormalized computation_deadline column must agree with the stored TaskToCompute.
    deserialized_task_to_compute = self._deserialize_database_message(
        self.task_to_compute)
    if not parse_datetime_to_timestamp(
        self.computation_deadline
    ) == deserialized_task_to_compute.compute_task_def['deadline']:
        raise ValidationError(
            {'computation_deadline': "TaskToCompute deadline mismatch"})

    # Every related stored message must carry the same protocol version as this subtask.
    for related_message_name in Subtask.MESSAGE_FOR_FIELD:
        related_message = getattr(self, related_message_name)
        assert isinstance(related_message, StoredMessage) or related_message is None
        if related_message is not None and related_message.protocol_version != self.protocol_version:
            raise ValidationError(
                f'Unsupported Golem Message version. Version in: `{related_message_name}` is {related_message.protocol_version}, '
                f'Version in Concent is {self.protocol_version}')
def clean(self) -> None:
    """Validate subtask state, deadline invariants, state transitions and all nested message consistency."""
    super().clean()

    # Concent should not accept anything that cause a transition to an active state in soft shutdown mode.
    if config.SOFT_SHUTDOWN_MODE is True and self.state_enum in self.ACTIVE_STATES:
        raise ConcentInSoftShutdownMode

    # next_deadline must be datetime only for active states
    if (not self._state.adding and
            not isinstance(self.next_deadline, datetime.datetime) and
            self.state_enum in self.ACTIVE_STATES):
        raise ValidationError({
            'next_deadline': 'next_deadline must be datetime for active state.'
        })

    # next_deadline must be None in passive states
    if not self._state.adding and self.next_deadline is not None and self.state_enum in self.PASSIVE_STATES:
        raise ValidationError({
            'next_deadline': 'next_deadline must be None for passive state.'
        })

    # State transition can happen only by defined rule
    # but not when we create new object
    # and not when state is not being changed
    if (not self._state.adding and
            self._current_state_enum != self.state_enum and
            self._current_state_enum not in self.POSSIBLE_TRANSITIONS_TO[self.state_enum]  # type: ignore
    ):
        raise ValidationError({
            'state': 'Subtask cannot change its state from {} to {}.'.format(
                self._current_state_name,
                self.state,
            )
        })
    else:
        # The transition is allowed (or no transition happened): remember the
        # accepted state name for later comparisons.
        self._current_state_name = self.state

    # Both ack_report_computed_task and reject_report_computed_task cannot set at the same time.
    if self.ack_report_computed_task is not None and self.reject_report_computed_task is not None:
        raise ValidationError(
            'Both ack_report_computed_task and reject_report_computed_task cannot be set at the same time.'
        )

    # Requestor and provider cannot be the same clients
    # NOTE(review): "provided" in the message below looks like a typo for
    # "provider" — confirm no test asserts on the exact text before fixing.
    if self.requestor_id == self.provider_id:
        raise ValidationError(
            'Requestor and provided are the same client.')

    # Check if all required related messages are not None in current state.
    for stored_message_name, states in Subtask.REQUIRED_RELATED_MESSAGES_IN_STATES.items():
        if self.state_enum in states and getattr(self, stored_message_name) is None:
            raise ValidationError({
                stored_message_name: '{} cannot be None in state {}.'.format(
                    stored_message_name,
                    self.state,
                )
            })

    # Check if all related messages which must be None are None in current state.
    for stored_message_name, states in Subtask.UNSET_RELATED_MESSAGES_IN_STATES.items():
        if self.state_enum in states and getattr(self, stored_message_name) is not None:
            raise ValidationError({
                stored_message_name: '{} must be None in state {}.'.format(
                    stored_message_name,
                    self.state,
                )
            })

    # The stored message may arrive as raw bytes or as a memoryview, depending on the DB backend.
    if isinstance(self.report_computed_task.data, bytes):
        deserialized_report_computed_task = deserialize_message(
            self.report_computed_task.data)
    else:
        deserialized_report_computed_task = deserialize_message(
            self.report_computed_task.data.tobytes())  # pylint: disable=no-member

    # If available, the report_computed_task nested in force_get_task_result must match report_computed_task.
    if (self.force_get_task_result is not None and
            deserialize_message(self.force_get_task_result.data).report_computed_task !=
            deserialized_report_computed_task):
        raise ValidationError({
            'force_get_task_result':
                "ReportComputedTask nested in ForceGetTaskResult must match Subtask's ReportComputedTask."
        })

    if not self.result_package_size == deserialized_report_computed_task.size:
        raise ValidationError(
            {'result_package_size': "ReportComputedTask size mismatch"})

    # Same bytes-vs-memoryview handling for the stored TaskToCompute.
    if isinstance(self.task_to_compute.data, bytes):
        deserialized_task_to_compute = deserialize_message(
            self.task_to_compute.data)
    else:
        deserialized_task_to_compute = deserialize_message(
            self.task_to_compute.data.tobytes())  # pylint: disable=no-member
    if not parse_datetime_to_timestamp(
        self.computation_deadline
    ) == deserialized_task_to_compute.compute_task_def['deadline']:
        raise ValidationError(
            {'computation_deadline': "TaskToCompute deadline mismatch"})

    # Validation for every nested message which is stored in Control database
    # Every nested message must be the same as message stored separately.
    MESSAGES_TO_VALIDATE_TASK_TO_COMPUTE = [
        self.report_computed_task,
        self.subtask_results_accepted,
        self.reject_report_computed_task,
    ]
    MESSAGES_TO_VALIDATE_REPORT_COMPUTED_TASK = [
        self.ack_report_computed_task,
        self.subtask_results_rejected,
        self.force_get_task_result,
    ]
    for task_to_compute_to_validate in MESSAGES_TO_VALIDATE_TASK_TO_COMPUTE:
        if task_to_compute_to_validate is not None:
            validate_database_task_to_compute(
                task_to_compute=deserialized_task_to_compute,
                message_to_compare=deserialize_database_message(
                    task_to_compute_to_validate),
            )
    for report_computed_task_to_validate in MESSAGES_TO_VALIDATE_REPORT_COMPUTED_TASK:
        if report_computed_task_to_validate is not None:
            validate_database_report_computed_task(
                report_computed_task=deserialized_report_computed_task,
                message_to_compare=deserialize_database_message(
                    report_computed_task_to_validate),
            )

    # Every related stored message must carry the protocol version Concent currently speaks.
    for related_message_name in Subtask.MESSAGE_FOR_FIELD:
        related_message = getattr(self, related_message_name)
        assert isinstance(related_message, StoredMessage) or related_message is None
        if related_message is not None and related_message.protocol_version != settings.GOLEM_MESSAGES_VERSION:
            raise ValidationError(
                f'Unsupported Golem Message version. Version in: `{related_message_name}` is {related_message.protocol_version}, '
                f'Version in Concent is {settings.GOLEM_MESSAGES_VERSION}')
def test_parse_datetime_to_timestamp_should_return_correct_utc_timestamp(self):
    """
    Tests if parse_datetime_to_timestamp function works as expected.

    The fixture datetimes constructed with `tzinfo=timezone.pytz.timezone(...)`
    deliberately use each zone's first (LMT) offset — that is what pytz yields
    when used via the tzinfo argument rather than `localize()`.
    """
    timestamp = 946684800  # 2000-01-01 00:00
    # Compare timezone-aware UTC values: the previous naive
    # `datetime.fromtimestamp(timestamp)` depends on the machine's local
    # timezone and made this sanity check fail on any non-UTC host.
    assert datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) == \
        datetime.datetime(2000, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
    for date_time in [
        datetime.datetime(2000, 1, 1, 0, 0),
        datetime.datetime(2000, 1, 1, 0, 0, tzinfo=timezone.pytz.timezone('UTC')),
        datetime.datetime(2000, 1, 1, 4, 37, tzinfo=timezone.pytz.timezone('Asia/Kabul')),
        datetime.datetime(1999, 12, 31, 19, 4, tzinfo=timezone.pytz.timezone('US/Eastern')),
    ]:
        self.assertEqual(
            parse_datetime_to_timestamp(date_time),
            timestamp,
        )

    timestamp = 1321009860  # 2011-11-11 11:11
    assert datetime.datetime.fromtimestamp(timestamp, tz=datetime.timezone.utc) == \
        datetime.datetime(2011, 11, 11, 11, 11, tzinfo=datetime.timezone.utc)
    for date_time in [
        datetime.datetime(2011, 11, 11, 11, 11),
        datetime.datetime(2011, 11, 11, 11, 11, tzinfo=timezone.pytz.timezone('UTC')),
        datetime.datetime(2011, 11, 11, 20, 30, tzinfo=timezone.pytz.timezone('Asia/Tokyo')),
        datetime.datetime(2011, 11, 11, 1, 11, tzinfo=timezone.pytz.timezone('US/Alaska')),
    ]:
        self.assertEqual(
            parse_datetime_to_timestamp(date_time),
            timestamp,
        )
def upload_acknowledged(
    subtask_id: str,
    source_file_size: str,
    source_package_hash: str,
    result_file_size: str,
    result_package_hash: str,
) -> None:
    """
    Mark the VerificationRequest for `subtask_id` as upload-acknowledged and
    enqueue the blender verification order for it.

    Raises VerificationRequestAlreadyAcknowledgedError on duplicate delivery
    (the request was already acknowledged). Returns silently when no
    VerificationRequest exists for the given subtask.

    NOTE(review): sizes are annotated as `str`; presumably they are passed
    through from serialized task data — confirm against callers.
    """
    # `f'Upload acknowledgment starts.'` had no placeholders (pylint W1309) and
    # `Source_file_size` was missing the colon every sibling field has.
    log(
        logger,
        'Upload acknowledgment starts.',
        f'Source_file_size: {source_file_size}',
        f'Source_package_hash: {source_package_hash}',
        f'Result_file_size: {result_file_size}',
        f'Result_package_hash: {result_package_hash}',
        subtask_id=subtask_id,
    )
    assert isinstance(subtask_id, str)

    try:
        verification_request = VerificationRequest.objects.select_for_update().get(subtask_id=subtask_id)
    except VerificationRequest.DoesNotExist:
        log(
            logger,
            f'Task `upload_acknowledged` tried to get VerificationRequest object with ID {subtask_id} but it does not exist.',
            subtask_id=subtask_id,
            logging_level=LoggingLevel.ERROR,
        )
        return

    if verification_request.upload_acknowledged is True:
        # Duplicate task delivery: messages below previously read "with with" (typo fixed).
        log(
            logger,
            f'Task `upload_acknowledged` scheduled but VerificationRequest with ID {subtask_id} is already acknowledged.',
            subtask_id=subtask_id,
        )
        raise VerificationRequestAlreadyAcknowledgedError(
            f'Task `upload_acknowledged` scheduled but VerificationRequest with ID {subtask_id} is already acknowledged.',
            ErrorCode.CONDUCTOR_VERIFICATION_REQUEST_ALREADY_ACKNOWLEDGED)
    else:
        verification_request.upload_acknowledged = True
        verification_request.full_clean()
        verification_request.save()

        frames = filter_frames_by_blender_subtask_definition(
            verification_request.blender_subtask_definition)

        blender_verification_order.delay(
            subtask_id=verification_request.subtask_id,
            source_package_path=verification_request.source_package_path,
            source_size=source_file_size,
            source_package_hash=source_package_hash,
            result_package_path=verification_request.result_package_path,
            result_size=result_file_size,
            result_package_hash=result_package_hash,
            output_format=verification_request.blender_subtask_definition.output_format,
            scene_file=verification_request.blender_subtask_definition.scene_file,
            verification_deadline=parse_datetime_to_timestamp(
                verification_request.verification_deadline),
            frames=frames,
            blender_crop_script=verification_request.blender_subtask_definition.blender_crop_script,
        )

    log(
        logger,
        'Upload acknowledgment finished.',
        f'Source_file_size: {source_file_size}',
        f'Source_package_hash: {source_package_hash}',
        f'Result_file_size: {result_file_size}',
        f'Result_package_hash: {result_package_hash}',
        subtask_id=subtask_id,
    )