def test_upload_should_return_401_if_specific_file_info_data(self):
    """Gatekeeper responds 401 (JSON) to an upload token carrying bad FileInfo data."""
    # NOTE(review): the checksum ends with '\n' — presumably this is the
    # malformed value that makes the token invalid; confirm against gatekeeper.
    file_info = FileTransferToken.FileInfo(
        path=get_storage_result_file_path(task_id=self._get_uuid(), subtask_id=self._get_uuid()),
        size=1,
        checksum='sha1:356a192b7913b04c54574d18c28d46e6395428ab\n',
    )
    self.upload_token.files = [file_info]

    serialized_token = dump(self.upload_token, settings.CONCENT_PRIVATE_KEY, settings.CONCENT_PUBLIC_KEY)
    encoded_token = b64encode(serialized_token).decode()

    upload_url = '{}{}'.format(
        reverse('gatekeeper:upload'),
        get_storage_result_file_path(
            task_id=self._get_uuid(),
            subtask_id=self._get_uuid(),
        ),
    )
    response = self.client.post(
        upload_url,
        HTTP_AUTHORIZATION='Golem ' + encoded_token,
        HTTP_CONCENT_AUTH=self.header_concent_auth,
        content_type='application/x-www-form-urlencoded',
        HTTP_X_Golem_Messages=settings.GOLEM_MESSAGES_VERSION,
    )

    self.assertIsInstance(response, JsonResponse)
    self.assertEqual(response.status_code, 401)
    self.assertIn('message', response.json().keys())
    self.assertEqual("application/json", response["Content-Type"])
def create_file_transfer_token_for_verification_use_case(
    subtask_results_verify: message.concents.SubtaskResultsVerify,
    authorized_client_public_key: bytes,
) -> FileTransferToken:
    """
    Build an upload FileTransferToken for the verification use case.

    Due to a different token expiration deadline, this function (and not
    create_file_transfer_token_for_golem_client) should be used in
    verification UC.
    """
    subtask_id = subtask_results_verify.subtask_id
    task_id = subtask_results_verify.task_id
    # Source package metadata comes from the original TaskToCompute,
    # result package metadata from the rejected ReportComputedTask.
    task_to_compute = subtask_results_verify.task_to_compute
    report_computed_task = subtask_results_verify.subtask_results_rejected.report_computed_task

    return _create_file_transfer_token(
        subtask_id=subtask_id,
        source_package_path=get_storage_source_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        source_size=task_to_compute.size,
        source_package_hash=task_to_compute.package_hash,
        result_package_path=get_storage_result_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        result_size=report_computed_task.size,
        result_package_hash=report_computed_task.package_hash,
        authorized_client_public_key=authorized_client_public_key,
        operation=FileTransferToken.Operation.upload,
        token_expiration_deadline=calculate_token_expiration_deadline_for_verification_case(
            subtask_results_verify
        ),
    )
def setUp(self):
    """Create the task/report fixtures and store a REPORTED subtask for the tests."""
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def

    # Blender rendering parameters, lifted out of the task definition.
    extra_data = self.compute_task_def['extra_data']
    first_crop = extra_data['crops'][0]
    self.blender_crop_script_parameters = dict(
        resolution=extra_data['resolution'],
        samples=extra_data['samples'],
        use_compositing=extra_data['use_compositing'],
        borders_x=first_crop['borders_x'],
        borders_y=first_crop['borders_y'],
    )

    self.source_package_path = get_storage_source_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )

    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.REPORTED,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
        next_deadline=None,
    )
def create_file_transfer_token_for_golem_client(
    report_computed_task: message.tasks.ReportComputedTask,
    authorized_client_public_key: bytes,
    operation: FileTransferToken.Operation,
) -> FileTransferToken:
    """Build a FileTransferToken for a Golem client for the given operation."""
    compute_task_def = report_computed_task.task_to_compute.compute_task_def
    subtask_id = compute_task_def['subtask_id']
    task_id = compute_task_def['task_id']
    task_to_compute = report_computed_task.task_to_compute

    return _create_file_transfer_token(
        subtask_id=subtask_id,
        source_package_path=get_storage_source_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        source_size=task_to_compute.size,
        source_package_hash=task_to_compute.package_hash,
        result_package_path=get_storage_result_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        result_size=report_computed_task.size,
        result_package_hash=report_computed_task.package_hash,
        authorized_client_public_key=authorized_client_public_key,
        operation=operation,
        # Deadline depends on the operation (upload vs download).
        token_expiration_deadline=calculate_token_expiration_deadline(
            operation, report_computed_task
        ),
    )
def setUp(self):
    """Persist a VerificationRequest plus its BlenderSubtaskDefinition for the tests."""
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def
    self.source_package_path = get_storage_source_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )

    deadline = self._get_verification_deadline_as_datetime(
        get_current_utc_timestamp(),
        self.report_computed_task.task_to_compute,
    )
    self.verification_request = VerificationRequest(
        subtask_id=self.report_computed_task.subtask_id,
        source_package_path=self.source_package_path,
        result_package_path=self.result_package_path,
        verification_deadline=deadline,
    )
    self.verification_request.full_clean()
    self.verification_request.save()

    # Attach the Blender-specific definition to the stored request.
    subtask_definition = BlenderSubtaskDefinition(
        verification_request=self.verification_request,
        output_format=BlenderSubtaskDefinition.OutputFormat.JPG.name,  # pylint: disable=no-member
        scene_file=self.compute_task_def['extra_data']['scene_file'],
        blender_crop_script=self.compute_task_def['extra_data']['script_src'],
    )
    subtask_definition.full_clean()
    subtask_definition.save()
def send_blender_verification_request(compute_task_def: ComputeTaskDef, verification_deadline: int) -> None:
    """Queue an asynchronous Blender verification task for the given subtask."""
    task_id = compute_task_def['task_id']
    subtask_id = compute_task_def['subtask_id']
    extra_data = compute_task_def['extra_data']

    blender_verification_request.delay(
        subtask_id=subtask_id,
        source_package_path=get_storage_source_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        result_package_path=get_storage_result_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        output_format=extra_data['output_format'],
        scene_file=extra_data['scene_file'],
        verification_deadline=verification_deadline,
        frames=extra_data['frames'],
        # script_src is optional in extra_data, hence .get().
        blender_crop_script=extra_data.get('script_src'),
    )
def send_blender_verification_request(compute_task_def: ComputeTaskDef, verification_deadline: int) -> None:
    """Queue an asynchronous Blender verification task for the given subtask."""
    task_id = compute_task_def['task_id']
    subtask_id = compute_task_def['subtask_id']
    extra_data = compute_task_def['extra_data']

    # Verifier needs the scene_file path without golem's resource directory
    # prefix, so cut the prefix off before passing the name downstream.
    scene_file = extract_name_from_scene_file_path(extra_data['scene_file'])
    assert scene_file is not None

    blender_verification_request.delay(
        subtask_id=subtask_id,
        source_package_path=get_storage_source_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        result_package_path=get_storage_result_file_path(
            subtask_id=subtask_id,
            task_id=task_id,
        ),
        output_format=extra_data['output_format'],
        scene_file=scene_file,
        verification_deadline=verification_deadline,
        frames=extra_data['frames'],
        # script_src is optional in extra_data, hence .get().
        blender_crop_script=extra_data.get('script_src'),
    )
def setUp(self):
    """Create the report fixture and pre-compute the package paths used by the tests."""
    super().setUp()
    self.subtask_result_rejected_time_str = "2018-04-01 10:30:00"
    self.report_computed_task = self._create_report_computed_task()

    subtask_id = self.task_to_compute.subtask_id
    task_id = self.task_to_compute.task_id
    self.source_package_path = get_storage_source_file_path(
        subtask_id=subtask_id,
        task_id=task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        subtask_id=subtask_id,
        task_id=task_id,
    )
def setUp(self):
    """Prepare task fixtures, mocked images and expected frame/file mappings."""
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def
    # Blender parameters pulled out of the task definition's extra_data.
    self.blender_crop_script_parameters = extract_blender_parameters_from_compute_task_def(
        self.compute_task_def['extra_data'])
    self.source_package_path = get_storage_source_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        self.task_to_compute.subtask_id,
        self.task_to_compute.task_id,
    )
    self.frames = [1]
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute)
    self.subtask_id = self.compute_task_def['subtask_id']
    self.scene_file = self.compute_task_def['extra_data']['scene_file']
    self.output_format = self.compute_task_def['extra_data'][
        'output_format']
    # Autospec'd ndarray stand-ins for the two images compared during verification.
    self.mock_image1 = mock.create_autospec(spec=ndarray, spec_set=True)
    self.mock_image2 = mock.create_autospec(spec=ndarray, spec_set=True)
    self.loaded_images = (self.mock_image1, self.mock_image2)
    self.mock_verification_result = mock.create_autospec(
        spec=verification_result, spec_set=True)
    # Frame number -> file paths; presumably [provider result, blender render output]
    # per frame — TODO confirm against the code under test.
    self.parsed_all_files = {
        1: [
            '/tmp/result_0001.png',
            '/tmp/out_scene-Helicopter-27-internal.blend_0001.png'
        ]
    }
    self.blender_output_file_name_list = [
        '/tmp/out_scene-Helicopter-27-internal.blend_0001.png',
    ]
    # Same mapping for the multi-frame (two frame) scenario.
    self.parsed_multi_frames_files = {
        1: [
            '/tmp/result_0001.png',
            '/tmp/out_scene-Helicopter-27-internal.blend_0001.png'
        ],
        2: [
            '/tmp/result_0002.png',
            '/tmp/out_scene-Helicopter-27-internal.blend_0002.png'
        ]
    }
    self.multi_frames_blender_output_file_name_list = [
        '/tmp/out_scene-Helicopter-27-internal.blend_0001.png',
        '/tmp/out_scene-Helicopter-27-internal.blend_0002.png'
    ]
    self.multi_frames = [1, 2]
def request_upload_status(
        report_computed_task: message.ReportComputedTask) -> bool:
    """
    Ask the storage cluster whether the result package for the given report
    has been uploaded.

    Returns True on HTTP 200, False on HTTP 404 and raises
    exceptions.UnexpectedResponse for any other status code.
    """
    slash = '/'
    assert settings.STORAGE_CLUSTER_ADDRESS.endswith(slash)

    file_transfer_token = create_file_transfer_token_for_concent(
        subtask_id=report_computed_task.subtask_id,
        result_package_path=get_storage_result_file_path(
            subtask_id=report_computed_task.subtask_id,
            task_id=report_computed_task.task_id,
        ),
        result_size=report_computed_task.size,
        result_package_hash=report_computed_task.package_hash,
        operation=FileTransferToken.Operation.download)
    assert len(file_transfer_token.files) == 1
    assert not file_transfer_token.files[0]['path'].startswith(slash)

    # Re-sign the token with Concent's own key pair before sending it.
    file_transfer_token.sig = None
    dumped_file_transfer_token = shortcuts.dump(
        file_transfer_token,
        settings.CONCENT_PRIVATE_KEY,
        settings.CONCENT_PUBLIC_KEY,
    )
    dumped_client_authorization = shortcuts.dump(
        message.concents.ClientAuthorization(
            client_public_key=settings.CONCENT_PUBLIC_KEY,
        ),
        settings.CONCENT_PRIVATE_KEY,
        settings.CONCENT_PUBLIC_KEY,
    )
    headers = {
        'Authorization': 'Golem ' + b64encode(dumped_file_transfer_token).decode(),
        'Concent-Auth': b64encode(dumped_client_authorization).decode(),
    }
    request_http_address = (
        settings.STORAGE_CLUSTER_ADDRESS
        + CLUSTER_DOWNLOAD_PATH
        + file_transfer_token.files[0]['path']
    )

    storage_cluster_response = send_request_to_storage_cluster(
        headers, request_http_address)

    status_code = storage_cluster_response.status_code
    if status_code == 200:
        return True
    if status_code == 404:
        return False
    raise exceptions.UnexpectedResponse(
        f'Cluster storage returned HTTP {status_code}'
    )
def test_that_file_transfer_token_for_concent_is_never_out_of_date(self):
    """A token created for Concent itself must always have its deadline in the future."""
    report_computed_task = ReportComputedTaskFactory()
    token = create_file_transfer_token_for_concent(
        subtask_id=report_computed_task.subtask_id,
        source_package_path=get_storage_source_file_path(
            subtask_id=report_computed_task.subtask_id,
            task_id=report_computed_task.task_id,
        ),
        source_size=report_computed_task.task_to_compute.size,
        source_package_hash=report_computed_task.task_to_compute.package_hash,
        result_package_path=get_storage_result_file_path(
            subtask_id=report_computed_task.subtask_id,
            task_id=report_computed_task.task_id,
        ),
        result_size=report_computed_task.size,
        result_package_hash=report_computed_task.package_hash,
        operation=FileTransferToken.Operation.download,
    )
    # assertLess instead of assertTrue(a < b): identical pass/fail behavior,
    # but on failure it reports both compared values instead of just "False".
    self.assertLess(token.timestamp, token.token_expiration_deadline)
def test_upload_should_return_401_if_previously_causing_500_error_authorization_header_is_used(
        self):
    """Regression test: a header that once crashed the gatekeeper (HTTP 500) must now yield a clean 401."""
    upload_url = '{}{}'.format(
        reverse('gatekeeper:upload'),
        get_storage_result_file_path(
            task_id=1,
            subtask_id=2,
        ),
    )
    # Authorization header captured from the request that used to trigger the 500.
    problematic_authorization_header = 'Golem D6UAAAAAWoG9YgGsJib/zgj2cAHGXunyxI7t2NYnHKPvrdzVkdT/B58TpQHpdfonuWy8sWq9nrpc9+/1nUTm8O9szLOrFrCPKL7hAQRWLO4JCR6cVGILFbqRKX6abR1AKMLqRUa/ucH5t0YrLe/OPEp6+2swgbRgcnu0dlvfaupn9bwRPZhjVc2hJlDlkz+7aRx+NDEFWQeRHt3q7b8vA0xd/UUvPGudSnzGR6DaM1+Ji4PifQ7AUdYkQHmRNP4yZH+xjCq706J8mftrySj2geoP+TLKZFgpqHhng5I9v0xKpjOnZk9MRTWkzPyxIMwl535ZVLte0J5VRIIaZFEyYFRXgZGVyGinnEIfXZKZdUdRpRELUBK086A/w4aG3shpEPXEzfo42hjdrDEfyx5bZTANyrGwj1hTLKPoVaPMN9wb3MdQ1D1B5Os3+5YdfASnQRZfZmaEJqNAHNlZveLHpA2DcPFNvltcwUy3Jj1gTI43IbbuXNsIXhMKgNaZrNgJKKpQpc+qF9D7CwfugtiD6y/g71UrrUgvVIcZ9UXVTu5OJg2agGiaIvRWrGxfhyzv/HyHR530p7fNTt/dJBCDO55Mx3uhxA/XGYxmz2uk/xIQMR8QU7Cc/tOdvzdHJ+WHhNBo2fe5oLk03AXIhpqOOgJb8nnM'

    response = self.client.post(
        upload_url,
        HTTP_AUTHORIZATION=problematic_authorization_header,
        HTTP_CONCENT_AUTH=self.header_concent_auth,
        content_type='application/x-www-form-urlencoded',
    )

    self.assertIsInstance(response, JsonResponse)
    self.assertEqual(response.status_code, 401)
    self.assertIn('message', response.json().keys())
    self.assertEqual("application/json", response["Content-Type"])
def setUp(self):
    """Build the task/report fixtures and store a REPORTED subtask in the database."""
    super().setUp()
    self.task_to_compute = self._get_deserialized_task_to_compute()
    self.compute_task_def = self.task_to_compute.compute_task_def

    subtask_id = self.task_to_compute.subtask_id
    task_id = self.task_to_compute.task_id
    self.source_package_path = get_storage_source_file_path(
        subtask_id,
        task_id,
    )
    self.result_package_path = get_storage_result_file_path(
        subtask_id,
        task_id,
    )
    self.report_computed_task = self._get_deserialized_report_computed_task(
        task_to_compute=self.task_to_compute,
    )

    store_subtask(
        task_id=self.compute_task_def['task_id'],
        subtask_id=self.compute_task_def['subtask_id'],
        provider_public_key=self.PROVIDER_PUBLIC_KEY,
        requestor_public_key=self.REQUESTOR_PUBLIC_KEY,
        state=Subtask.SubtaskState.REPORTED,
        task_to_compute=self.report_computed_task.task_to_compute,
        report_computed_task=self.report_computed_task,
        next_deadline=None,
    )
def test_case_1_test_for_existing_file(cluster_consts: ProtocolConstants, cluster_url: str) -> None:
    # Integration test for the force-get-task-result flow when the provider
    # actually uploads the result file to the storage cluster.
    # NOTE(review): cluster_consts is unused here — presumably required by the
    # shared test-runner signature; confirm before removing.
    current_time = get_current_utc_timestamp()
    file_content = 'test'
    file_size = len(file_content)
    # Checksum format expected by Concent: 'sha1:' + hex digest of the content.
    file_check_sum = 'sha1:' + hashlib.sha1(file_content.encode()).hexdigest()
    force_get_task_result = get_force_get_task_result(
        current_time,
        size=file_size,
        package_hash=file_check_sum,
    )
    file_path = get_storage_result_file_path(
        task_id=force_get_task_result.task_id,
        subtask_id=force_get_task_result.subtask_id,
    )

    # Case 1 - test for existing file
    # Requestor asks Concent to force the provider to deliver the result.
    api_request(
        cluster_url,
        'send',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        force_get_task_result,
        expected_status=200,
        expected_message_type=message.concents.AckForceGetTaskResult,
        expected_content_type='application/octet-stream',
    )
    # Provider polls and receives the upload request with a file transfer token.
    force_get_task_result_upload = api_request(
        cluster_url,
        'receive',
        PROVIDER_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(PROVIDER_PRIVATE_KEY,
                                   PROVIDER_PUBLIC_KEY,
                                   CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultUpload,
        expected_content_type='application/octet-stream',
    )
    # Provider uploads the result file using the token from the previous step.
    response = upload_file_to_storage_cluster(
        file_content,
        file_path,
        force_get_task_result_upload.file_transfer_token,  # type: ignore
        PROVIDER_PRIVATE_KEY,
        PROVIDER_PUBLIC_KEY,
        CONCENT_PUBLIC_KEY,
        STORAGE_CLUSTER_ADDRESS,
    )
    assert_condition(response.status_code, 200, 'File has not been stored on cluster')
    print('\nUploaded file with task_id {}. Checksum of this file is {}, and size of this file is {}.\n'.format(
        force_get_task_result.task_id,
        file_check_sum,
        file_size
    ))

    # Give the cluster a moment to register the upload before polling again.
    time.sleep(0.5)

    # Requestor polls and should now receive the download message for the file.
    api_request(
        cluster_url,
        'receive',
        REQUESTOR_PRIVATE_KEY,
        CONCENT_PUBLIC_KEY,
        create_client_auth_message(REQUESTOR_PRIVATE_KEY,
                                   REQUESTOR_PUBLIC_KEY,
                                   CONCENT_PUBLIC_KEY),
        expected_status=200,
        expected_message_type=message.concents.ForceGetTaskResultDownload,
        expected_content_type='application/octet-stream',
    )