def test_get_error_500_401_token_invalid(self):
    """
    Return a 500 if the BaseResource._transfer_resource method running on the server gets a 401
    error because the token is invalid.
    """
    self.headers['HTTP_PRESQT_DESTINATION_TOKEN'] = 'bad_token'
    self.client.post(self.url, {
        "source_target_name": "github",
        "source_resource_id": self.resource_id,
        "keywords": []}, **self.headers, format='json')

    # Ticket number is "<source-token-hash>_<destination-token-hash>" — here the
    # destination token is the deliberately invalid 'bad_token'.
    self.ticket_number = '{}_{}'.format(hash_tokens(
        self.source_token), hash_tokens('bad_token'))
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(self.ticket_number)
    process_info = read_file(process_info_path, True)

    url = reverse('job_status', kwargs={'action': 'transfer'})

    # Poll until the spawned off transfer process finishes in the background.
    while process_info['resource_transfer_in']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass

    response = self.client.get(url, **self.headers)
    self.assertEqual(response.status_code, 500)
    self.assertEqual(response.data['message'],
                     "Token is invalid. Response returned a 401 status code.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_error_500_404_bad_project_id(self):
    """
    Return a 500 if an invalid resource_id is provided.
    """
    url = reverse('resource', kwargs={
        'target_name': self.target_name,
        'resource_id': 'bad',
        'resource_format': 'zip'
    })
    response = self.client.get(url, **self.header)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(
        hash_tokens(self.token))
    process_info = read_file(process_info_path, True)

    # Poll until the spawned off download process finishes in the background.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)

    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(
        download_response.data['message'],
        "The resource could not be found by the requesting user.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(hash_tokens(self.token)))
def transfer_patch(self): """ Attempt to cancel a transfer job. """ # Perform token validation. Read data from the process_info file. try: destination_token = get_destination_token(self.request) source_token = get_source_token(self.request) self.ticket_number = '{}_{}'.format(hash_tokens(source_token), hash_tokens(destination_token)) process_data = get_process_info_data(self.ticket_number) except PresQTValidationError as e: return Response(data={'error': e.data}, status=e.status_code) # Wait until the spawned off process has started to cancel the transfer while process_data['resource_transfer_in'][ 'function_process_id'] is None: try: process_data = get_process_info_data(self.ticket_number) except json.decoder.JSONDecodeError: # Pass while the process_info file is being written to pass transfer_process_data = process_data['resource_transfer_in'] # If transfer is still in progress then cancel the subprocess if transfer_process_data['status'] == 'in_progress': for process in multiprocessing.active_children(): if process.pid == transfer_process_data['function_process_id']: process.kill() process.join() transfer_process_data['status'] = 'failed' transfer_process_data[ 'message'] = 'Transfer was cancelled by the user' transfer_process_data['status_code'] = '499' transfer_process_data['expiration'] = str(timezone.now() + relativedelta( hours=1)) update_or_create_process_info(transfer_process_data, 'resource_transfer_in', self.ticket_number) return Response(data={ 'status_code': transfer_process_data['status_code'], 'message': transfer_process_data['message'] }, status=status.HTTP_200_OK) # If transfer is finished then don't attempt to cancel subprocess else: return Response(data={ 'status_code': transfer_process_data['status_code'], 'message': transfer_process_data['message'] }, status=status.HTTP_406_NOT_ACCEPTABLE)
def setUp(self):
    """Create the API client, tokens, and transfer headers for GitHub-to-OSF transfer tests."""
    self.client = APIClient()
    self.destination_token = OSF_UPLOAD_TEST_USER_TOKEN
    self.source_token = GITHUB_TEST_USER_TOKEN
    # Job tickets are "<source-token-hash>_<destination-token-hash>".
    self.ticket_number = '{}_{}'.format(
        hash_tokens(self.source_token), hash_tokens(self.destination_token))
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.destination_token,
        'HTTP_PRESQT_SOURCE_TOKEN': self.source_token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_KEYWORD_ACTION': 'manual',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.resource_id = '209373660'
    self.url = reverse('resource_collection', kwargs={'target_name': 'osf'})
def test_call_transfer_success_finite_depth(self):
    """
    Make a POST request to `resource` to begin transferring a resource.
    """
    self.url = reverse('resource_collection', kwargs={'target_name': 'zenodo'})
    self.headers = {'HTTP_PRESQT_DESTINATION_TOKEN': ZENODO_TEST_USER_TOKEN,
                    'HTTP_PRESQT_SOURCE_TOKEN': self.source_token,
                    'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
                    'HTTP_PRESQT_KEYWORD_ACTION': 'automatic',
                    'HTTP_PRESQT_EMAIL_OPT_IN': '*****@*****.**',
                    'HTTP_PRESQT_FAIRSHARE_EVALUATOR_OPT_IN': 'no'}
    # Ticket number is "<source-token-hash>_<destination-token-hash>".
    self.ticket_number = "{}_{}".format(hash_tokens(
        self.source_token), hash_tokens(ZENODO_TEST_USER_TOKEN))
    response = self.client.post(self.url, {"source_target_name": "github",
                                           "source_resource_id": self.resource_id,
                                           "keywords": []}, **self.headers, format='json')

    self.process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(self.ticket_number)
    self.transfer_job = response.data['transfer_job']
    process_info = read_file(self.process_info_path, True)

    response = self.client.get(self.transfer_job, **self.headers)
    self.assertEqual(response.data['message'], 'Transfer is being processed on the server')

    # Wait until the spawned off process finishes in the background
    while process_info['resource_transfer_in']['status'] == 'in_progress':
        try:
            process_info = read_file(self.process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass

    # Check that transfer was successful
    response = self.client.get(self.transfer_job, **self.headers)
    self.assertEqual(response.data['status_code'], '200')
    # Fixity errors because we're dealing with GitHub
    self.assertEqual(response.data['message'], 'Transfer successful but with fixity errors.')
    # Ensure we did not run the 12 FAIRshare tests
    self.assertEqual(response.data['fairshare_evaluation_results'], [])

    # Clean up the project this transfer created on the live Zenodo test account.
    test_user_projects = requests.get('https://zenodo.org/api/deposit/depositions',
                                      params={'access_token': ZENODO_TEST_USER_TOKEN}).json()
    for project in test_user_projects:
        if project['title'] == 'ProjectTwelve':
            requests.delete(project['links']['self'], params={
                            'access_token': ZENODO_TEST_USER_TOKEN})

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_error_500_401(self):
    """
    Return a 500 if an invalid token is provided.
    """
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '209373160',
                                      'resource_format': 'zip'})
    response = self.client.get(
        url, **{'HTTP_PRESQT_SOURCE_TOKEN': 'eggs', 'HTTP_PRESQT_EMAIL_OPT_IN': ''})
    ticket_number = hash_tokens('eggs')
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)

    # Poll until the spawned off download process finishes in the background.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **{'HTTP_PRESQT_SOURCE_TOKEN': 'eggs'})

    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 401)
    self.assertEqual(download_response.data['message'],
                     "Token is invalid. Response returned a 401 status code.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
def test_get_error_404_osf(self):
    """
    Return a 404 if the ticket_number provided is not a valid ticket number.
    """
    self.url = reverse('resource_collection', kwargs={'target_name': 'osf'})
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.call_upload_resources()

    # Query job status with a token whose hash has no corresponding job folder.
    headers = {
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_DESTINATION_TOKEN': 'bad_ticket',
        'HTTP_PRESQT_EMAIL_OPT_IN': ''
    }
    url = reverse('job_status', kwargs={'action': 'upload'})
    response = self.client.get(url, **headers)

    # Verify the status code and content
    self.assertEqual(response.status_code, 404)
    self.assertEqual(
        response.data['error'],
        "PresQT Error: Invalid ticket number, '{}'.".format(
            hash_tokens('bad_ticket')))

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_download_private_project(self):
    """
    Return a 200 along with a zip file of the private project requested.
    """
    resource_id = '83375'
    shared_call_get_resource_zip(self, resource_id)

    url = reverse('job_status', kwargs={
        'action': 'download',
        'response_format': 'zip'
    })
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)

    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file
    self.assertEquals(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(
            self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    self.assertEqual(len(zip_file.namelist()), 15)

    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('figshare_download_{}/fixity_info.json'.format(
            resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 1)

    with zip_file.open(
            'figshare_download_{}/PRESQT_FTS_METADATA.json'.format(
                resource_id)) as metadatafile:
        metadata = json.load(metadatafile)
        # Make sure the results of extra are what we expect
        self.assertEqual(metadata['extra_metadata']['description'],
                         "This is actually just eggs.")
        self.assertEqual(metadata['extra_metadata']['title'], 'Hello World')
        self.assertEqual(metadata['extra_metadata']['creators'], [{
            'first_name': 'Prometheus',
            'last_name': 'Test',
            'ORCID': None
        }])

    file_path = "{}_download_{}/data/Hello World/Ecoute/ecoute.png".format(
        self.target_name, resource_id)
    # Verify that the folder exists
    self.assertIn(file_path, zip_file.namelist())

    # Verify the file appears exactly once in the archive listing.
    count_of_file_references = zip_file.namelist().count(file_path)
    self.assertEqual(count_of_file_references, 1)

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(hash_tokens(self.token)))
def setUp(self):
    """Create an authenticated API client and the OSF fixtures shared by these tests."""
    self.client = APIClient()
    self.token = OSF_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(OSF_TEST_USER_TOKEN)
    self.header = {
        'HTTP_PRESQT_SOURCE_TOKEN': OSF_TEST_USER_TOKEN,
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.resource_id = 'cmn5z'
    self.target_name = 'osf'
def test_failed_upload_to_existing_project(self):
    """
    Upload to an existing GitLab project while requests.post returns a mocked 500 and
    verify the background job reports the failure.
    """
    # Mock a server error for when a put request is made.
    class MockResponse:
        def __init__(self, json_data, status_code):
            self.json_data = json_data
            self.status_code = status_code
    mock_req = MockResponse({'error': 'The server is down.'}, 500)

    # 202 when uploading a new top level repo
    sleep(30)
    shared_upload_function_gitlab(self)

    # Verify the new repo exists on the PresQT Resource Collection endpoint.
    url = reverse('resource_collection', kwargs={'target_name': 'gitlab'})
    project_id = self.client.get(
        url, **{'HTTP_PRESQT_SOURCE_TOKEN': GITLAB_UPLOAD_TEST_USER_TOKEN}).json()['resources'][0]['id']
    self.resource_id = project_id
    shutil.rmtree(self.ticket_path)

    # Now I'll make an explicit call to our metadata function with a mocked server error and ensure
    # it is raising an exception.
    with patch('requests.post') as mock_request:
        mock_request.return_value = mock_req
        # Upload to the newly created project
        self.url = reverse('resource', kwargs={
            'target_name': 'gitlab', 'resource_id': project_id})
        self.headers['HTTP_PRESQT_FILE_DUPLICATE_ACTION'] = self.duplicate_action
        response = self.client.post(self.url, {'presqt-file': open(
            self.file, 'rb')}, **self.headers)

        ticket_number = hash_tokens(self.token)
        self.ticket_path = 'mediafiles/uploads/{}'.format(ticket_number)

        # Verify status code and message
        self.assertEqual(response.status_code, 202)
        self.assertEqual(
            response.data['message'], 'The server is processing the request.')

        # Verify process_info file status is 'in_progress' initially
        process_info = read_file('{}/process_info.json'.format(self.ticket_path), True)
        self.assertEqual(process_info['status'], 'in_progress')

        # Wait until the spawned off process finishes in the background to do further validation
        process_wait(process_info, self.ticket_path)

        # Verify process_info.json file data
        process_info = read_file('{}/process_info.json'.format(self.ticket_path), True)
        self.assertEqual(process_info['status'], 'failed')
        self.assertEqual(process_info['message'],
                         "Upload failed with a status code of 500")
        self.assertEqual(process_info['status_code'], 400)

    # Delete upload folder
    shutil.rmtree(self.ticket_path)
    # Delete GitLab Project
    delete_gitlab_project(project_id, GITLAB_UPLOAD_TEST_USER_TOKEN)
def test_error_500_404(self):
    """
    Return a 500 if an invalid resource_id is provided.
    """
    # First we will check an invalid project id
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '8219237',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **self.header)
    ticket_number = hash_tokens(self.token)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)

    # Poll until the spawned off download process finishes in the background.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)

    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(download_response.data['message'],
                     "The resource with id, 8219237, does not exist for this user.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))

    # Now we will check an invalid file id
    url = reverse('resource', kwargs={'target_name': self.target_name,
                                      'resource_id': '127eqdid-WQD2EQDWS-dw234dwd8',
                                      'resource_format': 'zip'})
    response = self.client.get(url, **self.header)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(ticket_number)
    process_info = read_file(process_info_path, True)

    # Poll until the second download job finishes as well.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)

    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(download_response.data['message'],
                     "The resource with id, 127eqdid-WQD2EQDWS-dw234dwd8, does not exist for this user.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))
def setUp(self):
    """Build the API client and OSF upload headers used by these tests."""
    self.client = APIClient()
    self.token = OSF_UPLOAD_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(OSF_UPLOAD_TEST_USER_TOKEN)
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
def test_error_404(self):
    """
    Return a 404 if the ticket_number provided is not a valid ticket number.
    """
    bad_header = {'HTTP_PRESQT_SOURCE_TOKEN': 'bad_token'}
    status_url = reverse('job_status', kwargs={'action': 'download'})

    response = self.client.get(status_url, **bad_header)

    # The unknown token hash has no job folder, so the endpoint rejects it.
    self.assertEqual(response.status_code, 404)
    expected_error = "PresQT Error: Invalid ticket number, '{}'.".format(
        hash_tokens('bad_token'))
    self.assertEqual(response.data['error'], expected_error)
def setUp(self):
    """Create the OSF source client and the known file hashes used by these download tests."""
    self.client = APIClient()
    self.token = OSF_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(OSF_TEST_USER_TOKEN)
    self.header = {
        'HTTP_PRESQT_SOURCE_TOKEN': OSF_TEST_USER_TOKEN,
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.target_name = 'osf'
    self.resource_id = '5cd98510f244ec001fe5632f'
    # Expected checksums for the fixture resource above.
    self.hashes = {
        "sha256": "3e517cda95ddbfcb270ab273201517f5ae0ee1190a9c5f6f7e6662f97868366f",
        "md5": "9e79fdd9032629743fca52634ecdfd86",
    }
def setUp(self):
    """Build the OSF upload client, headers, and BagIt fixture path for these tests."""
    self.client = APIClient()
    self.token = OSF_UPLOAD_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(self.token)
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.upload_url = reverse('resource_collection', kwargs={'target_name': 'osf'})
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
def setUp(self):
    """Set up the FigShare upload client and fixtures shared by this test case."""
    self.client = APIClient()
    self.token = FIGSHARE_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(self.token)
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.good_zip_file = 'presqt/api_v1/tests/resources/upload/GoodBagIt.zip'
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.resource_id = None
    self.duplicate_action = 'ignore'
def get(self, request, ticket_number): """ Check in on the resource's transfer process state. Parameters ---------- ticket_number : str The ticket number of the transfer being prepared. Returns ------- 200: OK """ # Perform token validation. Read data from the process_info file. try: destination_token = get_destination_token(request) source_token = get_source_token(request) process_data = get_process_info_data('transfers', ticket_number) process_token_validation(hash_tokens(destination_token), process_data, 'presqt-destination-token') process_token_validation(hash_tokens(source_token), process_data, 'presqt-source-token') except PresQTValidationError as e: return Response(data={'error': e.data}, status=e.status_code) transfer_status = process_data['status'] data = {'status_code': process_data['status_code'], 'message': process_data['message']} if transfer_status == 'finished': http_status = status.HTTP_200_OK data['failed_fixity'] = process_data['failed_fixity'] data['resources_ignored'] = process_data['resources_ignored'] data['resources_updated'] = process_data['resources_updated'] else: if transfer_status == 'in_progress': http_status = status.HTTP_202_ACCEPTED else: http_status = status.HTTP_500_INTERNAL_SERVER_ERROR return Response(status=http_status, data=data)
def upload_get(self): """ Get the status of an upload job. """ # Perform token validation. Read data from the process_info file. try: destination_token = get_destination_token(self.request) self.ticket_number = hash_tokens(destination_token) self.process_data = get_process_info_data(self.ticket_number) except PresQTValidationError as e: return Response(data={'error': e.data}, status=e.status_code) try: upload_process_data = self.process_data['resource_upload'] except KeyError: return Response(data={ 'error': 'PresQT Error: "resource_upload" not found in process_info file.' }, status=status.HTTP_400_BAD_REQUEST) upload_status = upload_process_data['status'] total_files = upload_process_data['upload_total_files'] files_finished = upload_process_data['upload_files_finished'] job_percentage = calculate_job_percentage(total_files, files_finished) data = { 'status_code': upload_process_data['status_code'], 'status': upload_status, 'message': upload_process_data['message'], 'job_percentage': job_percentage } if upload_status == 'finished': http_status = status.HTTP_200_OK data['status'] = upload_status data['failed_fixity'] = upload_process_data['failed_fixity'] data['resources_ignored'] = upload_process_data[ 'resources_ignored'] data['resources_updated'] = upload_process_data[ 'resources_updated'] data['link_to_resource'] = upload_process_data['link_to_resource'] data['job_percentage'] = 99 else: if upload_status == 'in_progress': http_status = status.HTTP_202_ACCEPTED data['job_percentage'] = job_percentage else: http_status = status.HTTP_500_INTERNAL_SERVER_ERROR return Response(status=http_status, data=data)
def test_get_error_500_400_metadata_file(self):
    """
    Return a 400 if the user attempts to transfer the PRESQT_FTS_METADATA.json file
    """
    headers = {'HTTP_PRESQT_DESTINATION_TOKEN': GITHUB_TEST_USER_TOKEN,
               'HTTP_PRESQT_SOURCE_TOKEN': OSF_TEST_USER_TOKEN,
               'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
               'HTTP_PRESQT_KEYWORD_ACTION': 'automatic',
               'HTTP_PRESQT_EMAIL_OPT_IN': ''}
    # Ticket number is "<source-token-hash>_<destination-token-hash>".
    self.ticket_number = '{}_{}'.format(hash_tokens(
        OSF_TEST_USER_TOKEN), hash_tokens(GITHUB_TEST_USER_TOKEN))
    url = reverse('resource_collection', kwargs={'target_name': 'github'})

    self.client.post(url, {
        "source_target_name": "osf",
        "source_resource_id": '5db70f51f3bb87000c853575',
        "keywords": []}, **headers, format='json')

    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(self.ticket_number)
    process_info = read_file(process_info_path, True)

    # Poll until the spawned off transfer process finishes in the background.
    while process_info['resource_transfer_in']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass

    url = reverse('job_status', kwargs={'action': 'transfer'})
    response = self.client.get(url, **headers)

    # Verify the status code and content
    self.assertEqual(response.status_code, 500)
    self.assertEqual(response.data['message'],
                     "PresQT Error: PresQT FTS metadata cannot not be transferred by itself.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def setUp(self):
    """Set up the GitLab upload client and fixtures shared by this test case."""
    self.client = APIClient()
    self.token = GITLAB_UPLOAD_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(self.token)
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.url = reverse('resource_collection', kwargs={'target_name': 'gitlab'})
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.resource_id = None
    self.duplicate_action = 'ignore'
    self.resources_ignored = []
    self.resources_updated = []
    self.hash_algorithm = 'sha256'
    self.success_message = 'Upload successful.'
def test_error_upload_to_file(self):
    """
    Test that we will get an error when attempting to upload to a file.
    """
    sleep(30)
    shared_upload_function_gitlab(self)

    # Verify the new repo exists on the PresQT Resource Collection endpoint.
    url = reverse('resource_collection', kwargs={'target_name': 'gitlab'})
    project_id = self.client.get(
        url, **{'HTTP_PRESQT_SOURCE_TOKEN': GITLAB_UPLOAD_TEST_USER_TOKEN}).json()['resources'][0]['id']
    shutil.rmtree(self.ticket_path)

    # Upload to existing repo
    self.resource_id = '{}:funnyfunnyimages%2FScreen Shot 2019-07-15 at 3%2E26%2E49 PM%2Epng'.format(
        project_id)
    self.url = reverse('resource', kwargs={
        'target_name': 'gitlab', 'resource_id': self.resource_id})
    self.headers['HTTP_PRESQT_FILE_DUPLICATE_ACTION'] = self.duplicate_action
    response = self.client.post(self.url, {'presqt-file': open(
        self.file, 'rb')}, **self.headers)

    ticket_number = hash_tokens(self.token)
    self.ticket_path = 'mediafiles/uploads/{}'.format(ticket_number)

    # Verify status code and message
    self.assertEqual(response.status_code, 202)
    self.assertEqual(
        response.data['message'], 'The server is processing the request.')

    # Verify process_info file status is 'in_progress' initially
    process_info = read_file('{}/process_info.json'.format(self.ticket_path), True)
    self.assertEqual(process_info['status'], 'in_progress')

    # Wait until the spawned off process finishes in the background to do further validation
    process_wait(process_info, self.ticket_path)

    # Verify process_info.json file data
    process_info = read_file('{}/process_info.json'.format(self.ticket_path), True)
    self.assertEqual(process_info['status'], 'failed')
    self.assertEqual(
        process_info['message'],
        'Resource with id, {}, belongs to a file.'.format(self.resource_id))
    self.assertEqual(process_info['status_code'], 400)
    shutil.rmtree(self.ticket_path)

    # Delete upload folder and project
    delete_gitlab_project(project_id, GITLAB_UPLOAD_TEST_USER_TOKEN)
def test_500_server_error(self):
    """
    If GitHub is having server issues, we want to make the user aware.
    """
    # Mock a server error for when a post request is made.
    class MockResponse:
        def __init__(self, json_data, status_code):
            self.json_data = json_data
            self.status_code = status_code
    mock_req = MockResponse({'error': 'The server is down.'}, 500)

    with patch('requests.post') as fake_post:
        fake_post.return_value = mock_req

        self.headers[
            'HTTP_PRESQT_FILE_DUPLICATE_ACTION'] = self.duplicate_action
        response = self.client.post(self.url,
                                    {'presqt-file': open(self.file, 'rb')}, **self.headers)

        ticket_number = hash_tokens(self.token)
        ticket_path = 'mediafiles/jobs/{}'.format(ticket_number)

        # Wait until the spawned off process finishes in the background
        # to do validation on the resulting files
        process_info = read_file(
            '{}/process_info.json'.format(ticket_path), True)
        while process_info['resource_upload']['status'] == 'in_progress':
            try:
                process_info = read_file(
                    '{}/process_info.json'.format(ticket_path), True)
            except json.decoder.JSONDecodeError:
                # Pass while the process_info file is being written to
                pass

        upload_job_response = self.client.get(response.data['upload_job'],
                                              **self.headers)
        self.assertEqual(upload_job_response.data['status_code'], 400)
        self.assertEqual(
            upload_job_response.data['message'],
            'Response has status code 500 while creating repository {}'.
            format(self.repo_title))

        # Delete the upload folder
        shutil.rmtree(ticket_path)
def test_success_download_public_file(self):
    """
    Return a 200 along with a zip file of the private article requested.
    """
    resource_id = '82529:12541559:23316914'
    shared_call_get_resource_zip(self, resource_id)

    url = reverse('job_status', kwargs={
        'action': 'download',
        'response_format': 'zip'
    })
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)

    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file
    self.assertEquals(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(
            self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    self.assertEqual(len(zip_file.namelist()), 13)

    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('figshare_download_{}/fixity_info.json'.format(
            resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 1)

    file_path = "{}_download_{}/data/PPFP Choices_Charurat_IRB7462_Facility Assessment Tool_V6_28 Feb.doc".format(
        self.target_name, resource_id)
    # Verify that the folder exists
    self.assertIn(file_path, zip_file.namelist())

    # Verify the file appears exactly once in the archive listing.
    count_of_file_references = zip_file.namelist().count(file_path)
    self.assertEqual(count_of_file_references, 1)

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(hash_tokens(self.token)))
def test_success_download_public_project(self):
    """
    Return a 200 along with a zip file of the public project requested.
    """
    resource_id = '82718'
    shared_call_get_resource_zip(self, resource_id)

    url = reverse('job_status', kwargs={
        'action': 'download',
        'response_format': 'zip'
    })
    response = self.client.get(url, **self.header)
    # Verify the status code
    self.assertEqual(response.status_code, 200)

    zip_file = zipfile.ZipFile(io.BytesIO(response.content))
    # Verify the name of the zip file
    self.assertEquals(
        response._headers['content-disposition'][1],
        'attachment; filename={}_download_{}.zip'.format(
            self.target_name, resource_id))
    # Verify content type
    self.assertEqual(response._headers['content-type'][1], 'application/zip')
    # Verify the number of resources in the zip is correct
    self.assertEqual(len(zip_file.namelist()), 72)

    # Verify the fixity file is empty as there was nothing to check.
    with zip_file.open('figshare_download_{}/fixity_info.json'.format(
            resource_id)) as fixityfile:
        zip_json = json.load(fixityfile)
        self.assertEqual(len(zip_json), 57)

    file_path = "{}_download_{}/data/ASFV alignment/Multiple sequence alignment for identification of divergent selection of MGF505/MGF360/MGF505-2R-4R.aln.fasta".format(
        self.target_name, resource_id)
    # Verify that the folder exists
    self.assertIn(file_path, zip_file.namelist())

    # Verify the file appears exactly once in the archive listing.
    count_of_file_references = zip_file.namelist().count(file_path)
    self.assertEqual(count_of_file_references, 1)

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(hash_tokens(self.token)))
def test_get_error_500_401_token_invalid_osf(self):
    """
    Return a 500 if the BaseResource._upload_resource method running on the server gets a 401
    error because the token is invalid.
    """
    self.url = reverse('resource_collection', kwargs={'target_name': 'osf'})
    self.headers['HTTP_PRESQT_DESTINATION_TOKEN'] = 'bad_token'
    self.ticket_number = hash_tokens('bad_token')
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.call_upload_resources()

    url = reverse('job_status', kwargs={'action': 'upload'})
    response = self.client.get(url, **self.headers)

    self.assertEqual(response.status_code, 500)
    self.assertEqual(
        response.data['message'],
        "Token is invalid. Response returned a 401 status code.")

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(self.ticket_number))
def test_success_202_empty_folder(self):
    """
    If an empty directory is included in the uploaded project, we want to ensure the user is
    made aware.
    """
    bag_with_empty_directory = 'presqt/api_v1/tests/resources/upload/Empty_Directory_Bag.zip'
    self.headers[
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION'] = self.duplicate_action
    response = self.client.post(
        self.url, {'presqt-file': open(bag_with_empty_directory, 'rb')},
        **self.headers)

    ticket_number = hash_tokens(self.token)
    ticket_path = 'mediafiles/jobs/{}'.format(ticket_number)

    # Wait until the spawned off process finishes in the background
    # to do validation on the resulting files
    process_info = read_file('{}/process_info.json'.format(ticket_path), True)
    while process_info['resource_upload']['status'] == 'in_progress':
        try:
            process_info = read_file(
                '{}/process_info.json'.format(ticket_path), True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass

    upload_job_response = self.client.get(response.data['upload_job'],
                                          **self.headers)
    # Verify status code and message
    self.assertEqual(upload_job_response.data['resources_ignored'],
                     ['/Egg/Empty_Folder'])

    # Remove the repo this upload created on the live GitHub test account.
    delete_github_repo(
        'presqt-test-user', 'Egg',
        {'Authorization': 'token {}'.format(GITHUB_TEST_USER_TOKEN)})

    # Delete upload folder
    shutil.rmtree(ticket_path)
def test_401_unauthorized_user(self):
    """
    If a user does not have a valid GitHub API token, we should return a 401 unauthorized status.
    """
    headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': 'eggyboi',
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': ''
    }
    response = self.client.post(self.url,
                                {'presqt-file': open(self.file, 'rb')},
                                **headers)

    ticket_number = hash_tokens('eggyboi')
    ticket_path = 'mediafiles/jobs/{}'.format(ticket_number)

    # Wait until the spawned off process finishes in the background
    # to do validation on the resulting files
    process_info = read_file('{}/process_info.json'.format(ticket_path), True)
    while process_info['resource_upload']['status'] == 'in_progress':
        try:
            process_info = read_file(
                '{}/process_info.json'.format(ticket_path), True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass

    upload_job_response = self.client.get(response.data['upload_job'],
                                          **headers)
    # Ensure the response is what we expect
    self.assertEqual(upload_job_response.data['status_code'], 401)
    self.assertEqual(
        upload_job_response.data['message'],
        'Token is invalid. Response returned a 401 status code.')

    # Delete upload folder
    shutil.rmtree(ticket_path)
def setUp(self):
    """Prepare the GitHub upload client plus the fixtures these tests assert against."""
    self.client = APIClient()
    self.token = GITHUB_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(self.token)
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.url = reverse('resource_collection', kwargs={'target_name': 'github'})
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.repo_title = 'NewProject'
    self.resource_id = None
    self.duplicate_action = 'ignore'
    self.hash_algorithm = 'md5'
    self.resources_ignored = []
    self.resources_updated = []
    # GitHub does not return checksums for this file, so fixity is expected to fail.
    self.failed_fixity = [
        '/NewProject/funnyfunnyimages/Screen Shot 2019-07-15 at 3.26.49 PM.png'
    ]
    self.process_message = "Upload successful. Fixity can't be determined because GitHub may not have provided a file checksum. See PRESQT_FTS_METADATA.json for more details."
def setUp(self):
    """Prepare the Zenodo upload client plus the fixtures these tests assert against."""
    self.client = APIClient()
    self.token = ZENODO_TEST_USER_TOKEN
    self.ticket_number = hash_tokens(self.token)
    self.auth_params = {'access_token': self.token}
    self.headers = {
        'HTTP_PRESQT_DESTINATION_TOKEN': self.token,
        'HTTP_PRESQT_FILE_DUPLICATE_ACTION': 'ignore',
        'HTTP_PRESQT_EMAIL_OPT_IN': '',
    }
    self.url = reverse('resource_collection', kwargs={'target_name': 'zenodo'})
    self.file = 'presqt/api_v1/tests/resources/upload/ProjectBagItToUpload.zip'
    self.project_title = 'NewProject'
    self.resource_id = None
    self.duplicate_action = 'ignore'
    self.hash_algorithm = 'md5'
    self.resources_ignored = []
    self.resources_updated = []
    # Expected shape of the PresQT FTS metadata written alongside the upload.
    self.metadata_dict = {
        "allKeywords": [],
        "actions": [{
            "id": "uuid",
            "actionDateTime": "2019-11-11 15:18:39.596797+00:00",
            "actionType": "resource_upload",
            "sourceTargetName": "Local Machine",
            "destinationTargetName": "zenodo",
            "sourceUsername": "******",
            "destinationUsername": "******",
            "keywords": {},
            "files": {
                "created": [],
                "updated": [],
                "ignored": []
            }
        }]
    }
def test_error_500_404_file(self):
    """
    Return a 500 if an invalid resource_id (file) is provided.
    """
    url = reverse('resource', kwargs={
        'target_name': self.target_name,
        'resource_id': '17993268:TheEggBasketMetaphor%2Emp4',
        'resource_format': 'zip'
    })
    response = self.client.get(url, **self.header)
    ticket_number = hash_tokens(self.token)
    download_url = response.data['download_job_zip']
    process_info_path = 'mediafiles/jobs/{}/process_info.json'.format(
        ticket_number)
    process_info = read_file(process_info_path, True)

    # Poll until the spawned off download process finishes in the background.
    while process_info['resource_download']['status'] == 'in_progress':
        try:
            process_info = read_file(process_info_path, True)
        except json.decoder.JSONDecodeError:
            # Pass while the process_info file is being written to
            pass
    download_response = self.client.get(download_url, **self.header)

    # The endpoint lumps all errors into a 500 status code
    self.assertEqual(download_response.status_code, 500)
    self.assertEqual(download_response.data['status_code'], 404)
    self.assertEqual(
        download_response.data['message'],
        "The resource with id, 17993268:TheEggBasketMetaphor%2Emp4, does not exist for this user."
    )

    # Delete corresponding folder
    shutil.rmtree('mediafiles/jobs/{}'.format(ticket_number))