def test_save_to_cache(self, track_return_values):
    """Test that the form data can be saved to the cache."""
    # Capture the token generated internally by token_urlsafe
    call_tracker = track_return_values(file_form, 'token_urlsafe')
    csv_file = make_csv_file_from_dicts(
        *make_matched_rows(1),
        filename='cache-test.csv',
    )
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    form.save_to_cache()

    # Exactly one cache token should have been generated
    assert len(call_tracker.return_values) == 1
    generated_token = call_tracker.return_values[0]

    contents_key = _cache_key_for_token(generated_token, CacheKeyType.file_contents)
    name_key = _cache_key_for_token(generated_token, CacheKeyType.file_name)

    # The file contents are stored gzip-compressed; the name is stored as-is
    csv_file.seek(0)
    assert gzip.decompress(cache.get(contents_key)) == csv_file.read()
    assert cache.get(name_key) == csv_file.name
def test_save_returns_unmatched_rows(self, num_unmatched, num_multiple_matches):
    """Test that save() returns an UnmatchedRowCollector with the expected rows."""
    num_matching = 2
    rows_matched = make_matched_rows(num_matching)
    rows_unmatched = make_unmatched_rows(num_unmatched)
    rows_ambiguous = make_multiple_matches_rows(num_multiple_matches)
    adviser = AdviserFactory(first_name='Admin', last_name='User')
    csv_file = make_csv_file_from_dicts(
        *rows_matched,
        *rows_unmatched,
        *rows_ambiguous,
    )
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    _, collector = form.save(adviser)

    # Both unmatched and multiple-match rows end up in the collector, in input order
    assert collector.rows == [
        *rows_unmatched,
        *rows_ambiguous,
    ]
def test_save_creates_versions(self):
    """Test that save() creates versions using django-reversion."""
    row_count = 5
    rows = make_matched_rows(row_count)
    adviser = AdviserFactory(first_name='Admin', last_name='User')
    csv_file = make_csv_file_from_dicts(*rows)
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    form.save(adviser)

    interactions = list(Interaction.objects.all())
    assert len(interactions) == row_count

    # A single revision should cover all created interactions
    assert Revision.objects.count() == 1
    assert Revision.objects.first().get_comment() == REVISION_COMMENT

    # Each interaction should have exactly one version
    # (list used rather than a generator for useful failure messages)
    assert all([
        Version.objects.get_for_object(interaction).count() == 1
        for interaction in interactions
    ])
def test_get_matching_summary_with_invalid_rows(self):
    """
    Test that get_matching_summary() raises an exception if one of the CSV rows
    fails validation.
    """
    # Every field is deliberately invalid so row-level validation fails
    invalid_row = {
        'theme': 'invalid',
        'kind': 'invalid',
        'date': 'invalid',
        'adviser_1': 'invalid',
        'contact_email': 'invalid',
        'service': 'invalid',
        'communication_channel': 'invalid',
    }
    csv_file = make_csv_file_from_dicts(invalid_row)
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    # File-level validation passes; the row-level failure surfaces later
    assert form.is_valid()

    with pytest.raises(DataHubError):
        form.get_matching_summary(50)
def test_save_returns_correct_counts(self, num_unmatched, num_multiple_matches):
    """Test that save() returns the expected counts for each matching status."""
    num_matching = 1
    rows_matched = make_matched_rows(num_matching)
    rows_unmatched = make_unmatched_rows(num_unmatched)
    rows_ambiguous = make_multiple_matches_rows(num_multiple_matches)
    adviser = AdviserFactory(first_name='Admin', last_name='User')
    csv_file = make_csv_file_from_dicts(
        *rows_matched,
        *rows_unmatched,
        *rows_ambiguous,
    )
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    counts, _ = form.save(adviser)

    # One count per matching status, mirroring the input composition
    assert counts == {
        ContactMatchingStatus.matched: num_matching,
        ContactMatchingStatus.unmatched: num_unmatched,
        ContactMatchingStatus.multiple_matches: num_multiple_matches,
    }
def test_shows_preview_if_matches(
    self,
    num_input_rows,
    expected_num_omitted_rows,
    monkeypatch,
):
    """
    Test that if a valid file is uploaded and some records are matched to contacts,
    the import_preview.html template is used with an appropriate context.
    """
    # Shrink the preview limit so omission behaviour can be exercised with small inputs
    max_preview_rows = 5
    monkeypatch.setattr(
        'datahub.interaction.admin_csv_import.views.MAX_PREVIEW_ROWS_TO_DISPLAY',
        max_preview_rows,
    )
    csv_rows = make_matched_rows(num_input_rows)
    file = make_csv_file_from_dicts(*csv_rows)

    response = self.client.post(
        import_interactions_url,
        data={
            'csv_file': file,
        },
    )

    assert response.status_code == status.HTTP_200_OK
    assert response.template_name.endswith('/import_preview.html')
    assert response.context['num_matched'] == num_input_rows
    assert response.context['num_unmatched'] == 0
    assert response.context['num_multiple_matches'] == 0
    # At most max_preview_rows matched rows are displayed; the rest are omitted
    assert len(response.context['matched_rows']) == min(max_preview_rows, num_input_rows)
    assert response.context['num_matched_omitted'] == expected_num_omitted_rows
def test_save_creates_interactions(self, num_unmatched, num_multiple_matches):
    """Test that save() creates interactions."""
    num_matching = 3
    rows_matched = make_matched_rows(num_matching)
    rows_unmatched = make_unmatched_rows(num_unmatched)
    rows_ambiguous = make_multiple_matches_rows(num_multiple_matches)
    adviser = AdviserFactory(first_name='Admin', last_name='User')
    csv_file = make_csv_file_from_dicts(
        *rows_matched,
        *rows_unmatched,
        *rows_ambiguous,
    )
    raw_contents = csv_file.getvalue()
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, raw_contents),
        },
    )
    assert form.is_valid()

    form.save(adviser)

    interactions = list(Interaction.objects.all())
    # Only matched rows result in interactions
    assert len(interactions) == num_matching

    emails_expected = {row['contact_email'] for row in rows_matched}
    emails_actual = {
        interaction.contacts.first().email for interaction in interactions
    }
    # Make sure the test was correctly set up with unique contact emails
    assert len(emails_actual) == num_matching
    # Check that the interactions created are the ones we expect
    # Note: the full saving logic for a row is tested in the InteractionCSVRowForm tests
    assert emails_expected == emails_actual

    expected_source = {
        'file': {
            'name': csv_file.name,
            'size': len(raw_contents),
            'sha256': hashlib.sha256(raw_contents).hexdigest(),
        },
    }
    # `source` has been set (list used rather than a generator for useful failure messages)
    assert all([
        interaction.source == expected_source
        for interaction in interactions
    ])
def test_get_matching_summary(
    self,
    num_matching,
    num_unmatched,
    num_multiple_matches,
    max_returned_rows,
):
    """Test get_matching_summary() with various inputs."""
    rows_matched = make_matched_rows(num_matching)
    rows_unmatched = make_unmatched_rows(num_unmatched)
    rows_ambiguous = make_multiple_matches_rows(num_multiple_matches)
    csv_file = make_csv_file_from_dicts(
        *rows_matched,
        *rows_unmatched,
        *rows_ambiguous,
    )
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    counts, preview_rows = form.get_matching_summary(max_returned_rows)

    # Counts reflect the input composition exactly
    assert counts == {
        ContactMatchingStatus.matched: num_matching,
        ContactMatchingStatus.unmatched: num_unmatched,
        ContactMatchingStatus.multiple_matches: num_multiple_matches,
    }

    # Only matched rows are returned, capped at max_returned_rows
    num_expected_rows = min(num_matching, max_returned_rows)
    assert len(preview_rows) == num_expected_rows

    # Check that the rows returned are the ones we expect (in input order)
    emails_expected = [
        row['contact_email']
        for row in rows_matched[:num_expected_rows]
    ]
    emails_actual = [row['contacts'][0].email for row in preview_rows]
    assert emails_expected == emails_actual
def _create_file_in_cache(token, num_matching, num_unmatched, num_multiple_matches):
    """
    Build a CSV file with the given row composition and store it
    (gzip-compressed contents plus file name) in the cache under `token`.

    Returns the matched rows so callers can assert on them.
    """
    rows_matched = make_matched_rows(num_matching)
    rows_unmatched = make_unmatched_rows(num_unmatched)
    rows_ambiguous = make_multiple_matches_rows(num_multiple_matches)
    csv_file = make_csv_file_from_dicts(
        *rows_matched,
        *rows_unmatched,
        *rows_ambiguous,
        filename='cache-test.csv',
    )
    with csv_file:
        compressed_contents = gzip.compress(csv_file.read())
        file_name = csv_file.name

    # Contents and name live under two separate, token-derived cache keys
    cache.set(_cache_key_for_token(token, CacheKeyType.file_contents), compressed_contents)
    cache.set(_cache_key_for_token(token, CacheKeyType.file_name), file_name)
    return rows_matched
def test_save_rolls_back_on_error(self):
    """Test that save() rolls back if one row can't be saved."""
    user = AdviserFactory(first_name='Admin', last_name='User')
    file = make_csv_file_from_dicts(
        *make_matched_rows(5),
        # an invalid row
        {},
    )
    file_contents = file.getvalue()

    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(file.name, file_contents),
        },
    )
    assert form.is_valid()

    # DataHubError is the exception name used by the sibling test
    # test_get_matching_summary_with_invalid_rows; DataHubException was the
    # inconsistent/outdated name.
    with pytest.raises(DataHubError):
        form.save(user)

    # The transaction should have been rolled back, leaving no interactions behind
    assert not Interaction.objects.count()
def test_from_token_with_valid_token(self):
    """Test that a form can be restored from the cache."""
    token = 'test-token'
    csv_file = make_csv_file_from_dicts(
        *make_matched_rows(1),
        filename='cache-test.csv',
    )
    # Seed the cache the same way save_to_cache() would: gzipped contents + name
    cache.set(
        _cache_key_for_token(token, CacheKeyType.file_contents),
        gzip.compress(csv_file.read()),
    )
    cache.set(_cache_key_for_token(token, CacheKeyType.file_name), csv_file.name)

    form = InteractionCSVForm.from_token(token)
    assert form.is_valid()

    # The restored file should round-trip both contents and name
    csv_file.seek(0)
    assert csv_file.read() == form.cleaned_data['csv_file'].read()
    assert csv_file.name == form.cleaned_data['csv_file'].name
def test_get_row_errors_with_duplicate_rows(self):
    """Test that duplicate rows are tracked and errors returned when encountered."""
    rows_with_match = make_matched_rows(5)
    csv_file = make_csv_file_from_dicts(
        # Repeat the first matched row so the form sees a duplicate
        rows_with_match[0],
        *rows_with_match,
        *make_unmatched_rows(5),
        *make_multiple_matches_rows(5),
    )
    form = InteractionCSVForm(
        files={
            'csv_file': SimpleUploadedFile(csv_file.name, csv_file.getvalue()),
        },
    )
    assert form.is_valid()

    errors = list(form.get_row_error_iterator())

    # Only the second occurrence (row index 1) is reported as a duplicate
    assert errors == [
        CSVRowError(1, NON_FIELD_ERRORS, '', DUPLICATE_OF_ANOTHER_ROW_MESSAGE),
    ]