def test_get_autoimporter_names_returns_configured_autoimporters(
        self, create):
    """A registered Flickr importer shows up in the autoimporter names."""
    importer_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
    importer = Importer()
    importer.register_flickr_importer(importer_params)
    assert 'flickr' in importer.get_autoimporter_names()
def test_get_all_importers_returns_configured_importers(self, create):
    """Flickr, Dropbox and Twitter importers appear once registered."""
    flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
    twitter_params = {}
    importer = Importer()
    importer.register_flickr_importer(flickr_params)
    importer.register_dropbox_importer()
    importer.register_twitter_importer(twitter_params)
    assert 'flickr' in importer.get_all_importer_names()
    assert 'dropbox' in importer.get_all_importer_names()
    assert 'twitter' in importer.get_all_importer_names()
def test_get_autoimporter_names_returns_configured_autoimporters(self, create):
    """Flickr/Twitter autoimport; Dropbox does not (no autoimporter)."""
    flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
    twitter_params = {}
    importer = Importer()
    importer.register_flickr_importer(flickr_params)
    importer.register_dropbox_importer()
    importer.register_twitter_importer(twitter_params)
    assert 'flickr' in importer.get_autoimporter_names()
    assert 'twitter' in importer.get_autoimporter_names()
    assert 'dropbox' not in importer.get_autoimporter_names()
def test_get_autoimporter_names_returns_configured_autoimporters(self, create):
    """Flickr/Twitter autoimport; Dropbox does not (no autoimporter).

    Quote style normalized to single quotes for consistency with the
    rest of the file; string values are unchanged.
    """
    flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
    twitter_params = {}
    importer = Importer()
    importer.register_flickr_importer(flickr_params)
    importer.register_dropbox_importer()
    importer.register_twitter_importer(twitter_params)
    assert 'flickr' in importer.get_autoimporter_names()
    assert 'twitter' in importer.get_autoimporter_names()
    assert 'dropbox' not in importer.get_autoimporter_names()
def test_get_all_importers_returns_configured_importers(self, create):
    """Flickr and Dropbox importers appear once registered."""
    importer_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
    importer = Importer()
    importer.register_flickr_importer(importer_params)
    importer.register_dropbox_importer()
    assert 'flickr' in importer.get_all_importer_names()
    assert 'dropbox' in importer.get_all_importer_names()
class TestImporterValidation(Test):

    """create_tasks rejects rows with invalid n_answers, priority or
    a non-allowed S3 bucket, importing only the valid rows."""

    importer = Importer()

    @with_context
    def test_invalid_n_answer(self, importer_factory):
        """A row with a non-integer n_answers fails; valid rows import."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}, 'n_answers': ''},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert '1 task import failed due to invalid n_answers' in result.message, result.message
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_invalid_priority_0(self, importer_factory):
        """A row with a non-numeric priority_0 fails; valid rows import."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}, 'priority_0': ''},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert '1 task import failed due to invalid priority' in result.message, result.message
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_invalid_bucket(self, importer_factory):
        """A row referencing an S3 bucket outside ALLOWED_S3_BUCKETS fails."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'https://s3.amazonaws.com/invalid/hey'}},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        with patch.dict(self.flask_app.config,
                        {'ALLOWED_S3_BUCKETS': ['valid']}):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert '1 task import failed due to invalid s3 bucket' in result.message, result.message
        importer_factory.assert_called_with(**form_data)
class TestImporterPublicMethods(Test):

    """Public Importer API: create_tasks, count_tasks_to_import and the
    importer/autoimporter name listings."""

    importer = Importer()

    @with_context
    def test_create_tasks_creates_them_correctly(self, importer_factory):
        """A task is stored with the info and n_answers from the importer."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {'question': 'question', 'url': 'url'},
            'n_answers': 20}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='csv', csv_url='http://fakecsv.com')
        self.importer.create_tasks(task_repo, project, **form_data)
        task = task_repo.get_task(1)
        assert task is not None
        assert task.project_id == project.id, task.project_id
        assert task.n_answers == 20, task.n_answers
        assert task.info == {'question': 'question', 'url': 'url'}, task.info
        importer_factory.assert_called_with(**form_data)
        mock_importer.tasks.assert_called_with()

    @with_context
    def test_create_tasks_creates_many_tasks(self, importer_factory):
        """Every row yielded by the importer becomes a task."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 2, len(tasks)
        assert result.message == '2 new tasks were imported successfully ', result
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_not_creates_duplicated_tasks(self, importer_factory):
        """A row identical to an existing task is not imported again."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        TaskFactory.create(project=project, info={'question': 'question'})
        form_data = dict(type='flickr', album_id='1234')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert result.message == 'It looks like there were no new records to import. ', result.message
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_returns_task_report(self, importer_factory):
        """The report carries message, total and importer metadata."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        assert result.message == '1 new task was imported successfully ', result.message
        assert result.total == 1, result.total
        assert result.metadata == metadata, result.metadata

    @with_context
    def test_create_tasks_save_exception(self, importer_factory):
        """A repo save failure is reported, not raised to the caller."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234')
        with patch.object(task_repo, 'save', side_effect=Exception('a')):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        assert '1 task import failed due to a' in result.message, result.message

    @with_context
    def test_count_tasks_to_import_returns_number_of_tasks_to_import(
            self, importer_factory):
        """count_tasks_to_import delegates to the importer's count_tasks."""
        mock_importer = Mock()
        mock_importer.count_tasks.return_value = 2
        importer_factory.return_value = mock_importer
        form_data = dict(type='epicollect', epicollect_project='project',
                         epicollect_form='form')
        number_of_tasks = self.importer.count_tasks_to_import(**form_data)
        assert number_of_tasks == 2, number_of_tasks
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_get_all_importer_names_returns_default_importer_names(
            self, create):
        importers = self.importer.get_all_importer_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 's3', 'localCSV',
                              'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_all_importers_returns_configured_importers(self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        youtube_params = {
            'youtube_api_server_key':
                self.flask_app.config['YOUTUBE_API_SERVER_KEY']}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        importer.register_youtube_importer(youtube_params)
        assert 'flickr' in importer.get_all_importer_names()
        assert 'dropbox' in importer.get_all_importer_names()
        assert 'twitter' in importer.get_all_importer_names()
        assert 'youtube' in importer.get_all_importer_names()

    @with_context
    def test_get_autoimporter_names_returns_default_autoimporter_names(
            self, create):
        importers = self.importer.get_autoimporter_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 'localCSV', 'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_autoimporter_names_returns_configured_autoimporters(
            self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        assert 'flickr' in importer.get_autoimporter_names()
        assert 'twitter' in importer.get_autoimporter_names()
        # Dropbox has no autoimporter support.
        assert 'dropbox' not in importer.get_autoimporter_names()
class TestImporterPublicMethods(Test):

    """Public Importer API, including private/gold field handling for
    the localCSV importer."""

    importer = Importer()

    @with_context
    def test_create_tasks_creates_them_correctly(self, importer_factory):
        """A task is stored with the info and n_answers from the importer."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {'question': 'question', 'url': 'url'},
            'n_answers': 20}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='csv', csv_url='http://fakecsv.com')
        self.importer.create_tasks(task_repo, project, **form_data)
        task = task_repo.get_task(1)
        assert task is not None
        assert task.project_id == project.id, task.project_id
        assert task.n_answers == 20, task.n_answers
        assert task.info == {'question': 'question', 'url': 'url'}, task.info
        importer_factory.assert_called_with(**form_data)
        mock_importer.tasks.assert_called_with()

    @with_context
    def test_create_tasks_creates_many_tasks(self, importer_factory):
        """Every row yielded by the importer becomes a task."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 2, len(tasks)
        assert result.message == '2 new tasks were imported successfully ', result
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_not_creates_duplicated_tasks(self, importer_factory):
        """A row identical to an existing task is not imported again."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        TaskFactory.create(project=project, info={'question': 'question'})
        form_data = dict(type='flickr', album_id='1234')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert result.message == 'It looks like there were no new records to import. ', result.message
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_returns_task_report(self, importer_factory):
        """The report carries message, total and importer metadata."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234')
        result = self.importer.create_tasks(task_repo, project, **form_data)
        assert result.message == '1 new task was imported successfully ', result.message
        assert result.total == 1, result.total
        assert result.metadata == metadata, result.metadata

    @with_context
    def test_create_tasks_save_exception(self, importer_factory):
        """A repo save failure is reported, not raised to the caller."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234')
        with patch.object(task_repo, 'save', side_effect=Exception('a')):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        assert '1 task import failed due to a' in result.message, result.message

    @with_context
    def test_count_tasks_to_import_returns_number_of_tasks_to_import(
            self, importer_factory):
        """count_tasks_to_import delegates to the importer's count_tasks."""
        mock_importer = Mock()
        mock_importer.count_tasks.return_value = 2
        importer_factory.return_value = mock_importer
        form_data = dict(type='epicollect', epicollect_project='project',
                         epicollect_form='form')
        number_of_tasks = self.importer.count_tasks_to_import(**form_data)
        assert number_of_tasks == 2, number_of_tasks
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_get_all_importer_names_returns_default_importer_names(
            self, create):
        importers = self.importer.get_all_importer_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 's3', 'localCSV',
                              'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_all_importers_returns_configured_importers(self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        youtube_params = {
            'youtube_api_server_key':
                self.flask_app.config['YOUTUBE_API_SERVER_KEY']}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        importer.register_youtube_importer(youtube_params)
        assert 'flickr' in importer.get_all_importer_names()
        assert 'dropbox' in importer.get_all_importer_names()
        assert 'twitter' in importer.get_all_importer_names()
        assert 'youtube' in importer.get_all_importer_names()

    @with_context
    def test_get_autoimporter_names_returns_default_autoimporter_names(
            self, create):
        importers = self.importer.get_autoimporter_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 'localCSV', 'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_autoimporter_names_returns_configured_autoimporters(
            self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        assert 'flickr' in importer.get_autoimporter_names()
        assert 'twitter' in importer.get_autoimporter_names()
        # Dropbox has no autoimporter support.
        assert 'dropbox' not in importer.get_autoimporter_names()

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    @patch('pybossa.importers.csv.data_access_levels')
    def test_create_tasks_creates_private_regular_and_gold_fields(
            self, mock_data_access, mock_del, upload_from_string,
            importer_factory):
        """Private fields/gold answers are uploaded and replaced by URLs.

        NOTE(review): the original body immediately rebound
        ``mock_data_access = True``; that local rebinding had no effect —
        the ``@patch`` above already replaces ``data_access_levels`` with a
        truthy Mock — so it has been removed.
        """
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a'},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'private_gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='localCSV', csv_filename='fakefile.csv')
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev'}):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        importer_factory.assert_called_with(**form_data)
        upload_from_string.assert_called()
        assert result.message == '1 new task was imported successfully ', result
        # validate task created has private fields url, gold_answers url
        # calibration and exported flag set
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        task = tasks[0]
        assert task.info['private_json__upload_url'] == u'http://localhost/fileproxy/encrypted/dev/mybucket/1/f53d27fe2a2e52930a9846a1c66312a2/task_private_data.json'
        assert task.gold_answers == u'http://localhost/fileproxy/encrypted/dev/mybucket/1/f53d27fe2a2e52930a9846a1c66312a2/task_private_gold_answer.json'
        assert task.calibration and task.exported
class TestImporterPublicMethods(Test):

    """Public Importer API with task-presenter validation (validate_tp),
    encrypted private/gold uploads and enrichment state handling."""

    importer = Importer()

    @with_context
    def test_create_tasks_creates_them_correctly(self, importer_factory):
        """A task is stored with the info and n_answers from the importer."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {'question': 'question', 'url': 'url'},
            'n_answers': 20}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='csv', csv_url='http://fakecsv.com',
                         validate_tp=False)
        self.importer.create_tasks(task_repo, project, **form_data)
        task = task_repo.get_task(1)
        assert task is not None
        assert task.project_id == project.id, task.project_id
        assert task.n_answers == 20, task.n_answers
        assert task.info == {'question': 'question', 'url': 'url'}, task.info
        importer_factory.assert_called_with(**form_data)
        mock_importer.tasks.assert_called_with()

    @with_context
    def test_create_tasks_creates_many_tasks(self, importer_factory):
        """Every row yielded by the importer becomes a task."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com',
                         validate_tp=False)
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 2, len(tasks)
        assert result.message == '2 new tasks were imported successfully ', result
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_not_creates_duplicated_tasks(self, importer_factory):
        """A row identical to an existing task is not imported again."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        TaskFactory.create(project=project, info={'question': 'question'})
        form_data = dict(type='flickr', album_id='1234', validate_tp=False)
        result = self.importer.create_tasks(task_repo, project, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        assert result.message == 'It looks like there were no new records to import. ', result.message
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_create_tasks_returns_task_report(self, importer_factory):
        """The report carries message, total and importer metadata."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234', validate_tp=False)
        result = self.importer.create_tasks(task_repo, project, **form_data)
        assert result.message == '1 new task was imported successfully ', result.message
        assert result.total == 1, result.total
        assert result.metadata == metadata, result.metadata

    @with_context
    def test_create_tasks_save_exception(self, importer_factory):
        """A repo save failure is reported, not raised to the caller."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        metadata = {"metadata": 123}
        mock_importer.import_metadata.return_value = metadata
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='flickr', album_id='1234', validate_tp=False)
        with patch.object(task_repo, 'save', side_effect=Exception('a')):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        assert '1 task import failed due to a' in result.message, result.message

    @with_context
    def test_count_tasks_to_import_returns_number_of_tasks_to_import(
            self, importer_factory):
        """count_tasks_to_import delegates to the importer's count_tasks."""
        mock_importer = Mock()
        mock_importer.count_tasks.return_value = 2
        importer_factory.return_value = mock_importer
        form_data = dict(type='epicollect', epicollect_project='project',
                         epicollect_form='form')
        number_of_tasks = self.importer.count_tasks_to_import(**form_data)
        assert number_of_tasks == 2, number_of_tasks
        importer_factory.assert_called_with(**form_data)

    @with_context
    def test_get_all_importer_names_returns_default_importer_names(
            self, create):
        importers = self.importer.get_all_importer_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 's3', 'localCSV',
                              'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_all_importers_returns_configured_importers(self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        youtube_params = {
            'youtube_api_server_key':
                self.flask_app.config['YOUTUBE_API_SERVER_KEY']}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        importer.register_youtube_importer(youtube_params)
        assert 'flickr' in importer.get_all_importer_names()
        assert 'dropbox' in importer.get_all_importer_names()
        assert 'twitter' in importer.get_all_importer_names()
        assert 'youtube' in importer.get_all_importer_names()

    @with_context
    def test_get_autoimporter_names_returns_default_autoimporter_names(
            self, create):
        importers = self.importer.get_autoimporter_names()
        expected_importers = ['csv', 'gdocs', 'epicollect', 'localCSV', 'iiif']
        assert set(importers) == set(expected_importers)

    @with_context
    def test_get_autoimporter_names_returns_configured_autoimporters(
            self, create):
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        assert 'flickr' in importer.get_autoimporter_names()
        assert 'twitter' in importer.get_autoimporter_names()
        # Dropbox has no autoimporter support.
        assert 'dropbox' not in importer.get_autoimporter_names()

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_create_tasks_creates_private_regular_and_gold_fields(
            self, mock_del, upload_from_string, importer_factory):
        """Private fields/gold answers become encrypted fileproxy URLs."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a'},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        importer_factory.assert_called_with(**form_data)
        upload_from_string.assert_called()
        assert result.message == '1 new task was imported successfully ', result
        # validate task created has private fields url, gold_answers url
        # calibration and exported flag set
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        task = tasks[0]
        private_json_url = task.info['private_json__upload_url']
        # Drop the scheme, then inspect each path component of the URL.
        localhost, fileproxy, encrypted, env, bucket, project_id, hash_key, filename = private_json_url.split('/', 2)[2].split('/')
        assert localhost == 'localhost', localhost
        assert fileproxy == 'fileproxy', fileproxy
        assert encrypted == 'encrypted', encrypted
        assert env == 'dev', env
        assert bucket == 'mybucket', bucket
        assert project_id == '1', project_id
        assert filename == 'task_private_data.json', filename
        gold_ans__upload_url = task.gold_answers['gold_ans__upload_url']
        localhost, fileproxy, encrypted, env, bucket, project_id, hash_key, filename = gold_ans__upload_url.split('/', 2)[2].split('/')
        assert localhost == 'localhost', localhost
        assert fileproxy == 'fileproxy', fileproxy
        assert encrypted == 'encrypted', encrypted
        assert env == 'dev', env
        assert bucket == 'mybucket', bucket
        assert project_id == '1', project_id
        assert filename == 'task_private_gold_answer.json', filename
        assert task.calibration and task.exported
        assert task.state == 'ongoing', task.state

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_create_tasks_creates_private_regular_and_gold_fields_with_enrichment(
            self, mock_del, upload_from_string, importer_factory):
        """With enrichment configured, 'enrich' rows import and keep state."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a'},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True,
            'state': u'enrich'}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create(
            info={'enrichments': [{'out_field_name': 'enriched'}]})
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            result = self.importer.create_tasks(task_repo, project,
                                                **form_data)
        importer_factory.assert_called_with(**form_data)
        upload_from_string.assert_called()
        assert result.message == '1 new task was imported successfully ', result.message
        # validate task created has private fields url, gold_answers url
        # calibration and exported flag set
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        task = tasks[0]
        private_json_file_url = task.info['private_json__upload_url']
        # With enrichment the upload URL is wrapped in a dict.
        private_json_url = private_json_file_url['externalUrl']
        localhost, fileproxy, encrypted, env, bucket, project_id, hash_key, filename = private_json_url.split('/', 2)[2].split('/')
        assert localhost == 'localhost', localhost
        assert fileproxy == 'fileproxy', fileproxy
        assert encrypted == 'encrypted', encrypted
        assert env == 'dev', env
        assert bucket == 'mybucket', bucket
        assert project_id == '1', project_id
        assert filename == 'task_private_data.json', filename
        gold_ans__upload_url = task.gold_answers['gold_ans__upload_url']
        localhost, fileproxy, encrypted, env, bucket, project_id, hash_key, filename = gold_ans__upload_url.split('/', 2)[2].split('/')
        assert localhost == 'localhost', localhost
        assert fileproxy == 'fileproxy', fileproxy
        assert encrypted == 'encrypted', encrypted
        assert env == 'dev', env
        assert bucket == 'mybucket', bucket
        assert project_id == '1', project_id
        assert filename == 'task_private_gold_answer.json', filename
        assert task.calibration and task.exported
        assert task.state == 'enrich', task.state

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_enrich_task_requires_enrichment_config(self, mock_del,
                                                    upload_from_string,
                                                    importer_factory):
        """A row in 'enrich' state fails when the project has no
        enrichment configuration."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a'},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True,
            'state': u'enrich'}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer.create_tasks(task_repo, project,
                                                       **form_data)
        # print() form is both Python 2 and Python 3 compatible.
        print(import_report.message)
        assert 'task import failed' in import_report.message

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_enrich_task_with_enrichment_output_fails(self, mock_del,
                                                      upload_from_string,
                                                      importer_factory):
        """A row that already contains the enrichment output field fails."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a', u'enriched': 1},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True,
            'state': u'enrich'}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create(
            info={'enrichments': [{'out_field_name': 'enriched'}]})
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer.create_tasks(task_repo, project,
                                                       **form_data)
        print(import_report.message)
        assert 'task import failed' in import_report.message

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_invalid_state_fails(self, mock_del, upload_from_string,
                                 importer_factory):
        """A row with an unrecognized state value fails to import."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {u'Foo': u'a'},
            'private_fields': {u'Bar2': u'd', u'Bar': u'c'},
            'gold_answers': {u'ans2': u'e', u'ans': u'b'},
            'calibration': 1,
            'exported': True,
            'state': u'enriched'}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer.create_tasks(task_repo, project,
                                                       **form_data)
        print(import_report.message)
        assert 'task import failed' in import_report.message

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_task_presenter_validation_fails_with_missing_import_field(
            self, mock_del, upload_from_string, importer_factory):
        """_validate_headers reports presenter fields absent from import."""
        mock_importer = Mock()
        mock_importer.fields.return_value = {'Foo', 'Bar2', 'Bar'}
        project = ProjectFactory.create(
            info={'task_presenter': 'task.info.bar'})
        form_data = dict(type='localCSV', csv_filename='fakefile.csv')
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer._validate_headers(
                mock_importer, project, **form_data)
        print(import_report.message)
        assert import_report.message

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_task_presenter_validation_ignores_enrichment_output(
            self, mock_del, upload_from_string, importer_factory):
        """Enrichment output fields referenced by the presenter are not
        required in the import headers."""
        mock_importer = Mock()
        mock_importer.fields.return_value = {'Foo', 'Bar2', 'Bar'}
        project = ProjectFactory.create(
            info={'enrichments': [{'out_field_name': 'enriched'}],
                  'task_presenter': 'task.info.enriched task.info.Bar'})
        form_data = dict(type='localCSV', csv_filename='fakefile.csv')
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer._validate_headers(
                mock_importer, project, **form_data)
        assert import_report is None

    @with_context
    @patch('pybossa.cloud_store_api.s3.s3_upload_from_string',
           return_value='https:/s3/task.json')
    @patch('pybossa.importers.importer.delete_import_csv_file',
           return_value=None)
    def test_task_presenter_validation_can_be_turned_off(
            self, mock_del, upload_from_string, importer_factory):
        """validate_tp=False skips presenter/header validation entirely."""
        mock_importer = Mock()
        mock_importer.fields.return_value = {'Foo', 'Bar2', 'Bar'}
        project = ProjectFactory.create(
            info={'task_presenter': 'task.info.enriched task.info.bar'})
        form_data = dict(type='localCSV', csv_filename='fakefile.csv',
                         validate_tp=False)
        with patch.dict(self.flask_app.config, {
                'S3_REQUEST_BUCKET': 'mybucket',
                'S3_CONN_TYPE': 'dev',
                'ENABLE_ENCRYPTION': True}):
            import_report = self.importer._validate_headers(
                mock_importer, project, **form_data)
        assert import_report is None
def test_get_all_importers_returns_configured_importers(self, create): flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']} twitter_params = {} youtube_params = {'youtube_api_server_key': self.flask_app.config['YOUTUBE_API_SERVER_KEY']} importer = Importer() importer.register_flickr_importer(flickr_params) importer.register_dropbox_importer() importer.register_twitter_importer(twitter_params) importer.register_youtube_importer(youtube_params) assert 'flickr' in importer.get_all_importer_names() assert 'dropbox' in importer.get_all_importer_names() assert 'twitter' in importer.get_all_importer_names() assert 'youtube' in importer.get_all_importer_names()
class TestImporterPublicMethods(Test):
    """Tests for the public methods of the Importer facade."""

    importer = Importer()

    def test_create_tasks_creates_them_correctly(self, importer_factory):
        """create_tasks stores the imported task with its info and n_answers."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{
            'info': {'question': 'question', 'url': 'url'},
            'n_answers': 20
        }]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='csv', csv_url='http://fakecsv.com')
        self.importer.create_tasks(task_repo, project.id, **form_data)
        task = task_repo.get_task(1)
        assert task is not None
        assert task.project_id == project.id, task.project_id
        assert task.n_answers == 20, task.n_answers
        assert task.info == {'question': 'question', 'url': 'url'}, task.info
        importer_factory.assert_called_with('csv')
        mock_importer.tasks.assert_called_with(**form_data)

    def test_create_tasks_creates_many_tasks(self, importer_factory):
        """create_tasks imports every task the importer yields."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [
            {'info': {'question': 'question1'}},
            {'info': {'question': 'question2'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        form_data = dict(type='gdocs', googledocs_url='http://ggl.com')
        result = self.importer.create_tasks(task_repo, project.id, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 2, len(tasks)
        assert result == '2 new tasks were imported successfully', result
        importer_factory.assert_called_with('gdocs')

    def test_create_tasks_not_creates_duplicated_tasks(self, importer_factory):
        """create_tasks skips tasks whose info already exists in the project."""
        mock_importer = Mock()
        mock_importer.tasks.return_value = [{'info': {'question': 'question'}}]
        importer_factory.return_value = mock_importer
        project = ProjectFactory.create()
        TaskFactory.create(project=project, info={'question': 'question'})
        form_data = dict(type='flickr', album_id='1234')
        result = self.importer.create_tasks(task_repo, project.id, **form_data)
        tasks = task_repo.filter_tasks_by(project_id=project.id)
        assert len(tasks) == 1, len(tasks)
        # Reconstructed message: the literal was split across two physical
        # lines in the mangled source.
        assert result == 'It looks like there were no new records to import', result
        importer_factory.assert_called_with('flickr')

    def test_count_tasks_to_import_returns_what_expected(
            self, importer_factory):
        """count_tasks_to_import delegates to the importer chosen by type."""
        mock_importer = Mock()
        mock_importer.count_tasks.return_value = 2
        importer_factory.return_value = mock_importer
        form_data = dict(type='epicollect', epicollect_project='project',
                         epicollect_form='form')
        number_of_tasks = self.importer.count_tasks_to_import(**form_data)
        assert number_of_tasks == 2, number_of_tasks
        importer_factory.assert_called_with('epicollect')

    def test_get_all_importer_names_returns_default_importer_names(
            self, create):
        """With nothing registered, only the default importers are listed."""
        importers = self.importer.get_all_importer_names()
        expected_importers = ['csv', 'gdocs', 'epicollect']
        assert set(importers) == set(expected_importers)

    def test_get_all_importers_returns_configured_importers(self, create):
        """Registered importers show up in get_all_importer_names()."""
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        assert 'flickr' in importer.get_all_importer_names()
        assert 'dropbox' in importer.get_all_importer_names()
        assert 'twitter' in importer.get_all_importer_names()

    def test_get_autoimporter_names_returns_default_autoimporter_names(
            self, create):
        """With nothing registered, only the default autoimporters are listed."""
        importers = self.importer.get_autoimporter_names()
        expected_importers = ['csv', 'gdocs', 'epicollect']
        assert set(importers) == set(expected_importers)

    def test_get_autoimporter_names_returns_configured_autoimporters(
            self, create):
        """Registered autoimporter-capable importers appear in the list."""
        flickr_params = {'api_key': self.flask_app.config['FLICKR_API_KEY']}
        twitter_params = {}
        importer = Importer()
        importer.register_flickr_importer(flickr_params)
        importer.register_dropbox_importer()
        importer.register_twitter_importer(twitter_params)
        assert 'flickr' in importer.get_autoimporter_names()
        # Fixed: the original asserted 'twitter' in get_all_importer_names(),
        # which is trivially true here and never exercised the autoimporter
        # list this test is named for.
        assert 'twitter' in importer.get_autoimporter_names()