def setUp(self) -> None:
    """Point the importer at the test data directory and create a fresh study."""
    self.settings.IMPORT_REPO_PATH = self.data_dir
    study = Study(name="some-study")
    study.save()
    self.study = study
    self.study_import_manager = StudyImportManager(study=study, redis=False)
    return super().setUp()
def test_basket_protection(self):
    """A clean update should leave baskets intact."""
    # First pass: a normal (non-clean) update to populate the study.
    clean_import = False
    basket = BasketFactory(name="study_basket")
    manager = StudyImportManager(self.study, redis=False)
    update.update_single_study(self.study, True, clean_import=clean_import, manager=manager)

    # Attach one variable that survives the re-import and one that will vanish.
    variable = Variable.objects.get(name="some-variable")
    outdated_variable = Variable.objects.get(name="some-third-variable")
    basket_variable = BasketVariable(basket=basket, variable=variable)
    outdated_basket_variable = BasketVariable(basket=basket, variable=outdated_variable)
    outdated_basket_variable.save()
    basket.save()
    basket_variable.save()

    # Remember ids so we can query after the objects are re-imported.
    outdated_id = outdated_variable.id
    variable_id = variable.id
    basket_id = basket.id

    # Overwrite variables.csv so "some-third-variable" is no longer defined.
    import_files = Path(self.patch_argument_dict["return_value"])
    new_variables = """study_name,dataset_name,name,concept_name,image_url
some-study,some-dataset,some-variable,some-concept,https://variable-image.de
some-study,some-dataset,some-other-variable,some-concept,https://variable-other-image.de
"""
    with open(import_files.joinpath("variables.csv"), "w", encoding="utf8") as csv_file:
        csv_file.write(new_variables)

    # Second pass: a clean update with the reduced variable list.
    clean_import = True
    manager = StudyImportManager(self.study, redis=False)
    update.update_single_study(self.study, True, clean_import=clean_import, manager=manager)

    with self.assertRaises(ObjectDoesNotExist):
        Variable.objects.get(name="some-third-variable")
    variable = Variable.objects.get(name="some-variable")

    # The basket survived; only the link to the removed variable is gone.
    self.assertEqual(1, BasketVariable.objects.all().count())
    self.assertEqual(1, BasketVariable.objects.filter(variable_id=variable_id).count())
    self.assertEqual(0, BasketVariable.objects.filter(variable_id=outdated_id).count())
    self.assertEqual(1, Basket.objects.filter(id=basket_id).count())
def update_single_study(  # pylint: disable=R0913
    study: Study,
    local: bool,
    entity: tuple = None,
    filename: str = None,
    clean_import=False,
    manager: StudyImportManager = None,
) -> None:
    """Update a single study.

    Args:
        study: The study to update.
        local: When True, skip pulling the study's import repository.
        entity: Entities to import; empty/None imports all entities.
        filename: When given together with an entity, import only this file.
        clean_import: When True, delete and recreate the study before importing;
            baskets are backed up first and restored afterwards.
        manager: Optional pre-built import manager; one is created if missing.
    """
    backup_file = Path()
    if clean_import:
        # Wipe the study's entities; baskets are dumped to a fixture file so
        # they can be reloaded after the re-import.
        backup_file = Basket.backup()
        study.delete()
        study.save()
    if not manager:
        # BUG FIX: the manager was instantiated but never assigned, leaving
        # `manager` as None and crashing on the calls below.
        manager = StudyImportManager(study)
    if not local:
        manager.update_repo()
    if not entity:
        manager.import_all_entities()
    elif filename:
        manager.import_single_entity(entity[0], filename)
    else:
        update_study_partial(manager, entity)
    if backup_file.is_file():
        # Restore baskets and drop links to variables that no longer exist.
        call_command("loaddata", backup_file)
        BasketVariable.clean_basket_variables(study.name)
def update_all_studies_completely(local: bool, clean_import=False, redis=True) -> None:
    """Update all studies in the database"""
    for single_study in Study.objects.all():
        import_manager = StudyImportManager(single_study, redis=redis)
        update_single_study(
            single_study, local, clean_import=clean_import, manager=import_manager
        )
        # Drop the manager before moving on to the next study.
        del import_manager
def test_variable_import_without_concept_csv(self):
    """Fixing concepts is a harmless no-op when concepts.csv is absent."""
    concept_path = Study().import_path().joinpath("concepts.csv")
    os.remove(concept_path)
    dataset = DatasetFactory(name="some-dataset")
    dataset.save()
    TEST_CASE.assertIsNone(StudyImportManager(study=dataset.study).fix_concepts_csv())
    # The fixer must not recreate the removed file.
    TEST_CASE.assertFalse(concept_path.exists())
def test_update_single_study_local(study):
    """A local update imports exactly the variables listed in variables.csv."""
    with open(IMPORT_PATH.joinpath("variables.csv"), encoding="utf8") as variables_file:
        expected = {row["name"] for row in csv.DictReader(variables_file)}
    manager = StudyImportManager(study, redis=False)
    update.update_single_study(study, True, (), None, manager=manager)
    imported = {variable.name for variable in Variable.objects.all()}
    TEST_CASE.assertNotEqual(0, len(imported))
    TEST_CASE.assertEqual(expected, imported)
def test_update_single_study(study, mocker):
    """A non-local update pulls the repo once and imports all variables."""
    with open(IMPORT_PATH.joinpath("variables.csv"), encoding="utf8") as variables_file:
        expected = {row["name"] for row in csv.DictReader(variables_file)}
    mocked_update_repo = mocker.patch(
        "ddionrails.imports.manager.StudyImportManager.update_repo")
    manager = StudyImportManager(study, redis=False)
    update.update_single_study(study, False, (), None, manager=manager)
    mocked_update_repo.assert_called_once()
    imported = {variable.name for variable in Variable.objects.all()}
    TEST_CASE.assertNotEqual(0, len(imported))
    TEST_CASE.assertEqual(expected, imported)
def test_update_command_with_valid_study_name_and_entity(
        study, mocked_update_single_study):
    """The update command forwards study, entity tuple and manager to the updater."""
    with TEST_CASE.assertRaises(SystemExit) as error:
        # NOTE(review): ("periods") is just the string "periods", not a tuple.
        call_command("update", study.name, ("periods"))
    manager = StudyImportManager(study)
    TEST_CASE.assertEqual(0, error.exception.code)
    positional = mocked_update_single_study.call_args.args
    keyword = mocked_update_single_study.call_args.kwargs
    TEST_CASE.assertEqual((study, False, tuple(("periods", )), None, False), positional)
    TEST_CASE.assertEqual(
        (manager.study, manager.redis),
        (keyword["manager"].study, keyword["manager"].redis),
    )
def test_update_single_study_entity(study):
    """Updating only the "periods" entity imports every period from periods.csv."""
    with open(IMPORT_PATH.joinpath("periods.csv"), encoding="utf8") as periods_file:
        expected = {row["name"] for row in csv.DictReader(periods_file)}
    manager = StudyImportManager(study, redis=False)
    update.update_single_study(study, True, ("periods", ), None, manager=manager)
    imported = {period.name for period in Period.objects.all()}
    TEST_CASE.assertNotEqual(0, len(imported))
    TEST_CASE.assertEqual(expected, imported)
def handle(self, *args, **options):
    """Entry point of the ``update`` management command.

    Validates the study name, the requested entities and the optional single
    file, then dispatches to the update functions. Exits with code 0 on
    success and 1 on any validation error.
    """
    study_name = options["study_name"]
    entity = set(options["entity"])
    local = options["local"]
    filename = options["filename"]
    clean_import = options["clean_import"]
    redis = not options["no_redis"]

    # if no study_name is given, update all studies
    if study_name == "all":
        self.log_success("Updating all studies")
        update_all_studies_completely(local, clean_import, redis=redis)
        sys.exit(0)

    # if study_name is given, select study from database or exit
    try:
        study = Study.objects.get(name=study_name)
    except Study.DoesNotExist:
        self.log_error(f'Study "{study_name}" does not exist.')
        sys.exit(1)

    # if one or more entities are given, validate all are available
    manager = StudyImportManager(study, redis=redis)
    for single_entity in entity:
        if single_entity not in manager.import_order:
            self.log_error(f'Entity "{single_entity}" does not exist.')
            sys.exit(1)

    # if filename is given, validate that entity is "datasets.json" or "instruments"
    if filename and not entity.intersection({"datasets.json", "instruments.json"}):
        # BUG FIX: the old code joined the (empty) intersection here, so the
        # error message never named the offending entities. Name them instead.
        out = ", ".join(entity)
        self.log_error(
            f'Support for single file import not available for entity "{out}".'
        )
        sys.exit(1)

    update_single_study(study, local, tuple(entity), filename, clean_import,
                        manager=manager)
    # Populate the search index from the database (indexes everything)
    sys.exit(0)
def test_clean_update(self):
    """Does a clean update remove study data before the update?

    The clean import should remove all entities related to a study
    before the import of the study. There is no data provided to import
    for this test. After the clean import without data only the study
    object itself should remain in the database. The test dataset should
    be gone.
    """
    self.assertTrue(list(Dataset.objects.filter(id=self.dataset.id)))
    manager = StudyImportManager(self.study, redis=False)
    update.update_single_study(self.study, True, clean_import=True, manager=manager)
    remaining_ids = [dataset.id for dataset in Dataset.objects.all()]
    self.assertNotIn(self.dataset.id, remaining_ids)
def test_variable_import_with_orphaned_concept(self):
    """Variables import fully even when a concept exists without a CSV entry."""
    csv_path = Study().import_path()
    concept_path = csv_path.joinpath("concepts.csv")
    dataset = DatasetFactory(name="some-dataset")
    dataset.save()
    StudyImportManager(study=dataset.study).fix_concepts_csv()
    ConceptFactory(name="some-concept").save()
    variable_path = csv_path.joinpath("variables.csv").absolute()
    ConceptImport(concept_path).run_import(filename=concept_path)
    VariableImport.run_import(variable_path, study=dataset.study)
    with open(variable_path, "r", encoding="utf8") as csv_file:
        expected_names = {row["name"] for row in csv.DictReader(csv_file)}
    imported = Variable.objects.filter(name__in=list(expected_names))
    TEST_CASE.assertNotEqual(0, len(imported))
    TEST_CASE.assertEqual(len(expected_names), len(imported))
def test_update_study_partial(study, mocked_import_single_entity):
    """A partial update delegates each entity to import_single_entity."""
    entities = ("periods", )
    update.update_study_partial(StudyImportManager(study), entities)
    mocked_import_single_entity.assert_called_once_with(entities[0])
def update_study_partial(manager: StudyImportManager, entity: tuple):
    """Update only selected entitites for study"""
    # Delegate each requested entity to the manager, one at a time.
    for entity_name in entity:
        manager.import_single_entity(entity_name)
class TestStudyImportManagerUnittest(unittest.TestCase):
    """Unit tests for StudyImportManager's single-entity imports."""

    # Injected by the surrounding test harness before setUp runs — TODO confirm.
    data_dir: Path
    settings: Any
    study: Study

    def setUp(self) -> None:
        # Point the import machinery at the test data and create the study
        # every test operates on.
        self.settings.IMPORT_REPO_PATH = self.data_dir
        self.study = Study(name="some-study")
        self.study.save()
        self.study_import_manager = StudyImportManager(study=self.study, redis=False)
        return super().setUp()

    def test_import_csv_topics_exception(self):
        """A topics.csv row referencing an unknown study raises Study.DoesNotExist."""
        import_path: Path = self.study_import_manager.study.import_path()
        faulty_row = {
            "study": "some-nonexistent-study",
            "name": "some-topic",
            "label": "some-label",
            "label_de": "some-german-label",
            "description": "Some description",
            "description_de": "Eine Beschreibung",
            "parent": "some-other-topic",
        }
        # Append the faulty row to the otherwise valid topics.csv fixture.
        with open(import_path.joinpath("topics.csv"), "a",
                  encoding="utf8") as topic_file:
            writer = csv.DictWriter(topic_file, fieldnames=list(faulty_row.keys()))
            writer.writerow(faulty_row)
        with self.assertRaises(Study.DoesNotExist):
            self.study_import_manager.import_single_entity("topics.csv")

    def test_import_attachments_exception(self):
        """An attachment row naming a missing dataset raises Dataset.DoesNotExist.

        The raised exception carries the offending CSV row encoded as JSON
        in its first argument.
        """
        TEST_CASE.assertEqual(0, Attachment.objects.count())
        import_path = self.study_import_manager.study.import_path().joinpath(
            "attachments.csv")
        header = (
            "type",
            "study",
            "dataset",
            "variable",
            "instrument",
            "question",
            "url",
            "url_text",
        )
        # Only "type" and "dataset" are filled; the dataset does not exist.
        row = dict(type="dataset", dataset="Nonexistent-dataset")
        with open(import_path, "w", encoding="utf8") as attachements_file:
            writer = csv.DictWriter(attachements_file, fieldnames=header)
            writer.writeheader()
            writer.writerow(row)
        with TEST_CASE.assertRaises(Dataset.DoesNotExist) as error:
            self.study_import_manager.import_single_entity("attachments")
        # The exception message is JSON containing the faulty row's fields.
        error_dict = json.loads(error.exception.args[0])
        TEST_CASE.assertDictContainsSubset(row, error_dict)

    def test_import_attachments(self):
        """Importing the fixture attachments.csv creates a single Attachment."""
        TEST_CASE.assertEqual(0, Attachment.objects.count())
        self.study_import_manager.import_single_entity("attachments")
        TEST_CASE.assertEqual(1, Attachment.objects.count())
        attachment = Attachment.objects.first()
        TEST_CASE.assertEqual(self.study, attachment.context_study)
        TEST_CASE.assertEqual("https://some-study.de", attachment.url)
        TEST_CASE.assertEqual("some-study", attachment.url_text)
def _study_import_manager(study, settings):
    """Build a StudyImportManager wired to the functional test data directory."""
    settings.IMPORT_REPO_PATH = Path("tests/functional/test_data/")
    return StudyImportManager(study)