def test_skips_type_validation_report_if_no_type_col_in_dm(
        mock_config, datamap_no_type_col_matches_test_template, template):
    mock_config.initialise()
    shutil.copy2(template, (Path(mock_config.PLATFORM_DOCS_DIR) / "input"))
    tmpl_repo = InMemoryPopulatedTemplatesRepository(
        mock_config.PLATFORM_DOCS_DIR / "input")
    dm_repo = InMemorySingleDatamapRepository(
        datamap_no_type_col_matches_test_template)
    output_repo = MasterOutputRepository
    uc = CreateMasterUseCase(dm_repo, tmpl_repo, output_repo)
    uc.execute("master.xlsx")
    pth = mock_config.FULL_PATH_OUTPUT
    f = list(pth.glob(
        "*.csv"))  # we have to do this because filename includes timestamp
    assert len(f) == 0
def test_create_master_spreadsheet(mock_config, datamap_match_test_template,
                                   template):
    mock_config.initialise()
    shutil.copy2(template, (Path(mock_config.PLATFORM_DOCS_DIR) / "input"))
    tmpl_repo = InMemoryPopulatedTemplatesRepository(
        mock_config.PLATFORM_DOCS_DIR / "input")
    dm_repo = InMemorySingleDatamapRepository(datamap_match_test_template)
    output_repo = MasterOutputRepository
    uc = CreateMasterUseCase(dm_repo, tmpl_repo, output_repo)
    uc.execute("master.xlsx")
    wb = load_workbook(
        Path(mock_config.PLATFORM_DOCS_DIR) / "output" / "master.xlsx")
    ws = wb.active
    assert ws["A1"].value == "file name"
    assert ws["B1"].value == "test_template"
    assert ws["B2"].value == "2019-10-20T00:00:00"
    assert ws["B3"].value == "This is a string"
def test_create_master_spreadsheet(mock_config, org_test_files_dir):
    mock_config.initialise()
    for fl in os.listdir(org_test_files_dir):
        shutil.copy(
            Path.cwd() / "tests" / "resources" / "org_templates" / fl,
            (Path(mock_config.PLATFORM_DOCS_DIR) / "input"),
        )
    tmpl_repo = InMemoryPopulatedTemplatesRepository(
        mock_config.PLATFORM_DOCS_DIR / "input")
    dm_file = mock_config.PLATFORM_DOCS_DIR / "input" / "dft_datamap.csv"
    dm_repo = InMemorySingleDatamapRepository(str(dm_file))
    output_repo = MasterOutputRepository
    uc = CreateMasterUseCase(dm_repo, tmpl_repo, output_repo)
    uc.execute("master.xlsx")
    wb = load_workbook(
        Path(mock_config.PLATFORM_DOCS_DIR) / "output" / "master.xlsx")
    ws = wb.active
    assert ws["A1"].value == "file name"
    assert "dft1_tmp" in ws["B1"].value
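# The tests above rely on pytest fixtures (mock_config, template,
# datamap_match_test_template, org_test_files_dir, etc.) supplied elsewhere
# in the test suite. Below is a minimal sketch of how two of those fixtures
# might be defined in a conftest.py; the fixture names come from the tests,
# but the bodies and resource paths are assumptions for illustration only.
from pathlib import Path

import pytest


@pytest.fixture
def template():
    # hypothetical path to a populated test template spreadsheet
    return Path.cwd() / "tests" / "resources" / "test_template.xlsx"


@pytest.fixture
def datamap_match_test_template():
    # hypothetical path to a datamap CSV whose keys match the template above
    return Path.cwd() / "tests" / "resources" / "datamap_match_test_template.csv"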
def import_and_create_master(echo_funcs, datamap=None):
    """Import all spreadsheet files from input directory and process with datamap.

    echo_funcs - a dict of echo functions sent from the front-end interface
    allowing for suitable output (stdout, etc).

    Create master spreadsheet immediately.
    """
    # patch ECHO_FUNC for datamap creation - hack!
    setattr(engine.use_cases.parsing, "ECHO_FUNC_GREEN",
            echo_funcs["click_echo_green"])
    setattr(engine.use_cases.parsing, "ECHO_FUNC_RED",
            echo_funcs["click_echo_red"])
    setattr(engine.use_cases.parsing, "ECHO_FUNC_YELLOW",
            echo_funcs["click_echo_yellow"])
    setattr(engine.use_cases.parsing, "ECHO_FUNC_WHITE",
            echo_funcs["click_echo_white"])
    tmpl_repo = InMemoryPopulatedTemplatesRepository(Config.PLATFORM_DOCS_DIR / "input")
    master_fn = Config.config_parser["DEFAULT"]["master file name"]
    if datamap:
        dm_fn = datamap
    else:
        dm_fn = Config.config_parser["DEFAULT"]["datamap file name"]
    dm = Path(tmpl_repo.directory_path) / dm_fn
    dm_repo = InMemorySingleDatamapRepository(dm)
    output_repo = MasterOutputRepository
    uc = CreateMasterUseCase(dm_repo, tmpl_repo, output_repo)
    try:
        uc.execute(master_fn)
    except FileNotFoundError as e:
        raise FileNotFoundError(e)
    except DatamapNotCSVException:
        raise
    logger.info("{} successfully created in {}\n".format(
        master_fn, Path(Config.PLATFORM_DOCS_DIR / "output")))
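# A minimal sketch of how a front-end might call import_and_create_master().
# The echo_funcs keys ("click_echo_green" etc.) come from the function body
# above; wiring them to click.secho via functools.partial is an assumption
# about the calling code, shown for illustration only.
from functools import partial

import click

echo_funcs = {
    "click_echo_green": partial(click.secho, fg="green"),
    "click_echo_red": partial(click.secho, fg="red"),
    "click_echo_yellow": partial(click.secho, fg="yellow"),
    "click_echo_white": partial(click.secho, fg="white"),
}

# uses the datamap file name from the config by default
import_and_create_master(echo_funcs)

# or with an explicit datamap file name, resolved relative to the input directory
import_and_create_master(echo_funcs, datamap="my_datamap.csv")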
def import_and_create_master(echo_funcs, datamap=None, **kwargs):
    """Import all spreadsheet files from input directory and process with datamap.

    echo_funcs - a dict of echo functions sent from the front-end interface
    allowing for suitable output (stdout, etc).

    Create master spreadsheet immediately.
    """
    # patch ECHO_FUNC for datamap creation - hack!
    setattr(engine.use_cases.parsing, "ECHO_FUNC_GREEN", echo_funcs["click_echo_green"])
    setattr(engine.use_cases.parsing, "ECHO_FUNC_RED", echo_funcs["click_echo_red"])
    setattr(
        engine.use_cases.parsing, "ECHO_FUNC_YELLOW", echo_funcs["click_echo_yellow"]
    )
    setattr(engine.use_cases.parsing, "ECHO_FUNC_WHITE", echo_funcs["click_echo_white"])
    master_fn = Config.config_parser["DEFAULT"]["master file name"]
    if kwargs.get("rowlimit"):
        Config.TEMPLATE_ROW_LIMIT = kwargs.get("rowlimit")
    if kwargs.get("inputdir"):
        inputdir = kwargs.get("inputdir")
    else:
        inputdir = Config.PLATFORM_DOCS_DIR / "input"
    if kwargs.get("validationonly"):
        output_repo = ValidationOnlyRepository
        master_fn = ""
    else:
        output_repo = MasterOutputRepository
    if kwargs.get("zipinput"):
        tmpl_repo = InMemoryPopulatedTemplatesZip(kwargs.get("zipinput"))
    else:
        tmpl_repo = InMemoryPopulatedTemplatesRepository(inputdir)
    if Config.TEMPLATE_ROW_LIMIT < 50:
        logger.warning(
            f"Row limit is set to {Config.TEMPLATE_ROW_LIMIT} (default is 500). "
            "This may be unintentionally low. Check datamaps import templates --help"
        )
    else:
        logger.info(f"Row limit is set to {Config.TEMPLATE_ROW_LIMIT}.")
    if datamap:
        dm_fn = datamap
    else:
        dm_fn = Config.config_parser["DEFAULT"]["datamap file name"]
    dm = Path(tmpl_repo.directory_path) / dm_fn
    dm_repo = InMemorySingleDatamapRepository(dm)
    if dm_repo.is_typed:
        uc = CreateMasterUseCaseWithValidation(dm_repo, tmpl_repo, output_repo)
    else:
        if output_repo == ValidationOnlyRepository:
            logger.critical(
                "Cannot validate data. The datamap needs to have a 'type' column."
            )
            sys.exit(1)
        uc = CreateMasterUseCase(dm_repo, tmpl_repo, output_repo)
    try:
        uc.execute(master_fn)
    except FileNotFoundError as e:
        raise FileNotFoundError(e)
    except DatamapNotCSVException:
        raise
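# A minimal sketch of calling the keyword-argument variant above. The kwargs
# names (rowlimit, inputdir, validationonly, zipinput) are taken from the
# function body; the echo_funcs dict is assumed to be built as in the earlier
# example, and the file names and paths shown are placeholders.
from pathlib import Path

import_and_create_master(
    echo_funcs,
    datamap="dft_datamap.csv",
    rowlimit=100,                       # overrides Config.TEMPLATE_ROW_LIMIT
    inputdir=Path("/tmp/templates"),    # read templates from here instead of the default
)

# validation-only run: requires a datamap with a "type" column, otherwise the
# function logs a critical message and exits
import_and_create_master(echo_funcs, validationonly=True)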