def test_cli_init(cli_runner: CliRunner, base_context: CGConfig, caplog):
    caplog.set_level(logging.INFO)
    # GIVEN you want to setup a new database using the CLI
    database = "./test_db.sqlite3"
    database_path = Path(database)
    database_uri = f"sqlite:///{database}"
    base_context.status_db_ = Store(uri=database_uri)
    with cli_runner.isolated_filesystem():
        assert database_path.exists() is False

        # WHEN calling "init"
        result = cli_runner.invoke(init, [], obj=base_context)

        # THEN it should setup the database with some tables
        assert result.exit_code == 0
        assert database_path.exists()
        assert len(Store(database_uri).engine.table_names()) > 0

        # GIVEN the database already exists
        # WHEN calling the init function
        result = cli_runner.invoke(init, [], obj=base_context)

        # THEN it should print an error and give error exit code
        assert result.exit_code != 0
        assert "Database already exists" in caplog.text

        # GIVEN the database already exists
        # WHEN calling "init" with "--reset"
        result = cli_runner.invoke(init, ["--reset"], input="Yes", obj=base_context)

        # THEN it should re-setup the tables and print new tables
        assert result.exit_code == 0
        assert "Success!" in caplog.text
def test_upload_genotype(
    upload_context: CGConfig,
    case_id: str,
    cli_runner: CliRunner,
    analysis_store_trio: Store,
    upload_genotypes_hk_api: HousekeeperAPI,
    caplog,
):
    """Test to upload genotypes via the CLI"""
    caplog.set_level(logging.DEBUG)
    # GIVEN a context with a case that is ready for uploading sequence genotypes
    upload_context.status_db_ = analysis_store_trio
    upload_context.housekeeper_api_ = upload_genotypes_hk_api
    case_obj = upload_context.status_db.family(case_id)
    assert case_obj

    # WHEN uploading the genotypes
    result = cli_runner.invoke(upload_genotypes_cmd, [case_id], obj=upload_context)

    # THEN check that the command exits with success
    assert result.exit_code == 0

    # THEN assert the correct information is communicated
    assert "loading VCF genotypes for sample(s):" in caplog.text
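# The tests and fixtures in this collection repeatedly inject test doubles by
# assigning to trailing-underscore attributes (status_db_, housekeeper_api_) and
# then read the dependency back through the bare property (status_db), as
# test_upload_genotype does above. Below is a minimal, self-contained sketch of
# that lazy-property override pattern; SketchConfig and _FakeStore are
# hypothetical stand-ins, not the real CGConfig or Store implementation.
class _FakeStore:
    """Stand-in for the project's Store, only here to keep the sketch runnable."""

    def __init__(self, uri: str):
        self.uri = uri


class SketchConfig:
    """Config object whose status_db is built lazily unless a test injects one."""

    def __init__(self, db_uri: str):
        self.db_uri = db_uri
        self.status_db_ = None  # fixtures assign a prebuilt store here

    @property
    def status_db(self) -> _FakeStore:
        # Build a store on first access, unless a test already injected one.
        if self.status_db_ is None:
            self.status_db_ = _FakeStore(uri=self.db_uri)
        return self.status_db_


# Usage mirroring the fixtures above: inject a store, then read the property.
sketch_config = SketchConfig(db_uri="sqlite:///:memory:")
sketch_config.status_db_ = _FakeStore(uri="sqlite:///test_db.sqlite3")
assert sketch_config.status_db.uri == "sqlite:///test_db.sqlite3"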
def fixture_nipt_upload_api_context(
    cg_context: CGConfig, nipt_stats_api: StatsAPI, re_sequenced_sample_store: Store
) -> CGConfig:
    """Return a NIPT upload API context"""
    cg_context.status_db_ = re_sequenced_sample_store
    cg_context.cg_stats_api_ = nipt_stats_api
    return cg_context
def fixture_populated_compress_context(
    compress_api: CompressAPI, populated_compress_store: Store, cg_config_object: CGConfig
) -> CGConfig:
    """Return a compress context populated with a completed analysis"""
    # Make sure that there is a case where analysis is completed
    cg_config_object.meta_apis["compress_api"] = compress_api
    cg_config_object.status_db_ = populated_compress_store
    return cg_config_object
def fixture_populated_mip_context(
    base_context: CGConfig,
    analysis_store: Store,
    mip_dna_housekeeper: HousekeeperAPI,
    project_dir: Path,
) -> CGConfig:
    """Return a populated MIP context"""
    base_context.housekeeper_api_ = mip_dna_housekeeper
    base_context.status_db_ = analysis_store
    base_context.delivery_path = str(project_dir)
    return base_context
def fixture_upload_genotypes_context(
    upload_genotypes_hk_api: HousekeeperAPI,
    genotype_api: GenotypeAPI,
    analysis_store_trio: Store,
    base_context: CGConfig,
) -> CGConfig:
    """Create an upload genotypes context"""
    base_context.genotype_api_ = genotype_api
    base_context.housekeeper_api_ = upload_genotypes_hk_api
    base_context.status_db_ = analysis_store_trio
    return base_context
def fixture_base_context(
    analysis_store: Store,
    housekeeper_api: HousekeeperAPI,
    upload_scout_api: UploadScoutAPI,
    trailblazer_api: TrailblazerAPI,
    cg_context: CGConfig,
) -> CGConfig:
    """context to use in cli"""
    cg_context.status_db_ = analysis_store
    cg_context.housekeeper_api_ = housekeeper_api
    cg_context.trailblazer_api_ = trailblazer_api
    cg_context.scout_api_ = MockScoutApi()
    cg_context.meta_apis["report_api"] = MockReportApi()
    cg_context.meta_apis["scout_upload_api"] = upload_scout_api
    cg_context.mip_rd_dna.root = tempdir
    return cg_context
def test_delete_flow_cell_housekeeper_only_sample_level(
    caplog,
    cg_context: CGConfig,
    demultiplexed_flowcells_working_directory: Path,
    flowcell_full_name: str,
    populated_flow_cell_store: Store,
    sample_level_housekeeper_api: HousekeeperAPI,
    tmp_fastq_paths: List[Path],
):
    """Test function to remove fastqs from Housekeeper when there are only files on sample
    level (not on flow cell name)"""
    caplog.set_level(logging.INFO)

    cg_context.housekeeper_api_ = sample_level_housekeeper_api
    cg_context.status_db_ = populated_flow_cell_store

    # GIVEN a DeleteDemuxAPI and a HousekeeperAPI with no files tagged with the flow cell name
    sample_level_files: List[Path] = tmp_fastq_paths
    wipe_demultiplex_api: DeleteDemuxAPI = DeleteDemuxAPI(
        config=cg_context,
        demultiplex_base=demultiplexed_flowcells_working_directory,
        dry_run=False,
        run_path=Path(flowcell_full_name),
    )
    wipe_demultiplex_api._set_samples_on_flow_cell()

    # WHEN wiping files in Housekeeper
    wipe_demultiplex_api.delete_flow_cell_housekeeper()

    # THEN you should be notified that there are no files tagged with the flow cell name
    assert (
        f"Housekeeper: No files found with tag: {wipe_demultiplex_api.flow_cell_name}"
        in caplog.text
    )

    # AND you should be notified that fastq files were removed on sample level
    for file in sample_level_files:
        assert f"{file.as_posix()} deleted" in caplog.text
def test_delete_flow_cell_housekeeper_flowcell_name(
    caplog,
    cg_context: CGConfig,
    demultiplexed_flowcells_working_directory: Path,
    flow_cell_name_housekeeper_api: HousekeeperAPI,
    flowcell_full_name: str,
    populated_flow_cell_store: Store,
    tmp_fastq_paths: List[Path],
    tmp_sample_sheet_path: Path,
):
    """Test function to remove files from Housekeeper using flow cell name as a tag"""
    caplog.set_level(logging.INFO)

    cg_context.housekeeper_api_ = flow_cell_name_housekeeper_api
    cg_context.status_db_ = populated_flow_cell_store

    # GIVEN a HousekeeperAPI holding fastq files and a sample sheet tagged with the flow cell name
    fastq_files: List[Path] = tmp_fastq_paths
    sample_sheet_file: Path = tmp_sample_sheet_path
    wipe_demultiplex_api: DeleteDemuxAPI = DeleteDemuxAPI(
        config=cg_context,
        demultiplex_base=demultiplexed_flowcells_working_directory,
        dry_run=False,
        run_path=Path(flowcell_full_name),
    )
    wipe_demultiplex_api._set_samples_on_flow_cell()

    # WHEN wiping the flow cell files in Housekeeper
    wipe_demultiplex_api.delete_flow_cell_housekeeper()

    # THEN the sample sheet and all fastq files should be reported as deleted
    assert (
        f"Housekeeper: No files found with tag: {wipe_demultiplex_api.flow_cell_name}"
        not in caplog.text
    )
    assert f"Deleted {sample_sheet_file.as_posix()} from housekeeper" in caplog.text
    for fastq_file in fastq_files:
        assert f"{fastq_file.as_posix()} deleted" in caplog.text
def fixture_config_object(
    cg_context: CGConfig, analysis_store: Store, real_housekeeper_api: HousekeeperAPI
) -> CGConfig:
    """Return a config object with an analysis store and a real Housekeeper API"""
    cg_context.status_db_ = analysis_store
    cg_context.housekeeper_api_ = real_housekeeper_api
    return cg_context
def fixture_config_object_no_gisaid_samples(
    config: dict, cg_config_object: CGConfig, base_store: Store
) -> CGConfig:
    """Return a config object backed by a store without GISAID samples"""
    cg_config_object.status_db_ = base_store
    cg_config_object.gisaid = GisaidConfig(**config["gisaid"])
    return cg_config_object
def fixture_cg_context(
    context_config: dict, base_store: Store, housekeeper_api: HousekeeperAPI
) -> CGConfig:
    cg_config = CGConfig(**context_config)
    cg_config.status_db_ = base_store
    cg_config.housekeeper_api_ = housekeeper_api
    return cg_config
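# The fixture_* functions in this collection are presumably exposed to tests
# under shorter names ("cg_context", "base_store", and so on); the decorators
# are not part of this excerpt. A minimal sketch of that registration, assuming
# pytest's name= argument, which lets a function keep its fixture_ prefix while
# dependants request the plain name in their signatures. The fixture and test
# below are hypothetical examples, not taken from the source.
import pytest


@pytest.fixture(name="example_context")
def fixture_example_context(tmp_path):
    """Hypothetical fixture: registered as "example_context" despite the
    fixture_ prefix on the function name."""
    return {"root": tmp_path}


def test_uses_example_context(example_context):
    # The test requests the registered name, not the function name.
    assert example_context["root"].exists()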
def base_context(cg_context: CGConfig, analysis_store: Store) -> CGConfig:
    """context to use in cli"""
    cg_context.status_db_ = analysis_store
    return cg_context
def fixture_base_context(
    base_store: Store, housekeeper_api: HousekeeperAPI, cg_config_object: CGConfig
) -> CGConfig:
    """context to use in cli"""
    cg_config_object.status_db_ = base_store
    cg_config_object.housekeeper_api_ = housekeeper_api
    return cg_config_object
def fixture_store_fastq_context(
    compress_api: CompressAPI, store: Store, cg_config_object: CGConfig
) -> CGConfig:
    """Return a compress context"""
    cg_config_object.meta_apis["compress_api"] = compress_api
    cg_config_object.status_db_ = store
    return cg_config_object