def test_upload_genotype(
    upload_context: CGConfig,
    case_id: str,
    cli_runner: CliRunner,
    analysis_store_trio: Store,
    upload_genotypes_hk_api: HousekeeperAPI,
    caplog,
):
    """Upload genotypes for a trio case through the CLI and verify the log output."""
    caplog.set_level(logging.DEBUG)

    # GIVEN a context wired with a case that is ready for genotype upload
    upload_context.status_db_ = analysis_store_trio
    upload_context.housekeeper_api_ = upload_genotypes_hk_api
    case = upload_context.status_db.family(case_id)
    assert case

    # WHEN invoking the genotype upload command
    result = cli_runner.invoke(upload_genotypes_cmd, [case_id], obj=upload_context)

    # THEN the command exits successfully
    assert result.exit_code == 0
    # THEN the expected progress message was logged
    assert "loading VCF genotypes for sample(s):" in caplog.text
def fixture_populated_mip_context(
    base_context: CGConfig,
    analysis_store: Store,
    mip_dna_housekeeper: HousekeeperAPI,
    project_dir: Path,
) -> CGConfig:
    """Return a context populated with a MIP-DNA Housekeeper store, an analysis store and a delivery path."""
    base_context.status_db_ = analysis_store
    base_context.housekeeper_api_ = mip_dna_housekeeper
    base_context.delivery_path = str(project_dir)
    return base_context
def fixture_demultiplex_context(
    demultiplexing_api: DemultiplexingAPI,
    stats_api: StatsAPI,
    real_housekeeper_api: HousekeeperAPI,
    cg_context: CGConfig,
) -> CGConfig:
    """Return a context wired for demultiplexing: demux API, cgstats API and a real Housekeeper API."""
    cg_context.housekeeper_api_ = real_housekeeper_api
    cg_context.cg_stats_api_ = stats_api
    cg_context.demultiplex_api_ = demultiplexing_api
    return cg_context
def fixture_upload_genotypes_context(
    upload_genotypes_hk_api: HousekeeperAPI,
    genotype_api: GenotypeAPI,
    analysis_store_trio: Store,
    base_context: CGConfig,
) -> CGConfig:
    """Return a context configured for uploading genotypes of a trio case."""
    base_context.status_db_ = analysis_store_trio
    base_context.housekeeper_api_ = upload_genotypes_hk_api
    base_context.genotype_api_ = genotype_api
    return base_context
def fixture_rna_mip_context(
    cg_context: CGConfig,
    analysis_family_single_case: dict,
    helpers: StoreHelpers,
    apptag_rna: str,
    case_id: str,
    housekeeper_api: HousekeeperAPI,
) -> CGConfig:
    """Return a context holding a MIP-RNA single-sample case and a MIP-RNA analysis API."""
    cg_context.housekeeper_api_ = housekeeper_api
    # Tag the case description as a MIP-RNA analysis before persisting it
    analysis_family_single_case["data_analysis"] = str(Pipeline.MIP_RNA)
    # Only create the case once; repeated fixture use must not duplicate it
    if not cg_context.status_db.family(case_id):
        helpers.ensure_case_from_dict(
            cg_context.status_db,
            case_info=analysis_family_single_case,
            app_tag=apptag_rna,
        )
    cg_context.meta_apis["analysis_api"] = MipRNAAnalysisAPI(cg_context)
    return cg_context
def fixture_base_context(
    analysis_store: Store,
    housekeeper_api: HousekeeperAPI,
    upload_scout_api: UploadScoutAPI,
    trailblazer_api: TrailblazerAPI,
    cg_context: CGConfig,
) -> CGConfig:
    """context to use in cli"""
    cg_context.status_db_ = analysis_store
    cg_context.housekeeper_api_ = housekeeper_api
    cg_context.trailblazer_api_ = trailblazer_api
    cg_context.scout_api_ = MockScoutApi()
    cg_context.meta_apis["report_api"] = MockReportApi()
    cg_context.meta_apis["scout_upload_api"] = upload_scout_api
    # NOTE(review): `tempdir` is not defined in this view — presumably a
    # module-level name (e.g. tempfile.gettempdir()); confirm against imports.
    cg_context.mip_rd_dna.root = tempdir
    return cg_context
def test_delete_flow_cell_housekeeper_only_sample_level(
    caplog,
    cg_context: CGConfig,
    demultiplexed_flowcells_working_directory: Path,
    flowcell_full_name: str,
    populated_flow_cell_store: Store,
    sample_level_housekeeper_api: HousekeeperAPI,
    tmp_fastq_paths: List[Path],
):
    """Remove fastqs from Housekeeper when files exist only on sample level
    (none tagged with the flow cell name)
    """
    caplog.set_level(logging.INFO)
    cg_context.housekeeper_api_ = sample_level_housekeeper_api
    cg_context.status_db_ = populated_flow_cell_store

    # GIVEN a DeleteDemuxAPI backed by a Housekeeper store without flow-cell-tagged files
    sample_level_files: List[Path] = tmp_fastq_paths
    delete_demux_api: DeleteDemuxAPI = DeleteDemuxAPI(
        config=cg_context,
        demultiplex_base=demultiplexed_flowcells_working_directory,
        dry_run=False,
        run_path=Path(flowcell_full_name),
    )
    delete_demux_api._set_samples_on_flow_cell()

    # WHEN wiping files in Housekeeper
    delete_demux_api.delete_flow_cell_housekeeper()

    # THEN a "no files" notification is logged for the flow cell tag
    assert (
        f"Housekeeper: No files found with tag: {delete_demux_api.flow_cell_name}" in caplog.text
    )
    # AND every sample-level fastq file is reported as deleted
    for fastq_path in sample_level_files:
        assert f"{fastq_path.as_posix()} deleted" in caplog.text
def test_store(
    cli_runner: CliRunner,
    balsamic_context: CGConfig,
    real_housekeeper_api,
    mock_config,
    mock_deliverable,
    mock_analysis_finish,
    caplog,
    hermes_deliverables,
    mocker,
):
    """The store command runs all its parts successfully under ideal conditions."""
    caplog.set_level(logging.INFO)

    # GIVEN a case with config, deliverables and analysis_finish files in place
    case_id = "balsamic_case_wgs_single"

    # GIVEN an empty real Housekeeper store on both the context and the analysis API
    balsamic_context.housekeeper_api_ = real_housekeeper_api
    analysis_api = balsamic_context.meta_apis["analysis_api"]
    analysis_api.housekeeper_api = real_housekeeper_api

    # GIVEN neither a Housekeeper bundle nor a stored analysis exists yet
    assert not balsamic_context.housekeeper_api.bundle(case_id)
    assert not balsamic_context.status_db.family(case_id).analyses

    # GIVEN that HermesAPI returns a deliverables output
    mocker.patch.object(HermesApi, "convert_deliverables")
    HermesApi.convert_deliverables.return_value = CGDeliverables(**hermes_deliverables)

    # WHEN running the store command
    result = cli_runner.invoke(store, [case_id, "--dry-run"], obj=balsamic_context)

    # THEN the bundle is added to Housekeeper and the analysis to StatusDB
    assert result.exit_code == EXIT_SUCCESS
    assert "Analysis successfully stored in Housekeeper" in caplog.text
    assert "Analysis successfully stored in StatusDB" in caplog.text
    assert balsamic_context.status_db.family(case_id).analyses
    assert balsamic_context.housekeeper_api.bundle(case_id)
def test_delete_flow_cell_housekeeper_flowcell_name(
    caplog,
    cg_context: CGConfig,
    demultiplexed_flowcells_working_directory: Path,
    flow_cell_name_housekeeper_api: HousekeeperAPI,
    flowcell_full_name: str,
    populated_flow_cell_store: Store,
    tmp_fastq_paths: List[Path],
    tmp_sample_sheet_path: Path,
):
    """Remove files from Housekeeper that are tagged with the flow cell name."""
    caplog.set_level(logging.INFO)
    cg_context.housekeeper_api_ = flow_cell_name_housekeeper_api
    cg_context.status_db_ = populated_flow_cell_store

    # GIVEN fastq files and a sample sheet tagged with the flow cell name
    fastq_files: List[Path] = tmp_fastq_paths
    sample_sheet_file: Path = tmp_sample_sheet_path
    delete_demux_api: DeleteDemuxAPI = DeleteDemuxAPI(
        config=cg_context,
        demultiplex_base=demultiplexed_flowcells_working_directory,
        dry_run=False,
        run_path=Path(flowcell_full_name),
    )
    delete_demux_api._set_samples_on_flow_cell()

    # WHEN wiping files in Housekeeper
    delete_demux_api.delete_flow_cell_housekeeper()

    # THEN the "no files" notification is NOT logged, and all files are reported deleted
    assert (
        f"Housekeeper: No files found with tag: {delete_demux_api.flow_cell_name}" not in caplog.text
    )
    assert f"Deleted {sample_sheet_file.as_posix()} from housekeeper" in caplog.text
    for fastq_file in fastq_files:
        assert f"{fastq_file.as_posix()} deleted" in caplog.text
def fixture_dna_mip_context(
    cg_context: CGConfig,
    helpers: StoreHelpers,
    mip_case_ids: dict,
    real_housekeeper_api: HousekeeperAPI,
    tb_api,
) -> CGConfig:
    """Return a context seeded with MIP-DNA cases, each with one affected WGS sample."""
    store = cg_context.status_db
    cg_context.housekeeper_api_ = real_housekeeper_api
    cg_context.trailblazer_api_ = tb_api
    analysis_api = MipDNAAnalysisAPI(config=cg_context)

    # Register the WGS application tag used by every sample below
    helpers.ensure_application_version(store=store, application_tag="WGSA", application_type="wgs")

    # Seed each MIP case with one affected sample, skipping cases already present
    for case_id, case_info in mip_case_ids.items():
        if store.family(case_id):
            continue
        case = helpers.add_case(
            store=store,
            data_analysis=Pipeline.MIP_DNA,
            internal_id=case_id,
            name=case_info["name"],
        )
        sample = helpers.add_sample(
            store=store,
            customer_id="cust000",
            application_tag="WGSA",
            application_type="wgs",
            gender="unknown",
        )
        helpers.add_relationship(store=store, sample=sample, case=case, status="affected")

    cg_context.meta_apis["analysis_api"] = analysis_api
    return cg_context
def fluffy_context(
    cg_context: CGConfig,
    helpers: StoreHelpers,
    real_housekeeper_api: HousekeeperAPI,
    fluffy_samplesheet_bundle_data,
    fluffy_fastq_hk_bundle_data,
    fluffy_case_id_existing,
    fluffy_sample_lims_id,
) -> CGConfig:
    """Return a context with a Fluffy case, its sample, a flow cell and HK bundles in place."""
    cg_context.housekeeper_api_ = real_housekeeper_api
    analysis_api = FluffyAnalysisAPI(config=cg_context)

    # Load the samplesheet and fastq bundles into Housekeeper
    helpers.ensure_hk_version(analysis_api.housekeeper_api, bundle_data=fluffy_samplesheet_bundle_data)
    helpers.ensure_hk_version(analysis_api.housekeeper_api, fluffy_fastq_hk_bundle_data)

    fluffy_case = helpers.add_case(
        analysis_api.status_db,
        internal_id=fluffy_case_id_existing,
        name=fluffy_case_id_existing,
        data_analysis=Pipeline.FLUFFY,
    )
    fluffy_sample = helpers.add_sample(
        analysis_api.status_db,
        internal_id=fluffy_sample_lims_id,
        is_tumour=False,
        application_type="tgs",
        reads=100,
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_flowcell(analysis_api.status_db, flowcell_id="flowcell", samples=[fluffy_sample])
    helpers.add_relationship(analysis_api.status_db, case=fluffy_case, sample=fluffy_sample)

    cg_context.meta_apis["analysis_api"] = analysis_api
    return cg_context
def test_valid_case_already_added(
    cli_runner,
    mocker,
    hermes_deliverables,
    balsamic_context: CGConfig,
    real_housekeeper_api: HousekeeperAPI,
    mock_config,
    mock_deliverable,
    mock_analysis_finish,
    caplog,
):
    """Storing the same bundle twice in Housekeeper fails with an informative error."""
    caplog.set_level(logging.ERROR)

    # GIVEN a case id
    case_id = "balsamic_case_wgs_single"

    # GIVEN an empty real Housekeeper store
    balsamic_context.housekeeper_api_ = real_housekeeper_api
    balsamic_context.meta_apis["analysis_api"].housekeeper_api = real_housekeeper_api

    # GIVEN the analysis is not yet stored in StatusDB
    assert not balsamic_context.status_db.family(case_id).analyses

    # GIVEN that HermesAPI returns a deliverables output
    mocker.patch.object(HermesApi, "convert_deliverables")
    HermesApi.convert_deliverables.return_value = CGDeliverables(**hermes_deliverables)

    # GIVEN the bundle has already been stored once
    cli_runner.invoke(store_housekeeper, [case_id], obj=balsamic_context)

    # WHEN storing the same bundle again
    result = cli_runner.invoke(store_housekeeper, [case_id], obj=balsamic_context)

    # THEN the command fails
    assert result.exit_code != EXIT_SUCCESS
    # THEN the user is informed the bundle already exists
    assert "Bundle already added" in caplog.text
def fixture_base_context(
    base_context: CGConfig, project_dir: Path, real_housekeeper_api: HousekeeperAPI
) -> CGConfig:
    """Return a context with a real Housekeeper API and a delivery path configured."""
    base_context.delivery_path = str(project_dir)
    base_context.housekeeper_api_ = real_housekeeper_api
    return base_context
def fixture_config_object(
    cg_context: CGConfig, analysis_store: Store, real_housekeeper_api: HousekeeperAPI
):
    """Attach an analysis store and a real Housekeeper API to the context."""
    cg_context.housekeeper_api_ = real_housekeeper_api
    cg_context.status_db_ = analysis_store
    return cg_context
def fixture_cg_context(
    context_config: dict, base_store: Store, housekeeper_api: HousekeeperAPI
) -> CGConfig:
    """Build a CGConfig from the raw config dict and attach the base stores."""
    config = CGConfig(**context_config)
    config.housekeeper_api_ = housekeeper_api
    config.status_db_ = base_store
    return config
def fixture_balsamic_context(
    cg_context: CGConfig,
    helpers: StoreHelpers,
    balsamic_lims: MockLimsAPI,
    balsamic_housekeeper: HousekeeperAPI,
    trailblazer_api: MockTB,
    hermes_api: HermesApi,
    cg_dir,
) -> CGConfig:
    """context to use in cli

    Seeds StatusDB with application versions, valid textbook Balsamic cases,
    deliberate ERROR cases, and two bed versions, then returns the context.
    """
    # Wire the mocked/real sub-APIs onto the context
    cg_context.housekeeper_api_ = balsamic_housekeeper
    cg_context.lims_api_ = balsamic_lims
    cg_context.trailblazer_api_ = trailblazer_api
    cg_context.meta_apis["analysis_api"] = BalsamicAnalysisAPI(config=cg_context)
    status_db: Store = cg_context.status_db
    # Create tgs application version
    helpers.ensure_application_version(store=status_db, application_tag="TGSA", application_type="tgs")
    # Create wes application version
    helpers.ensure_application_version(store=status_db, application_tag="WESA", application_type="wes")
    # Create textbook case for WGS PAIRED with enough reads
    case_wgs_paired_enough_reads = helpers.add_case(
        store=status_db,
        internal_id="balsamic_case_wgs_paired_enough_reads",
        name="balsamic_case_wgs_paired_enough_reads",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wgs_paired_tumor_enough_reads = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_tumor_enough_reads",
        is_tumour=True,
        application_type="wgs",
        reads=10,
        sequenced_at=dt.datetime.now(),
    )
    sample_case_wgs_paired_normal_enough_reads = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_normal_enough_reads",
        is_tumour=False,
        application_type="wgs",
        reads=10,
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_wgs_paired_enough_reads,
        sample=sample_case_wgs_paired_tumor_enough_reads,
    )
    helpers.add_relationship(
        status_db,
        case=case_wgs_paired_enough_reads,
        sample=sample_case_wgs_paired_normal_enough_reads,
    )
    # Create textbook case for WGS PAIRED
    case_wgs_paired = helpers.add_case(
        store=status_db,
        internal_id="balsamic_case_wgs_paired",
        name="balsamic_case_wgs_paired",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wgs_paired_tumor = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_tumor",
        is_tumour=True,
        application_type="wgs",
        reads=10,
        sequenced_at=dt.datetime.now(),
    )
    sample_case_wgs_paired_normal = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_normal",
        is_tumour=False,
        application_type="wgs",
        reads=10,
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_wgs_paired, sample=sample_case_wgs_paired_tumor)
    helpers.add_relationship(status_db, case=case_wgs_paired, sample=sample_case_wgs_paired_normal)
    # Create textbook case for TGS PAIRED without enough reads
    case_tgs_paired = helpers.add_case(
        status_db,
        internal_id="balsamic_case_tgs_paired",
        name="balsamic_case_tgs_paired",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_tgs_paired_tumor = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_paired_tumor",
        is_tumour=True,
        application_tag="TGSA",
        application_type="tgs",
        reads=10,
        sequenced_at=dt.datetime.now(),
    )
    # Normal sample with reads=0 makes this the "not enough reads" case
    sample_case_tgs_paired_normal = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_paired_normal",
        is_tumour=False,
        application_tag="TGSA",
        application_type="tgs",
        reads=0,
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_tgs_paired, sample=sample_case_tgs_paired_tumor)
    helpers.add_relationship(status_db, case=case_tgs_paired, sample=sample_case_tgs_paired_normal)
    # Create textbook case for WGS TUMOR ONLY
    case_wgs_single = helpers.add_case(
        status_db,
        internal_id="balsamic_case_wgs_single",
        name="balsamic_case_wgs_single",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wgs_single_tumor = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_single_tumor",
        is_tumour=True,
        application_type="wgs",
        reads=100,
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_wgs_single, sample=sample_case_wgs_single_tumor)
    # Create textbook case for TGS TUMOR ONLY
    case_tgs_single = helpers.add_case(
        status_db,
        internal_id="balsamic_case_tgs_single",
        name="balsamic_case_tgs_single",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_tgs_single_tumor = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_single_tumor",
        is_tumour=True,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_tgs_single, sample=sample_case_tgs_single_tumor)
    # Create ERROR case for TGS NORMAL ONLY
    case_tgs_single_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_tgs_single_error",
        name="balsamic_case_tgs_single_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_tgs_single_normal_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_single_normal_error",
        is_tumour=False,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_tgs_single_error,
        sample=sample_case_tgs_single_normal_error,
    )
    # Create ERROR case for TGS TWO TUMOR ONE NORMAL
    case_tgs_paired_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_tgs_paired_error",
        name="balsamic_case_tgs_paired_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_tgs_paired_tumor_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_paired_tumor_error",
        is_tumour=True,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    sample_case_tgs_paired_tumor2_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_paired_tumor2_error",
        is_tumour=True,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    sample_case_tgs_paired_normal_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_tgs_paired_normal_error",
        is_tumour=False,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_tgs_paired_error,
        sample=sample_case_tgs_paired_tumor_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_tgs_paired_error,
        sample=sample_case_tgs_paired_tumor2_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_tgs_paired_error,
        sample=sample_case_tgs_paired_normal_error,
    )
    # Create ERROR case for MIXED application type
    case_mixed_paired_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_mixed_paired_error",
        name="balsamic_case_mixed_paired_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    mixed_sample_case_wgs_paired_tumor_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_wgs_paired_tumor_error",
        is_tumour=True,
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    mixed_sample_case_tgs_paired_normal_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_tgs_paired_normal_error",
        is_tumour=False,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_paired_error,
        sample=mixed_sample_case_wgs_paired_tumor_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_paired_error,
        sample=mixed_sample_case_tgs_paired_normal_error,
    )
    # Create ERROR case for MIXED application type NOT BALSAMIC APPLICATION
    case_mixed_wgs_mic_paired_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_mixed_wgs_mic_paired_error",
        name="balsamic_case_mixed_wgs_mic_paired_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    mixed_sample_case_wgs_mic_paired_tumor_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_wgs_mic_paired_tumor_error",
        is_tumour=True,
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    mixed_sample_case_wgs_mic_paired_normal_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_wgs_mic_paired_normal_error",
        is_tumour=False,
        application_tag="MICA",
        application_type="mic",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_wgs_mic_paired_error,
        sample=mixed_sample_case_wgs_mic_paired_tumor_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_wgs_mic_paired_error,
        sample=mixed_sample_case_wgs_mic_paired_normal_error,
    )
    # Create ERROR case for MIXED TARGET BED
    case_mixed_bed_paired_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_mixed_bed_paired_error",
        name="balsamic_case_mixed_bed_paired_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    mixed_sample_case_mixed_bed_paired_tumor_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_mixed_bed_paired_tumor_error",
        is_tumour=True,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    mixed_sample_case_mixed_bed_paired_normal_error = helpers.add_sample(
        status_db,
        internal_id="mixed_sample_case_mixed_bed_paired_normal_error",
        is_tumour=False,
        application_tag="TGSA",
        application_type="tgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_bed_paired_error,
        sample=mixed_sample_case_mixed_bed_paired_tumor_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_mixed_bed_paired_error,
        sample=mixed_sample_case_mixed_bed_paired_normal_error,
    )
    # Create ERROR case for WGS TUMOR ONLY MIP CLI_OPTION_ANALYSIS ONLY
    mip_case_wgs_single = helpers.add_case(
        status_db,
        internal_id="mip_case_wgs_single",
        name="mip_case_wgs_single",
        data_analysis=Pipeline.MIP_DNA,
    )
    mip_sample_case_wgs_single_tumor = helpers.add_sample(
        status_db,
        internal_id="mip_sample_case_wgs_single_tumor",
        is_tumour=True,
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=mip_case_wgs_single,
        sample=mip_sample_case_wgs_single_tumor,
    )
    # Create ERROR case for WGS ONE TUMOR TWO NORMAL
    case_wgs_paired_two_normal_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_wgs_paired_two_normal_error",
        name="balsamic_case_wgs_paired_two_normal_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wgs_paired_two_normal_tumor_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_two_normal_tumor_error",
        is_tumour=True,
        application_tag="WGSA",
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    sample_case_wgs_paired_two_normal_normal1_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_two_normal_normal1_error",
        is_tumour=False,
        application_tag="WGSA",
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    sample_case_wgs_paired_two_normal_normal2_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_wgs_paired_two_normal_normal2_error",
        is_tumour=False,
        application_tag="WGSA",
        application_type="wgs",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(
        status_db,
        case=case_wgs_paired_two_normal_error,
        sample=sample_case_wgs_paired_two_normal_tumor_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_wgs_paired_two_normal_error,
        sample=sample_case_wgs_paired_two_normal_normal1_error,
    )
    helpers.add_relationship(
        status_db,
        case=case_wgs_paired_two_normal_error,
        sample=sample_case_wgs_paired_two_normal_normal2_error,
    )
    # Create WES case with 1 tumor sample
    case_wes_tumor = helpers.add_case(
        status_db,
        internal_id="balsamic_case_wes_tumor",
        name="balsamic_case_wes_tumor",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wes_tumor = helpers.add_sample(
        status_db,
        internal_id="sample_case_wes_tumor",
        is_tumour=True,
        application_tag="WESA",
        application_type="wes",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_wes_tumor, sample=sample_case_wes_tumor)
    # Create ERROR case for WES when no panel is found
    case_wes_panel_error = helpers.add_case(
        status_db,
        internal_id="balsamic_case_wes_panel_error",
        name="balsamic_case_wes_panel_error",
        data_analysis=Pipeline.BALSAMIC,
    )
    sample_case_wes_panel_error = helpers.add_sample(
        status_db,
        internal_id="sample_case_wes_panel_error",
        is_tumour=True,
        application_tag="WESA",
        application_type="wes",
        sequenced_at=dt.datetime.now(),
    )
    helpers.add_relationship(status_db, case=case_wes_panel_error, sample=sample_case_wes_panel_error)
    # Create ERROR case with NO SAMPLES
    helpers.add_case(status_db, internal_id="no_sample_case", name="no_sample_case")
    # Create BED1 version 1
    bed1_name = "BalsamicBed1"
    bed1_filename = "balsamic_bed_1.bed"
    # touch the bed file on disk so the path exists for the tests
    Path(cg_dir, bed1_filename).touch(exist_ok=True)
    bed1 = status_db.add_bed(name=bed1_name)
    status_db.add_commit(bed1)
    version1 = status_db.add_bed_version(bed=bed1, version=1, filename=bed1_filename, shortname=bed1_name)
    status_db.add_commit(version1)
    # Create BED2 version 1
    bed2_name = "BalsamicBed2"
    bed2_filename = "balsamic_bed_2.bed"
    Path(cg_dir, bed2_filename).touch(exist_ok=True)
    bed2 = status_db.add_bed(name=bed2_name)
    status_db.add_commit(bed2)
    version2 = status_db.add_bed_version(bed=bed2, version=1, filename=bed2_filename, shortname=bed2_name)
    status_db.add_commit(version2)
    return cg_context
def fixture_base_context(
    base_store: Store, housekeeper_api: HousekeeperAPI, cg_config_object: CGConfig
) -> CGConfig:
    """context to use in cli"""
    cg_config_object.housekeeper_api_ = housekeeper_api
    cg_config_object.status_db_ = base_store
    return cg_config_object
def test_store_available(
    tmpdir_factory,
    cli_runner: CliRunner,
    balsamic_context: CGConfig,
    real_housekeeper_api,
    mock_config,
    mock_deliverable,
    mock_analysis_finish,
    caplog,
    mocker,
    hermes_deliverables,
):
    """Test to ensure all parts of compound store-available command are executed given ideal conditions
    Test that store-available picks up eligible cases and does not pick up ineligible ones"""
    caplog.set_level(logging.INFO)

    # GIVEN CASE ID of sample where read counts pass threshold
    case_id_success = "balsamic_case_wgs_single"

    # GIVEN CASE ID where analysis finish is not mocked
    case_id_fail = "balsamic_case_wgs_paired"

    # Ensure the config is mocked for fail case to run compound command
    Path.mkdir(
        Path(balsamic_context.meta_apis["analysis_api"].get_case_config_path(case_id_fail)).parent,
        exist_ok=True,
    )
    Path(balsamic_context.meta_apis["analysis_api"].get_case_config_path(case_id_fail)).touch(
        exist_ok=True
    )

    # GIVEN that HermesAPI returns a deliverables output
    mocker.patch.object(HermesApi, "convert_deliverables")
    HermesApi.convert_deliverables.return_value = CGDeliverables(**hermes_deliverables)

    # Ensure case was successfully picked up by start-available and status set to running
    result = cli_runner.invoke(start_available, ["--dry-run"], obj=balsamic_context)
    balsamic_context.status_db.family(case_id_success).action = "running"
    balsamic_context.status_db.commit()

    # THEN command exits with 1 because one of the cases threw errors
    assert result.exit_code == 1
    assert case_id_success in caplog.text
    assert balsamic_context.status_db.family(case_id_success).action == "running"

    # Swap in a real Housekeeper store before storing the results
    balsamic_context.housekeeper_api_ = real_housekeeper_api
    balsamic_context.meta_apis["analysis_api"].housekeeper_api = real_housekeeper_api

    # WHEN running command
    result = cli_runner.invoke(store_available, obj=balsamic_context)

    # THEN command exits successfully
    assert result.exit_code == 0

    # THEN case id with analysis_finish gets picked up
    assert case_id_success in caplog.text

    # THEN case has analyses
    assert balsamic_context.status_db.family(case_id_success).analyses

    # THEN bundle can be found in Housekeeper
    assert balsamic_context.housekeeper_api.bundle(case_id_success)

    # THEN bundle added successfully and action set to None
    assert balsamic_context.status_db.family(case_id_success).action is None