Example #1
def test_dry_run(
    cli_runner: CliRunner,
    clean_context: CGConfig,
    timestamp_yesterday: dt.datetime,
    helpers: StoreHelpers,
    caplog,
    mocker,
):
    """Test command with dry run options"""

    # GIVEN a case on disk that could be deleted
    caplog.set_level(logging.INFO)
    base_store = clean_context.status_db
    helpers.add_analysis(
        base_store,
        pipeline=Pipeline.BALSAMIC,
        started_at=timestamp_yesterday,
        uploaded_at=timestamp_yesterday,
        cleaned_at=None,
    )
    case_id = "balsamic_case_clean"
    analysis_to_clean = base_store.family(case_id).analyses[0]
    case_path = clean_context.meta_apis["analysis_api"].get_case_path(case_id)
    Path(case_path).mkdir(exist_ok=True, parents=True)

    mocker.patch.object(TrailblazerAPI, "is_latest_analysis_ongoing", return_value=False)

    # WHEN running the clean command with the dry-run flag
    result = cli_runner.invoke(clean_run_dir, [case_id, "-d", "-y"], obj=clean_context)
    # THEN command should say it would have deleted
    assert result.exit_code == EXIT_SUCCESS
    assert "Would have deleted" in caplog.text
    assert case_id in caplog.text
    assert analysis_to_clean in base_store.analyses_to_clean(pipeline=Pipeline.BALSAMIC)
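Note: the patch above uses pytest-mock's mocker fixture. A minimal, standalone sketch of the same stubbing pattern, with an illustrative class standing in for TrailblazerAPI (not taken from cg):

class ExternalTracker:
    """Illustrative stand-in for an API client that would hit a network service."""

    def is_latest_analysis_ongoing(self, case_id: str) -> bool:
        raise RuntimeError("real implementation should not be called in tests")


def test_stubbed_tracker(mocker):
    # Patch the method on the class so the test never touches the real service
    mocker.patch.object(ExternalTracker, "is_latest_analysis_ongoing", return_value=False)
    assert ExternalTracker().is_latest_analysis_ongoing("some_case") is False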
Example #2
def test_analysis_without_started_at(
    upload_context: CGConfig, cli_runner: CliRunner, helpers: StoreHelpers, caplog
):
    """Test the delivery report command when no date parameter is passed"""

    # GIVEN an analysis with a valid case_id and a start date
    analysis: models.Analysis = helpers.add_analysis(
        upload_context.status_db,
        started_at=datetime.now(),
        pipeline=Pipeline.MIP_DNA,
    )
    case_id: str = analysis.family.internal_id
    a_date: datetime = analysis.started_at
    assert a_date

    caplog.set_level(logging.INFO)

    # WHEN calling delivery_report without a date parameter
    cli_runner.invoke(
        delivery_report,
        [case_id],
        obj=upload_context,
    )

    # THEN the logged info should contain the analysis start date
    assert "Using analysis started at: " in caplog.text
Example #3
def test_upload_auto_with_pipeline_as_argument(
    cli_runner: CliRunner,
    helpers: StoreHelpers,
    timestamp: datetime.datetime,
    upload_context: CGConfig,
    caplog,
):
    """Test upload auto"""
    # GIVEN a store with a MIP analysis
    pipeline = Pipeline.MIP_DNA
    helpers.add_analysis(store=upload_context.status_db, completed_at=timestamp, pipeline=pipeline)

    # WHEN uploading all analyses from the MIP-DNA pipeline
    caplog.set_level(logging.INFO)
    cli_runner.invoke(auto, ["--pipeline", "mip-dna"], obj=upload_context)
    # THEN assert that the MIP analysis was successfully uploaded
    assert "Uploading family" in caplog.text
Example #4
def test_clean_hk_bundle_files_dry_run(
    caplog,
    case_id: str,
    cg_context: CGConfig,
    cli_runner: CliRunner,
    helpers: StoreHelpers,
    mocker,
    timestamp: datetime,
):
    """Test cleaning Housekeeper bundle files in dry-run mode"""
    # GIVEN a housekeeper api with some alignment files
    file_path = "path/to_file.cram"
    tag = "cram"
    hk_bundle_data = {
        "name": case_id,
        "created": timestamp,
        "expires": timestamp,
        "files": [
            {
                "path": file_path,
                "archive": False,
                "tags": [case_id, tag]
            },
        ],
    }
    store = cg_context.status_db
    case = helpers.ensure_case(store=store, case_id=case_id)
    helpers.add_analysis(store=store, case=case, started_at=timestamp, completed_at=timestamp)
    helpers.ensure_hk_bundle(cg_context.housekeeper_api, bundle_data=hk_bundle_data)

    # WHEN running the clean command in dry run mode
    caplog.set_level(logging.INFO)
    result = cli_runner.invoke(
        hk_bundle_files, ["-c", case_id, "--dry-run", "--tags", tag], obj=cg_context
    )

    # THEN assert it exits with success
    assert result.exit_code == 0
    # THEN assert that the file was reported as not being on disk
    assert f"{file_path} not on disk" in caplog.text
Example #5
def test_find_analysis_via_date(sample_store: Store, helpers: StoreHelpers):
    """Test that an analysis can be found via its case and start date"""
    # GIVEN a case with an analysis with a start date in the database
    analysis = helpers.add_analysis(store=sample_store, started_at=datetime.now())
    assert analysis.started_at

    # WHEN getting analysis via case_id and start date
    db_analysis = sample_store.analysis(analysis.family, analysis.started_at)

    # THEN the analysis should have been retrieved
    assert db_analysis == analysis
Example #6
def test_delete_case_with_analysis(
    cli_runner: CliRunner, base_context: CGConfig, helpers: StoreHelpers
):
    """Test that the delete case can't delete a case with analysis"""
    # GIVEN a database with a case with an analysis
    base_store: Store = base_context.status_db
    analysis_obj = helpers.add_analysis(base_store)
    case_id = analysis_obj.family.internal_id

    # WHEN deleting a case
    result = cli_runner.invoke(delete_case_command, [case_id, "--yes"], obj=base_context)

    # THEN it should not have been deleted
    assert result.exit_code != SUCCESS
    assert base_store.Family.query.count() == 1
Example #7
def test_get_analysis_required(
    cli_runner: CliRunner, base_context: CGConfig, disk_store: Store, helpers: StoreHelpers
):
    """Test to get a analysis using only the required argument"""
    # GIVEN a database with an analysis
    analysis: models.Analysis = helpers.add_analysis(disk_store, pipeline_version="9.3")
    internal_id = analysis.family.internal_id
    assert disk_store.Analysis.query.count() == 1

    # WHEN getting an analysis
    result = cli_runner.invoke(get, ["analysis", internal_id], obj=base_context)

    # THEN the analysis should have been retrieved
    assert result.exit_code == RETURN_SUCCESS
    assert str(analysis.started_at) in result.output
    assert analysis.pipeline in result.output
    assert analysis.pipeline_version in result.output
Example #8
def fixture_mip_dna_analysis_obj(
    analysis_store_trio: Store, case_id: str, timestamp: datetime, helpers: StoreHelpers
) -> models.Analysis:
    """Return a MIP DNA analysis object for a case with synopsis, phenotype and subject information."""
    helpers.add_synopsis_to_case(store=analysis_store_trio, case_id=case_id)
    case_obj: models.Family = analysis_store_trio.family(case_id)
    analysis_obj: models.Analysis = helpers.add_analysis(
        store=analysis_store_trio,
        case=case_obj,
        started_at=timestamp,
        pipeline=Pipeline.MIP_DNA,
        completed_at=timestamp,
    )
    for link in case_obj.links:
        helpers.add_phenotype_groups_to_sample(
            store=analysis_store_trio, sample_id=link.sample.internal_id
        )
        helpers.add_phenotype_terms_to_sample(
            store=analysis_store_trio, sample_id=link.sample.internal_id
        )
        helpers.add_subject_id_to_sample(
            store=analysis_store_trio, sample_id=link.sample.internal_id
        )
    return analysis_obj
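Note: given the fixture_ prefix, this helper is presumably registered as a pytest fixture; the decorator is not shown in the source, so the registration below is an assumption sketched for context:

import pytest


@pytest.fixture(name="mip_dna_analysis_obj")
def fixture_mip_dna_analysis_obj(analysis_store_trio, case_id, timestamp, helpers):
    # Body as in the example above: add case metadata, build the analysis, decorate its samples.
    ...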