Example #1
def nipt_upload_case(context: CGConfig, case_id: str, dry_run: bool, force: bool):
    """Upload the results file of a NIPT case"""
    nipt_upload_api: NiptUploadAPI = NiptUploadAPI(context)
    nipt_upload_api.set_dry_run(dry_run=dry_run)

    if force or nipt_upload_api.flowcell_passed_qc_value(
        case_id=case_id, q30_threshold=Q30_THRESHOLD
    ):
        LOG.info("*** NIPT FTP UPLOAD START ***")

        hk_results_file: str = nipt_upload_api.get_housekeeper_results_file(case_id=case_id)
        results_file: Path = nipt_upload_api.get_results_file_path(hk_results_file)

        LOG.info(f"Results file found: {results_file}")
        LOG.info("Starting ftp upload!")

        nipt_upload_api.upload_to_ftp_server(results_file)

        LOG.info("Upload ftp finished!")
    else:
        LOG.error("Uploading case failed: %s", case_id)
        LOG.error(
            f"Flowcell did not pass one of the following QC parameters:\n"
            f"target_reads={nipt_upload_api.target_reads(case_id=case_id)}, Q30_threshold={Q30_THRESHOLD}"
        )
        raise AnalysisUploadError("Upload failed")
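
A minimal sketch, assuming standard click wiring, of how a function with this signature is typically exposed as a command: the CGConfig object arrives as the first argument via @click.pass_obj, and the remaining parameters map to one argument and two flags. The command and option names below are assumptions, not taken from the source.

import click

@click.command("ftp")
@click.argument("case_id")
@click.option("--dry-run", is_flag=True, help="Log actions without uploading anything")
@click.option("--force", is_flag=True, help="Skip the Q30 QC gate")
@click.pass_obj
def nipt_upload_case(context, case_id: str, dry_run: bool, force: bool):
    ...  # body as in the example above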
Example #2
def nipt_upload_all(context: click.Context, dry_run: bool):
    """Upload NIPT result files for all cases"""

    LOG.info("*** NIPT UPLOAD ALL START ***")

    nipt_upload_api: NiptUploadAPI = NiptUploadAPI(context.obj)
    nipt_upload_api.set_dry_run(dry_run=dry_run)

    all_good = True
    for analysis_obj in nipt_upload_api.get_all_upload_analyses():
        internal_id = analysis_obj.family.internal_id

        if nipt_upload_api.flowcell_passed_qc_value(
            case_id=internal_id, q30_threshold=Q30_THRESHOLD
        ):
            LOG.info("Uploading case: %s", internal_id)
            try:
                context.invoke(nipt_upload_case, case_id=internal_id, dry_run=dry_run)
            except AnalysisUploadError:
                LOG.error(traceback.format_exc())
                all_good = False

    if not all_good:
        raise AnalysisUploadError("Some uploads failed")
Example #3
def nipt_upload_case(
    context: click.Context, case_id: Optional[str], dry_run: bool, force: bool
):
    """Upload NIPT result files for a case"""

    LOG.info("*** NIPT UPLOAD START ***")

    if context.invoked_subcommand is not None:
        return

    nipt_upload_api: NiptUploadAPI = NiptUploadAPI(context.obj)
    nipt_upload_api.set_dry_run(dry_run=dry_run)
    if force or nipt_upload_api.flowcell_passed_qc_value(
        case_id=case_id, q30_threshold=Q30_THRESHOLD
    ):
        nipt_upload_api.update_analysis_upload_started_date(case_id)
        context.invoke(batch, case_id=case_id, force=force, dry_run=dry_run)
        context.invoke(
            nipt_upload_ftp_case, case_id=case_id, force=force, dry_run=dry_run
        )
        nipt_upload_api.update_analysis_uploaded_at_date(case_id)
        LOG.info("%s: analysis uploaded!", case_id)
    else:
        LOG.error("Uploading case failed: %s", case_id)
        LOG.error(
            f"Flowcell did not pass one of the following QC parameters:\n"
            f"target_reads={nipt_upload_api.target_reads(case_id=case_id)}, Q30_threshold={Q30_THRESHOLD}"
        )
        raise AnalysisUploadError("Upload failed")
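
The invoked_subcommand check suggests this callback sits on a click group created with invoke_without_command=True: running the bare group performs the full chained upload, while running a subcommand (such as the batch command) leaves the work to that subcommand. A minimal sketch of that pattern, with the group and option names assumed rather than taken from the source:

import click

@click.group(invoke_without_command=True)
@click.option("--case-id", required=False)
@click.option("--dry-run", is_flag=True)
@click.option("--force", is_flag=True)
@click.pass_context
def nipt_upload_case(context, case_id, dry_run, force):
    if context.invoked_subcommand is not None:
        return  # e.g. `... case batch` runs only the batch subcommand
    ...  # otherwise run the chained Statina + FTP upload shown above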
Example #4
def batch(configs: CGConfig, case_id: str, dry_run: bool, force: bool):
    """Loading batch into the NIPT database"""

    LOG.info("*** Statina UPLOAD START ***")

    nipt_upload_api = NiptUploadAPI(configs)
    nipt_upload_api.set_dry_run(dry_run=dry_run)
    statina_files: StatinaUploadFiles = nipt_upload_api.get_statina_files(case_id=case_id)
    if dry_run:
        LOG.info(f"Found file paths for statina upload: {statina_files.json(exclude_none=True)}")
    elif force or nipt_upload_api.flowcell_passed_qc_value(
        case_id=case_id, q30_threshold=Q30_THRESHOLD
    ):
        nipt_upload_api.upload_to_statina_database(statina_files=statina_files)
    else:
        LOG.error("Uploading case failed: %s", case_id)
        LOG.error(
            f"Flowcell did not pass one of the following QC parameters:\n"
            f"target_reads={nipt_upload_api.target_reads(case_id=case_id)}, Q30_threshold={Q30_THRESHOLD}"
        )
        raise AnalysisUploadError("Upload failed")
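
The statina_files.json(exclude_none=True) call indicates that StatinaUploadFiles is a pydantic (v1-style) model whose unset paths are dropped from the serialized payload. A sketch of what such a model could look like; the field names are illustrative assumptions, not the real schema.

from typing import Optional
from pydantic import BaseModel

class StatinaUploadFiles(BaseModel):
    result_file: str                      # required NIPT results file (illustrative field name)
    multiqc_report: Optional[str] = None  # optional extras are skipped when None
    segmental_calls: Optional[str] = None

files = StatinaUploadFiles(result_file="/path/to/results.csv")
print(files.json(exclude_none=True))  # {"result_file": "/path/to/results.csv"}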
Example #5
def bioinfo(context: CGConfig, case_name: str, cleanup: bool, target_load: str, dry: bool):
    """Load bioinfo case results to the trending database"""
    status_db: Store = context.status_db
    housekeeper_api: HousekeeperAPI = context.housekeeper_api
    upload_vogue_api = UploadVogueAPI(
        genotype_api=context.genotype_api, vogue_api=context.vogue_api, store=status_db
    )

    click.echo(click.style("----------------- BIOINFO -----------------------"))

    load_bioinfo_raw_inputs = dict()

    # Get the samples for the case_name through the StatusDB API
    load_bioinfo_raw_inputs["samples"] = _get_samples(status_db, case_name)

    # Get the analysis result file through the Housekeeper API
    load_bioinfo_raw_inputs["analysis_result_file"] = _get_multiqc_latest_file(
        housekeeper_api, case_name
    )

    # Get analysis_type [multiqc or microsalt or all] from the CLI.
    # This might be automated to some extent by checking whether the input is a MultiQC JSON file.
    # It tells us how the result was generated: if it is multiqc, it will try to validate the keys
    # against an actual model.
    load_bioinfo_raw_inputs["analysis_type"] = "multiqc"

    # case_name is the input
    load_bioinfo_raw_inputs["analysis_case_name"] = case_name

    # Get case_analysis_type from the CLI: a free-text entry in the trending database
    load_bioinfo_raw_inputs["case_analysis_type"] = "multiqc"

    # Get workflow_name and workflow_version
    workflow_name, workflow_version = _get_analysis_workflow_details(status_db, case_name)

    if workflow_name is None:
        raise AnalysisUploadError(
            f"Case upload failed: {case_name}. Reason: Workflow name not found."
        )
    workflow_name = workflow_name.lower()

    if workflow_name not in VOGUE_VALID_BIOINFO:
        raise AnalysisUploadError(
            f"Case upload failed: {case_name}. Reason: Bad workflow name: {workflow_name}."
        )

    load_bioinfo_raw_inputs["analysis_workflow_name"] = workflow_name
    load_bioinfo_raw_inputs["analysis_workflow_version"] = workflow_version

    if dry:
        click.echo(click.style("----------------- DRY RUN -----------------------"))

    if target_load in ("raw", "all"):
        click.echo(click.style("----------------- UPLOAD UNPROCESSED -----------------------"))
        if not dry:
            upload_vogue_api.load_bioinfo_raw(load_bioinfo_raw_inputs)

    if target_load in ("process", "all"):
        click.echo(click.style("----------------- PROCESS CASE -----------------------"))
        if not dry:
            upload_vogue_api.load_bioinfo_process(load_bioinfo_raw_inputs, cleanup)
        click.echo(click.style("----------------- PROCESS SAMPLE -----------------------"))
        if not dry:
            upload_vogue_api.load_bioinfo_sample(load_bioinfo_raw_inputs)
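
For reference, the load_bioinfo_raw_inputs dictionary handed to the UploadVogueAPI calls ends up with roughly this shape; all concrete values below are illustrative only.

load_bioinfo_raw_inputs = {
    "samples": ["ACC0001A1", "ACC0001A2"],        # whatever _get_samples returns for the case
    "analysis_result_file": "multiqc_data.json",  # latest MultiQC file found in Housekeeper
    "analysis_type": "multiqc",
    "analysis_case_name": "brightcat",            # the case_name passed on the CLI
    "case_analysis_type": "multiqc",
    "analysis_workflow_name": "mip-dna",          # must be a member of VOGUE_VALID_BIOINFO
    "analysis_workflow_version": "9.1.0",
}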
Example #6
def upload(context: click.Context, family_id: Optional[str], force_restart: bool):
    """Upload results from analyses."""
    config_object: CGConfig = context.obj
    if not config_object.meta_apis.get("analysis_api"):
        config_object.meta_apis["analysis_api"] = MipDNAAnalysisAPI(context.obj)
    analysis_api: AnalysisAPI = config_object.meta_apis["analysis_api"]
    status_db: Store = config_object.status_db

    click.echo(click.style("----------------- UPLOAD ----------------------"))

    if family_id:
        try:
            analysis_api.verify_case_id_in_statusdb(case_id=family_id)
        except CgError:
            raise click.Abort

        case_obj: models.Family = status_db.family(family_id)
        if not case_obj.analyses:
            message = f"no analysis exists for family: {family_id}"
            click.echo(click.style(message, fg="red"))
            raise click.Abort

        analysis_obj: models.Analysis = case_obj.analyses[0]

        if analysis_obj.uploaded_at is not None:
            message = f"analysis already uploaded: {analysis_obj.uploaded_at.date()}"
            click.echo(click.style(message, fg="red"))
            raise click.Abort

        if not force_restart and analysis_obj.upload_started_at is not None:
            if dt.datetime.now() - analysis_obj.upload_started_at > dt.timedelta(hours=24):
                raise AnalysisUploadError(
                    f"The upload started at {analysis_obj.upload_started_at}; "
                    f"something went wrong. Restart it with the --restart flag."
                )

            message = f"analysis upload already started: {analysis_obj.upload_started_at.date()}"
            click.echo(click.style(message, fg="yellow"))
            return

    context.obj.meta_apis["report_api"] = ReportAPI(
        store=status_db,
        lims_api=config_object.lims_api,
        chanjo_api=config_object.chanjo_api,
        analysis_api=analysis_api,
        scout_api=config_object.scout_api,
    )

    context.obj.meta_apis["scout_upload_api"] = UploadScoutAPI(
        hk_api=config_object.housekeeper_api,
        scout_api=config_object.scout_api,
        madeline_api=config_object.madeline_api,
        analysis_api=analysis_api,
        lims_api=config_object.lims_api,
        status_db=status_db,
    )

    if context.invoked_subcommand is not None:
        return

    if not family_id:
        suggest_cases_to_upload(status_db=status_db)
        raise click.Abort

    case_obj: models.Family = status_db.family(family_id)
    analysis_obj: models.Analysis = case_obj.analyses[0]
    if analysis_obj.uploaded_at is not None:
        message = f"analysis already uploaded: {analysis_obj.uploaded_at.date()}"
        click.echo(click.style(message, fg="yellow"))
    else:
        analysis_obj.upload_started_at = dt.datetime.now()
        status_db.commit()
        context.invoke(coverage, re_upload=True, family_id=family_id)
        context.invoke(validate, family_id=family_id)
        context.invoke(genotypes, re_upload=False, family_id=family_id)
        context.invoke(observations, case_id=family_id)
        context.invoke(scout, case_id=family_id)
        analysis_obj.uploaded_at = dt.datetime.now()
        status_db.commit()
        click.echo(click.style(f"{family_id}: analysis uploaded!", fg="green"))
Example #7
def upload(context, family_id, force_restart):
    """Upload results from analyses."""

    click.echo(click.style("----------------- UPLOAD ----------------------"))

    context.obj["status"] = Store(context.obj["database"])

    if family_id:
        family_obj = context.obj["status"].family(family_id)
        if not family_obj:
            message = f"family not found: {family_id}"
            click.echo(click.style(message, fg="red"))
            context.abort()

        if not family_obj.analyses:
            message = f"no analysis exists for family: {family_id}"
            click.echo(click.style(message, fg="red"))
            context.abort()

        analysis_obj = family_obj.analyses[0]

        if analysis_obj.uploaded_at is not None:
            message = f"analysis already uploaded: {analysis_obj.uploaded_at.date()}"
            click.echo(click.style(message, fg="red"))
            context.abort()

        if not force_restart and analysis_obj.upload_started_at is not None:
            if dt.datetime.now() - analysis_obj.upload_started_at > dt.timedelta(hours=24):
                raise AnalysisUploadError(
                    f"The upload started at {analysis_obj.upload_started_at}; "
                    f"something went wrong. Restart it with the --restart flag."
                )

            message = f"analysis upload already started: {analysis_obj.upload_started_at.date()}"
            click.echo(click.style(message, fg="yellow"))
            return

    context.obj["housekeeper_api"] = hk.HousekeeperAPI(context.obj)

    context.obj["madeline_api"] = madeline.api.MadelineAPI(context.obj)
    context.obj["genotype_api"] = gt.GenotypeAPI(context.obj)
    context.obj["lims_api"] = lims.LimsAPI(context.obj)
    context.obj["tb_api"] = tb.TrailblazerAPI(context.obj)
    context.obj["chanjo_api"] = coverage_app.ChanjoAPI(context.obj)
    context.obj["deliver_api"] = DeliverAPI(
        context.obj,
        hk_api=context.obj["housekeeper_api"],
        lims_api=context.obj["lims_api"],
        case_tags=CASE_TAGS,
        sample_tags=SAMPLE_TAGS,
    )
    context.obj["scout_api"] = scoutapi.ScoutAPI(context.obj)
    context.obj["analysis_api"] = AnalysisAPI(
        context.obj,
        hk_api=context.obj["housekeeper_api"],
        scout_api=context.obj["scout_api"],
        tb_api=context.obj["tb_api"],
        lims_api=context.obj["lims_api"],
        deliver_api=context.obj["deliver_api"],
    )
    context.obj["report_api"] = ReportAPI(
        store=context.obj["status"],
        lims_api=context.obj["lims_api"],
        chanjo_api=context.obj["chanjo_api"],
        analysis_api=context.obj["analysis_api"],
        scout_api=context.obj["scout_api"],
    )

    context.obj["scout_upload_api"] = UploadScoutAPI(
        hk_api=context.obj["housekeeper_api"],
        scout_api=context.obj["scout_api"],
        madeline_api=context.obj["madeline_api"],
        analysis_api=context.obj["analysis_api"],
        lims_api=context.obj["lims_api"],
    )

    if context.invoked_subcommand is not None:
        return

    if not family_id:
        _suggest_cases_to_upload(context)
        context.abort()

    family_obj = context.obj["status"].family(family_id)
    analysis_obj = family_obj.analyses[0]
    if analysis_obj.uploaded_at is not None:
        message = f"analysis already uploaded: {analysis_obj.uploaded_at.date()}"
        click.echo(click.style(message, fg="yellow"))
    else:
        analysis_obj.upload_started_at = dt.datetime.now()
        context.obj["status"].commit()
        context.invoke(coverage, re_upload=True, family_id=family_id)
        context.invoke(validate, family_id=family_id)
        context.invoke(genotypes, re_upload=False, family_id=family_id)
        context.invoke(observations, case_id=family_id)
        context.invoke(scout, case_id=family_id)
        analysis_obj.uploaded_at = dt.datetime.now()
        context.obj["status"].commit()
        click.echo(click.style(f"{family_id}: analysis uploaded!", fg="green"))