Example #1
0
def gdrive_processing(args):
    """CLI entry point for Google Drive processing.

    Builds a config, sets up logging, pulls Google credentials from AWS
    Secrets Manager (using the secret id from the parsed CLI args), and
    hands off to the main processing routine.
    """
    config = Config()
    create_logger(config, debug=args.debug)

    creds = gutils.get_secrets_manager_credentials(args.secret_id)
    processing(config, creds)
Example #2
0
def cliadb(ctx, db_secret, debug):
    """Click group callback: stash shared config and DB engine on the context.

    Initializes logging and puts a Config and a DB engine into ``ctx.obj``
    for subcommands to use.
    """
    ctx.ensure_object(dict)

    config = Config()
    ctx.obj["CONFIG"] = config
    create_logger(config, debug=debug)

    log.info("Starting CLIAHub database command line tool")

    db_uri = util.get_db_uri(db_secret)
    ctx.obj["ENGINE"] = init_db(db_uri)
Example #3
0
def lambda_handler(event, context):
    """AWS Lambda entry point: fetch credentials and run processing.

    Any failure is logged with its traceback and then re-raised so the
    Lambda invocation is reported as failed.
    """
    cfg = Config()
    create_logger(cfg)

    try:
        creds = gutils.get_secrets_manager_credentials()
        processing(cfg, creds)
    except Exception:
        # Log with full traceback, then propagate to mark the run failed.
        logger.critical(f"Error in [{cfg.aws_env}]", exc_info=True)
        raise
Example #4
0
def compile_accessions_cli(args):
    """CLI wrapper around AccessionTracking.compile_accessions.

    Credentials come from AWS Secrets Manager using the secret id given
    on the command line.
    """
    cfg = Config()
    create_logger(cfg, debug=args.debug)

    creds = gutils.get_secrets_manager_credentials(args.secret_id)
    tracker = AccessionTracking(cfg=cfg, google_creds=creds)
    tracker.compile_accessions(
        sample_barcodes=args.sample_barcodes,
        run_path=args.run_path,
        updates_only=args.updates_only,
    )
Example #5
0
def lambda_handler(event, context):
    """AWS Lambda entry point that routes Drive/Sheets actions.

    The ``action`` field of the JSON request body selects which handler
    runs. Unknown or missing actions fall through to a plain 200 "OK".
    Any uncaught error is posted to Slack and re-raised.
    """
    try:
        cfg = Config()
        create_logger(cfg)
        creds = gutils.get_secrets_manager_credentials()
        os.chdir("/tmp")

        # Set up for Google Drive
        drive_service = drive.get_service(creds)
        sheets_service = sheets.get_service(creds)

        # Map each action name to a handler taking the parsed event body.
        dispatch = {
            "external_sample_shipment": lambda body:
                external_sample_shipment.handle_external_sample_shipment_request(
                    cfg, drive_service, body),
            "sample_database": lambda body:
                sample_database.external_sample_database(
                    cfg, drive_service, sheets_service, body),
            "draw_96_plate_map": lambda body:
                draw_96_plate_map.draw_96_plate_map(
                    cfg, drive_service, sheets_service, body),
            "concat_96_384": lambda body:
                concat_96_384.concat_96_384(
                    cfg, drive_service, sheets_service, body),
            "bind_index_plate": lambda body:
                bind_index_plate.handle_bind_index_plate_request(
                    cfg, drive_service, sheets_service, body),
            "update_ripe_samples": lambda body:
                update_ripe_samples.update_ripe_samples(
                    cfg, drive_service, sheets_service, body),
            "metadata_lookup": lambda body:
                metadata_lookup.metadata_lookup(
                    cfg, drive_service, sheets_service, body),
        }

        # Route based on the action in the request body, if any.
        action = None
        if "body" in event:
            event_body = json.loads(event["body"])
            logger.info(msg=f"EVENT_BODY: {event_body}")
            action = event_body["action"]

        logger.info(msg=f"ACTION: {action}")
        if action in dispatch:
            # event_body is guaranteed bound here: action is only non-None
            # when "body" was present and parsed above.
            dispatch[action](event_body)

        return {
            "statusCode": 200,
            "headers": {
                "Content-Type": "text/html"
            },
            "body": "OK",
        }
    except Exception as err:
        slack.post(f"*Error in mNGS scripts:*\n{err}")
        raise
Example #6
0
def main():
    """Script entry point: parse CLI options and fetch barcodes."""
    import argparse

    arg_parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    arg_parser.add_argument("barcodes", nargs="+")
    arg_parser.add_argument(
        "--output-dir", type=pathlib.Path, default=pathlib.Path("."))
    arg_parser.add_argument("--debug", action="store_true")
    arg_parser.add_argument("--secret-id", default="covid-19/google_creds")

    parsed = arg_parser.parse_args()

    cfg = Config()
    create_logger(cfg, debug=parsed.debug)
    fetch_barcodes(parsed, cfg)
Example #7
0
def main():
    """Script entry point: create a layout PDF per sample-plate barcode.

    Looks each barcode up in the collective form's sample metadata sheet,
    skipping barcodes with zero or multiple matches, and renders a layout
    PDF for every barcode that resolves to exactly one row.
    """
    arg_parser = argparse.ArgumentParser()

    arg_parser.add_argument("barcodes", nargs="+")
    arg_parser.add_argument("--output-dir", type=Path, default=Path("."))

    arg_parser.add_argument("--debug", action="store_true")
    arg_parser.add_argument("--secret-id", default="covid-19/google_creds")

    parsed = arg_parser.parse_args()

    cfg = Config()
    create_logger(cfg, debug=parsed.debug)

    creds = gutils.get_secrets_manager_credentials(parsed.secret_id)
    drive_service = drive.get_service(creds)

    logger.debug("Downloading collective form")
    collective_form = CollectiveForm(
        drive_service, cfg["DATA"]["collection_form_spreadsheet_id"])
    plate_metadata = collective_form[SampleMetadata.SHEET_NAME]

    for barcode in parsed.barcodes:
        # A barcode must match exactly one metadata row; otherwise skip it.
        try:
            row = clean_single_row(
                plate_metadata, SampleMetadata.SAMPLE_PLATE_BARCODE, barcode)
        except MetadataNotFoundError:
            logger.error(f"0 results for {barcode}, skipping")
            continue
        except MultipleRowsError as ex:
            logger.error(f"{ex.match_count} results for {barcode}, skipping")
            continue

        row[SampleMetadata.TIMESTAMP] = str(row[SampleMetadata.TIMESTAMP])
        row[LOCAL_RUN] = (parsed.output_dir, drive_service)

        logger.debug(f"Making layout PDF for {barcode}")
        create_layout_pdf(cfg=cfg, entry_data=row)
Example #8
0
def lambda_handler(entry: Dict[str, Any], context):
    """AWS Lambda entry point for layout-PDF generation.

    Parameters
    ----------
    entry : Dict
        Dictionary passed by AWS; its 'body' value is parsed as JSON to
        form the entry_data dictionary.
    context :
        AWS Lambda context object. Not used here.
    """
    cfg = Config()
    create_logger(cfg)

    entry_data = json.loads(entry["body"])
    logger.info(f"got entry: {entry_data}")

    try:
        create_layout_pdf(cfg=cfg, entry_data=entry_data)
    except Exception as err:
        # Notify Slack and log the traceback. The error is deliberately
        # not re-raised, so the invocation itself still reports success.
        logger.critical(f"Error in [{cfg.aws_env}]: {err}",
                        extra={"notify_slack": True})
        logger.exception("Details:")
def lambda_handler(event, context):
    """AWS Lambda entry point that (re)populates the CLIAHub database.

    When the event carries a truthy "CLEAR_DATABASE" flag, all existing
    tables are dropped and the schema is recreated before repopulating.
    """
    cfg = Config()
    create_logger(cfg)

    log.info("Starting DB population process")
    engine = init_db(util.get_db_uri(f"cliahub/cliahub_{cfg.aws_env}"))

    log.debug("Getting google credentials")
    creds = get_secrets_manager_credentials()

    if event.get("CLEAR_DATABASE", False):
        log.info("Deleting existing tables")
        delete_tables(engine)

        log.info("Recreating schema")
        create_tables_and_schema(engine)

    with session_scope():
        log.info("Populating DB")
        # Base data first, then the sequencing data that builds on it.
        for populator_cls in (DBPopulator, SequencingDBPopulator):
            populator_cls(creds, cfg).populate_all_data()
Example #10
0
def accession_tracking_lambda_handler(event, context):
    """AWS Lambda entry point: compile only the updated accessions."""
    cfg = Config()
    create_logger(cfg)

    creds = gutils.get_secrets_manager_credentials()
    tracker = AccessionTracking(cfg=cfg, google_creds=creds)
    tracker.compile_accessions(updates_only=True)
Example #11
0
def parse_qpcr_csv(args):
    """Process qPCR run CSVs for one or more sample-plate barcodes.

    For each barcode found in the qPCR log files (taken from Google Drive
    when ``args.use_gdrive`` is set, otherwise from ``args.qpcr_run_path``),
    this loads the Bravo metadata and accession data, processes well data
    and controls, and writes the results CSV, CB report, and final PDF
    into the run directory.

    Parameters
    ----------
    args :
        Parsed CLI arguments. Reads ``debug``, ``qpcr_run_path``,
        ``use_gdrive``, ``barcodes``, ``secret_id``, ``protocol``, and
        ``plate_map_file``.

    Raises
    ------
    ValueError
        If ``use_gdrive`` is set without barcodes, or if neither a plate
        map file nor Google Drive is available as an accession source.
    """
    cfg = Config()
    create_logger(cfg, debug=args.debug)

    logger.info(msg=f"Started local processing in: {args.qpcr_run_path}")

    if args.use_gdrive and not args.barcodes:
        raise ValueError(
            "You must specify barcodes to process from Google Drive")

    run_path = pathlib.Path(args.qpcr_run_path)

    google_credentials = gutils.get_secrets_manager_credentials(args.secret_id)

    drive_service = drive.get_service(google_credentials)
    collective_form = CollectiveForm(
        drive_service, cfg["DATA"]["collection_form_spreadsheet_id"])

    sample_metadata_form = collective_form[SampleMetadata.SHEET_NAME]
    rerun_form = collective_form[SampleRerun.SHEET_NAME]

    if args.use_gdrive:
        logs_folder_id = drive.get_folder_id_of_path(drive_service,
                                                     cfg.PCR_LOGS_FOLDER)
        # Materialize the listing directly; the previous list comprehension
        # was an identity wrapper (ruff PERF402).
        logs_folder_contents = list(
            drive.get_contents_by_folder_id(
                drive_service, logs_folder_id, only_files=True))

        plate_layout_folder_id = drive.get_folder_id_of_path(
            drive_service, cfg.PLATE_LAYOUT_FOLDER)
    else:
        logs_folder_contents = run_path.glob("*.csv")

    # Group run files by barcode, keeping only requested barcodes (if any).
    barcodes_to_process = defaultdict(RunFiles)
    for run_file in logs_folder_contents:
        m = RunFiles.get_qpcr_file_type(run_file.name)
        if m is None:
            continue
        elif args.barcodes and m[RunFiles.BARCODE] not in args.barcodes:
            continue
        else:
            barcodes_to_process[m[RunFiles.BARCODE]].add_file(m, run_file)

    for barcode, barcode_files in barcodes_to_process.items():
        # all files must be present, at least one quant_amp file
        if not barcode_files.all_files:
            message = f"Missing files for: {barcode}. Skipping for now"
            logger.info(msg=message)
            continue

        logger.info(msg=f"Found sample to process, barcode: {barcode}")

        logger.info(msg=f"Getting metadata and data for: {barcode}")
        bravo_metadata = BravoMetadata.load_from_spreadsheet(
            barcode, collective_form)
        if args.protocol is not None:
            # user specified the protocol
            protocol = get_protocol(args.protocol)
        else:
            protocol = get_protocol(bravo_metadata.sop_protocol)

        # The protocol's mapping defines which quant-amp files are required.
        if not set(barcode_files.quant_amp).issuperset(protocol.mapping):
            missing = map(str,
                          set(protocol.mapping) - set(barcode_files.quant_amp))
            message = f"Missing quant amp files for {barcode}: {', '.join(missing)}"
            logger.critical(msg=message)
            continue

        # Accession data comes from an explicit plate map file if given,
        # otherwise from Google Drive (with rerun handling).
        if args.plate_map_file is not None:
            plate_map_type = accession.get_plate_map_type_from_name(
                args.plate_map_file.name)
            accession_data = accession.read_accession_data(
                plate_map_type, args.plate_map_file)
        elif args.use_gdrive:
            accession_data = accession.get_accession_data_with_rerun(
                drive_service,
                plate_layout_folder_id,
                sample_metadata_form,
                rerun_form,
                bravo_metadata.sample_barcode,
            )
        else:
            raise ValueError(
                "You must provide a plate map file or use Google Drive")

        control_wells = get_control_wells_from_type(
            controls_type=bravo_metadata.controls_type,
            accession_data=accession_data,
        )
        # check for valid accessions
        update_accession_data_with_controls(control_wells, accession_data,
                                            barcode)

        # process well data and check controls, return results
        logger.info(msg=f"Processing well data and controls for: {barcode}")

        processing_results = process_barcode(
            cfg,
            barcode,
            barcode_files,
            bravo_metadata,
            protocol,
            control_wells,
            accession_data,
        )

        with (run_path / processing_results.results_filename).open("w") as fh:
            processing_results.write_results(fh)

        with (run_path /
              processing_results.cb_report_filename).open("w") as fh:
            processing_results.write_cb_report(fh)

        # create pdf report
        logger.info(msg=f"Generating results PDF for: {barcode}")
        final_pdf_filename = run_path / processing_results.final_pdf_filename
        # Use Path.open for consistency with the writes above.
        with final_pdf_filename.open("wb") as output_file:
            create_final_pdf(processing_results, output_file)