Example 1
def gdrive_processing(args):
    cfg = Config()
    create_logger(cfg, debug=args.debug)

    google_credentials = gutils.get_secrets_manager_credentials(args.secret_id)

    processing(cfg, google_credentials)
Example 2
def create_layout_pdf(cfg: Config, entry_data: Dict[str, str]):
    """Main function to read a layout file and write the resulting plate layout map.

    Parameters
    ----------
    cfg: Config
        configuration information
    entry_data: Dict[str, str]
        dictionary containing the response that was submitted to the Sample Plate
        Metadata form. The required keys are the researcher name, timestamp, sample
        plate barcode, and a link to the sample plate map in Google Drive. Optionally,
        the "local_run" key is used as a flag to indicate the script is being run from
        the command line rather than on AWS.
    """
    sample_barcode = entry_data[SampleMetadata.SAMPLE_PLATE_BARCODE]
    output_filename = f"{sample_barcode}.pdf"

    if LOCAL_RUN in entry_data:
        # local command-line run: write the PDF into the given output directory
        output_path, drive_service = entry_data[LOCAL_RUN]
        output_file_object = (output_path / output_filename).open("wb")
    else:
        # AWS run: upload the resulting PDF to the configured Drive folder
        logger.debug("getting gdrive credentials")
        google_creds = gutils.get_secrets_manager_credentials()
        drive_service = drive.get_service(google_creds)

        processed_layout_folder_id = drive.get_folder_id_of_path(
            drive_service, cfg.LAYOUT_PDF_FOLDER)

        output_file_object = drive.put_file(
            drive_service,
            processed_layout_folder_id,
            output_filename,
            binary=True,
        )

    try:
        plate_map_file = drive.get_layout_file_from_url(
            drive_service, entry_data[SampleMetadata.SAMPLE_PLATE_MAP])
    except KeyError:
        raise BadDriveURL(
            f"Bad URL in {SampleMetadata.SHEET_NAME} for {sample_barcode}")

    plate_map_type = accession.get_plate_map_type_from_name(
        plate_map_file.name)

    with plate_map_file.open() as fh:
        accession_data = accession.read_accession_data(plate_map_type, fh)

    logger.info(f"Writing layout map to {output_filename}")
    with output_file_object as output_fh:
        format_pdf(
            entry_data[SampleMetadata.SAMPLE_PLATE_BARCODE],
            accession_data,
            entry_data[SampleMetadata.RESEARCHER_NAME],
            format_time(cfg, entry_data[SampleMetadata.TIMESTAMP]),
            output_fh,
        )
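A minimal sketch of how create_layout_pdf might be invoked for a local run, based on the keys described in the docstring. The SampleMetadata constants and LOCAL_RUN flag are the ones used above; the literal values and output directory are purely illustrative (Example 7 shows the real command-line wiring).

# Hypothetical local-run invocation; values are illustrative only.
entry_data = {
    SampleMetadata.RESEARCHER_NAME: "A. Researcher",
    SampleMetadata.TIMESTAMP: "2021-01-01 12:00:00",
    SampleMetadata.SAMPLE_PLATE_BARCODE: "SP000001",
    SampleMetadata.SAMPLE_PLATE_MAP: "https://drive.google.com/...",
    # local run: tuple of (output directory, Drive service), as unpacked above
    LOCAL_RUN: (Path("."), drive_service),
}
create_layout_pdf(cfg=Config(), entry_data=entry_data)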
Example 3
def lambda_handler(event, context):
    cfg = Config()
    create_logger(cfg)

    try:
        google_credentials = gutils.get_secrets_manager_credentials()
        processing(cfg, google_credentials)
    except Exception:
        logger.critical(f"Error in [{cfg.aws_env}]", exc_info=True)
        raise
Example 4
def compile_accessions_cli(args):
    cfg = Config()
    create_logger(cfg, debug=args.debug)
    accession_tracker = AccessionTracking(
        cfg=cfg,
        google_creds=gutils.get_secrets_manager_credentials(args.secret_id))
    accession_tracker.compile_accessions(
        sample_barcodes=args.sample_barcodes,
        run_path=args.run_path,
        updates_only=args.updates_only,
    )
Example 5
def lambda_handler(event, context):
    try:
        cfg = Config()
        create_logger(cfg)
        creds = gutils.get_secrets_manager_credentials()
        os.chdir("/tmp")

        # Set up for Google Drive
        drive_service = drive.get_service(creds)
        sheets_service = sheets.get_service(creds)

        # Route based on the actions
        action = None
        if "body" in event:
            event_body = json.loads(event["body"])
            logger.info(msg=f"EVENT_BODY: {event_body}")
            action = event_body["action"]

        logger.info(msg=f"ACTION: {action}")
        if action == "external_sample_shipment":
            external_sample_shipment.handle_external_sample_shipment_request(
                cfg, drive_service, event_body)
        elif action == "sample_database":
            sample_database.external_sample_database(cfg, drive_service,
                                                     sheets_service,
                                                     event_body)
        elif action == "draw_96_plate_map":
            draw_96_plate_map.draw_96_plate_map(cfg, drive_service,
                                                sheets_service, event_body)
        elif action == "concat_96_384":
            concat_96_384.concat_96_384(cfg, drive_service, sheets_service,
                                        event_body)
        elif action == "bind_index_plate":
            bind_index_plate.handle_bind_index_plate_request(
                cfg, drive_service, sheets_service, event_body)
        elif action == "update_ripe_samples":
            update_ripe_samples.update_ripe_samples(cfg, drive_service,
                                                    sheets_service, event_body)
        elif action == "metadata_lookup":
            metadata_lookup.metadata_lookup(cfg, drive_service, sheets_service,
                                            event_body)

        return {
            "statusCode": 200,
            "headers": {
                "Content-Type": "text/html"
            },
            "body": "OK",
        }
    except Exception as err:
        slack.post(f"*Error in mNGS scripts:*\n{err}")
        raise
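The if/elif routing above could also be written as a dispatch table. A sketch, assuming each handler keeps the signature shown in the corresponding branch (the one handler that does not take the Sheets service ignores that argument):

# Sketch of a dispatch-table alternative to the if/elif chain; not the deployed handler.
ACTION_HANDLERS = {
    "external_sample_shipment": lambda cfg, drv, sheet, body:
        external_sample_shipment.handle_external_sample_shipment_request(cfg, drv, body),
    "sample_database": sample_database.external_sample_database,
    "draw_96_plate_map": draw_96_plate_map.draw_96_plate_map,
    "concat_96_384": concat_96_384.concat_96_384,
    "bind_index_plate": bind_index_plate.handle_bind_index_plate_request,
    "update_ripe_samples": update_ripe_samples.update_ripe_samples,
    "metadata_lookup": metadata_lookup.metadata_lookup,
}

handler = ACTION_HANDLERS.get(action)
if handler is not None:
    handler(cfg, drive_service, sheets_service, event_body)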
Example 6
def fetch_barcodes(args, cfg):
    google_credentials = gutils.get_secrets_manager_credentials(args.secret_id)
    drive_service = drive.get_service(google_credentials)

    # qpcr logs folder
    logs_folder_id = drive.get_folder_id_of_path(drive_service,
                                                 cfg.PCR_LOGS_FOLDER)
    logs_folder_contents = drive.get_contents_by_folder_id(drive_service,
                                                           logs_folder_id,
                                                           only_files=True)

    barcodes_to_fetch = defaultdict(RunFiles)
    for entry in logs_folder_contents:
        m = RunFiles.get_qpcr_file_type(entry.name)
        if m is None:
            continue
        elif m[RunFiles.BARCODE] in args.barcodes:
            barcodes_to_fetch[m[RunFiles.BARCODE]].add_file(m, entry)

    for barcode, barcode_files in barcodes_to_fetch.items():
        # all files must be present, at least one quant_amp file
        if not barcode_files.all_files:
            logger.warning(msg=f"Missing files for {barcode}!")
            continue

        logger.info(msg=f"Found sample to fetch: {barcode}")

        # read in the run information and quant cq
        run_info = barcode_files.run_info
        logger.info(msg=f"    Downloading: {run_info.name}")
        with drive.get_file(drive_service, run_info.id, binary=False) as fh:
            with (args.output_dir / run_info.name).open("w") as out:
                out.write(fh.read())

        quant_cq = barcode_files.quant_cq
        logger.info(msg=f"    Downloading: {quant_cq.name}")
        with drive.get_file(drive_service, quant_cq.id, binary=False) as fh:
            with (args.output_dir / quant_cq.name).open("w") as out:
                out.write(fh.read())

        for quant_amp in barcode_files.quant_amp.values():
            logger.info(msg=f"    Downloading: {quant_amp.name}")
            with drive.get_file(drive_service, quant_amp.id,
                                binary=False) as fh:
                with (args.output_dir / quant_amp.name).open("w") as out:
                    out.write(fh.read())
Example 7
def main():
    parser = argparse.ArgumentParser()

    parser.add_argument("barcodes", nargs="+")
    parser.add_argument("--output-dir", type=Path, default=Path("."))

    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--secret-id", default="covid-19/google_creds")

    args = parser.parse_args()

    cfg = Config()
    create_logger(cfg, debug=args.debug)

    google_creds = gutils.get_secrets_manager_credentials(args.secret_id)
    drive_service = drive.get_service(google_creds)

    logger.debug("Downloading collective form")
    collective_form = CollectiveForm(
        drive_service, cfg["DATA"]["collection_form_spreadsheet_id"])
    sample_plate_metadata = collective_form[SampleMetadata.SHEET_NAME]

    for barcode in args.barcodes:
        try:
            metadata_row = clean_single_row(
                sample_plate_metadata, SampleMetadata.SAMPLE_PLATE_BARCODE,
                barcode)
        except MetadataNotFoundError:
            logger.error(f"0 results for {barcode}, skipping")
            continue
        except MultipleRowsError as ex:
            logger.error(f"{ex.match_count} results for {barcode}, skipping")
            continue
        metadata_row[SampleMetadata.TIMESTAMP] = str(
            metadata_row[SampleMetadata.TIMESTAMP])
        metadata_row[LOCAL_RUN] = (args.output_dir, drive_service)

        logger.debug(f"Making layout PDF for {barcode}")
        create_layout_pdf(cfg=cfg, entry_data=metadata_row)
Example 8
def lambda_handler(event, context):
    cfg = Config()
    create_logger(cfg)

    log.info("Starting DB population process")
    engine = init_db(util.get_db_uri(f"cliahub/cliahub_{cfg.aws_env}"))

    log.debug("Getting google credentials")
    google_creds = get_secrets_manager_credentials()

    if event.get("CLEAR_DATABASE", False):
        log.info("Deleting existing tables")
        delete_tables(engine)

        log.info("Recreating schema")
        create_tables_and_schema(engine)

    with session_scope():
        log.info("Populating DB")
        db_populator = DBPopulator(google_creds, cfg)
        db_populator.populate_all_data()

        ngs_populator = SequencingDBPopulator(google_creds, cfg)
        ngs_populator.populate_all_data()
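Since the handler only inspects the CLEAR_DATABASE key of the event, a manual invocation for a full rebuild might look like the following (illustrative only; the context argument is unused above):

# Illustrative manual invocation; drops and recreates the schema before repopulating.
lambda_handler({"CLEAR_DATABASE": True}, context=None)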
Example 9
def populate_sequencing_db(ctx, google_secret):
    google_creds = get_secrets_manager_credentials(google_secret)

    db_populator = SequencingDBPopulator(google_creds, ctx.obj["CONFIG"])
    db_populator.populate_all_data()
Example 10
def credentials_for_tests() -> service_account.Credentials:
    return get_secrets_manager_credentials(
        secret_id="covid-19/google_test_creds")
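If this helper is meant to back a pytest fixture (an assumption; no decorator is shown above), the wiring might look like this sketch. The fixture and test names are illustrative only.

# Hypothetical pytest wiring around credentials_for_tests.
import pytest

@pytest.fixture(scope="session")
def google_creds():
    return credentials_for_tests()

def test_drive_service_builds(google_creds):
    # assumes drive.get_service accepts service-account credentials, as in the examples above
    service = drive.get_service(google_creds)
    assert service is not None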
Example 11
def accession_tracking_lambda_handler(event, context):
    cfg = Config()
    create_logger(cfg)
    accession_tracker = AccessionTracking(
        cfg=cfg, google_creds=gutils.get_secrets_manager_credentials())
    accession_tracker.compile_accessions(updates_only=True)
Example 12
def parse_qpcr_csv(args):
    cfg = Config()
    create_logger(cfg, debug=args.debug)

    logger.info(msg=f"Started local processing in: {args.qpcr_run_path}")

    if args.use_gdrive and not args.barcodes:
        raise ValueError(
            "You must specify barcodes to process from Google Drive")

    run_path = pathlib.Path(args.qpcr_run_path)

    google_credentials = gutils.get_secrets_manager_credentials(args.secret_id)

    drive_service = drive.get_service(google_credentials)
    collective_form = CollectiveForm(
        drive_service, cfg["DATA"]["collection_form_spreadsheet_id"])

    sample_metadata_form = collective_form[SampleMetadata.SHEET_NAME]
    rerun_form = collective_form[SampleRerun.SHEET_NAME]

    if args.use_gdrive:
        logs_folder_id = drive.get_folder_id_of_path(drive_service,
                                                     cfg.PCR_LOGS_FOLDER)
        logs_folder_contents = list(
            drive.get_contents_by_folder_id(
                drive_service, logs_folder_id, only_files=True))

        plate_layout_folder_id = drive.get_folder_id_of_path(
            drive_service, cfg.PLATE_LAYOUT_FOLDER)
    else:
        logs_folder_contents = run_path.glob("*.csv")

    barcodes_to_process = defaultdict(RunFiles)
    for run_file in logs_folder_contents:
        m = RunFiles.get_qpcr_file_type(run_file.name)
        if m is None:
            continue
        elif args.barcodes and m[RunFiles.BARCODE] not in args.barcodes:
            continue
        else:
            barcodes_to_process[m[RunFiles.BARCODE]].add_file(m, run_file)

    for barcode, barcode_files in barcodes_to_process.items():
        # all files must be present, at least one quant_amp file
        if not barcode_files.all_files:
            message = f"Missing files for: {barcode}. Skipping for now"
            logger.info(msg=message)
            continue

        logger.info(msg=f"Found sample to process, barcode: {barcode}")

        logger.info(msg=f"Getting metadata and data for: {barcode}")
        bravo_metadata = BravoMetadata.load_from_spreadsheet(
            barcode, collective_form)
        if args.protocol is not None:
            # user specified the protocol
            protocol = get_protocol(args.protocol)
        else:
            protocol = get_protocol(bravo_metadata.sop_protocol)

        if not set(barcode_files.quant_amp).issuperset(protocol.mapping):
            missing = map(str,
                          set(protocol.mapping) - set(barcode_files.quant_amp))
            message = f"Missing quant amp files for {barcode}: {', '.join(missing)}"
            logger.critical(msg=message)
            continue

        if args.plate_map_file is not None:
            plate_map_type = accession.get_plate_map_type_from_name(
                args.plate_map_file.name)
            accession_data = accession.read_accession_data(
                plate_map_type, args.plate_map_file)
        elif args.use_gdrive:
            accession_data = accession.get_accession_data_with_rerun(
                drive_service,
                plate_layout_folder_id,
                sample_metadata_form,
                rerun_form,
                bravo_metadata.sample_barcode,
            )
        else:
            raise ValueError(
                "You must provide a plate map file or use Google Drive")

        control_wells = get_control_wells_from_type(
            controls_type=bravo_metadata.controls_type,
            accession_data=accession_data,
        )
        # check for valid accessions
        update_accession_data_with_controls(control_wells, accession_data,
                                            barcode)

        # process well data and check controls, return results
        logger.info(msg=f"Processing well data and controls for: {barcode}")

        processing_results = process_barcode(
            cfg,
            barcode,
            barcode_files,
            bravo_metadata,
            protocol,
            control_wells,
            accession_data,
        )

        with (run_path / processing_results.results_filename).open("w") as fh:
            processing_results.write_results(fh)

        with (run_path /
              processing_results.cb_report_filename).open("w") as fh:
            processing_results.write_cb_report(fh)

        # create pdf report
        logger.info(msg=f"Generating results PDF for: {barcode}")
        final_pdf_filename = run_path / processing_results.final_pdf_filename
        with open(final_pdf_filename, "wb") as output_file:
            create_final_pdf(processing_results, output_file)
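For reference, a sketch of the command-line arguments parse_qpcr_csv appears to expect, inferred from the attributes it reads; the flag names, types, and defaults here are assumptions, not the project's actual parser.

# Hypothetical argparse wiring inferred from the attributes read by parse_qpcr_csv.
import argparse
import pathlib

parser = argparse.ArgumentParser()
parser.add_argument("qpcr_run_path", type=pathlib.Path)
parser.add_argument("--barcodes", nargs="*", default=None)
parser.add_argument("--use-gdrive", action="store_true")
parser.add_argument("--plate-map-file", type=argparse.FileType("r"), default=None)
parser.add_argument("--protocol", default=None)
parser.add_argument("--secret-id", default="covid-19/google_creds")
parser.add_argument("--debug", action="store_true")

parse_qpcr_csv(parser.parse_args())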