# Example 1
def upload(table_name, document_file):
    """
    Upload documents into a receiving table.

    <table> must be the name of a table in the receiving schema which has a
    "document" column.  All other columns in the table must be optional on
    insert, as only "document" is provided.

    <documents.ndjson> must be a newline-delimited JSON file containing one
    document per line to insert as a table row.

    Commits on success; rolls back and re-raises on any exception.
    """
    db = DatabaseSession()

    try:
        LOG.info(f"Copying documents from {document_file.name}")

        row_count = db.copy_from_ndjson(("receiving", table_name, "document"), document_file)

        LOG.info(f"Received {row_count:,} {table_name} records")
        LOG.info("Committing all changes")
        db.commit()

    # BaseException (not a bare except:) so even KeyboardInterrupt/SystemExit
    # roll the transaction back before propagating.
    except BaseException:
        LOG.info("Rolling back all changes; the database will not be modified")
        db.rollback()
        raise
# Example 2
def upload(manifest_file):
    """
    Upload manifest records into the database receiving area.

    <manifest.ndjson> must be a newline-delimited JSON file produced by this
    command's sibling commands.

    Once records are uploaded, the manifest ETL routine will reconcile the
    manifest records with known identifiers and existing samples.

    Commits on success; rolls back and re-raises on any exception.
    """
    db = DatabaseSession()

    try:
        # Use .name, consistent with the other upload commands: open file
        # objects expose .name, not .path.
        LOG.info(f"Copying sample manifest records from {manifest_file.name}")

        row_count = db.copy_from_ndjson(("receiving", "manifest", "document"), manifest_file)

        LOG.info(f"Received {row_count:,} manifest records")
        LOG.info("Committing all changes")
        db.commit()

    # BaseException (not a bare except:) so even KeyboardInterrupt/SystemExit
    # roll the transaction back before propagating.
    except BaseException:
        LOG.info("Rolling back all changes; the database will not be modified")
        db.rollback()
        raise
# Example 3
def upload(consensus_genome_file):
    """
    Upload consensus genomes and summary statistics to the warehouse receiving area.

    Consensus genomes and summary statistics should be in newline-delimited JSON
    format that matches those generated by the assembly pipeline.

    Commits on success; rolls back and re-raises on any exception.
    """
    db = DatabaseSession()

    try:
        LOG.info(
            f"Copying consensus genome records from {consensus_genome_file.name}"
        )

        row_count = db.copy_from_ndjson(
            ("receiving", "consensus_genome", "document"),
            consensus_genome_file)

        LOG.info(f"Received {row_count:,} consensus genome records")
        LOG.info("Committing all changes")
        db.commit()

    # BaseException (not a bare except:) so even KeyboardInterrupt/SystemExit
    # roll the transaction back before propagating.
    except BaseException:
        LOG.info("Rolling back all changes; the database will not be modified")
        db.rollback()
        raise
# Example 4
        def decorated(*args, action, **kwargs):
            """
            Run *command* inside a database session, then commit or roll back
            according to *action*.

            A fresh DatabaseSession is injected as the ``db`` keyword argument;
            when ``pass_action`` is set in the enclosing scope, *action* is
            forwarded to the command as well.  On success or failure, the
            ``finally`` block decides the transaction's fate:

            * PROMPT — ask the operator (wording depends on whether the
              command finished cleanly);
            * COMMIT — commit unconditionally;
            * anything else — roll back.
            """
            db = DatabaseSession()

            kwargs["db"] = db

            if pass_action:
                kwargs["action"] = action

            # Tri-state: None = command never ran to a verdict (shouldn't be
            # observable), True/False set by the else/except branches below.
            processed_without_error = None

            try:
                command(*args, **kwargs)

            except Exception as error:
                processed_without_error = False

                LOG.error(f"Aborting with error: {error}")
                # "from None" suppresses the original exception context in the
                # re-raised traceback.
                raise error from None

            else:
                processed_without_error = True

            finally:
                if action is DatabaseSessionAction.PROMPT:
                    # Adjust the prompt wording to reflect whether we're
                    # committing a full run or a partial one.
                    ask_to_commit = \
                        "Commit all changes?" if processed_without_error else \
                        "Commit successfully processed records up to this point?"

                    commit = click.confirm(ask_to_commit)
                else:
                    commit = action is DatabaseSessionAction.COMMIT

                if commit:
                    LOG.info(
                        "Committing all changes" if processed_without_error else \
                        "Committing successfully processed records up to this point")
                    db.commit()

                else:
                    LOG.info(
                        "Rolling back all changes; the database will not be modified"
                    )
                    db.rollback()
# Example 5
def upload(det_file):
    """
    Upload REDCap DET notifications into database receiving area.

    <det.ndjson> must be a newline-delimited JSON file produced by this
    command's sibling command.

    Commits on success; rolls back and re-raises on any exception.
    """
    db = DatabaseSession()

    try:
        LOG.info(f"Copying REDCap DET records from {det_file.name}")

        row_count = db.copy_from_ndjson(
            ("receiving", "redcap_det", "document"), det_file)

        LOG.info(f"Received {row_count:,} DET records")
        LOG.info("Committing all changes")
        db.commit()

    # BaseException (not a bare except:) so even KeyboardInterrupt/SystemExit
    # roll the transaction back before propagating.
    except BaseException:
        LOG.info("Rolling back all changes; the database will not be modified")
        db.rollback()
        raise
def notify(*, action: str):
    """
    Send Slack notifications for unprocessed reportable condition records.

    Fetches reportable condition rows not yet stamped with this REVISION,
    posts each to the main "ncov-reporting" Slack webhook plus any
    project-specific webhook whose collection set matches, and marks a record
    processed only when every POST for it succeeded.

    *action* controls the final transaction outcome: "prompt" asks the
    operator, "commit" commits, anything else rolls back.
    """
    LOG.debug(f"Starting the reportable conditions notification routine, revision {REVISION}")

    db = DatabaseSession()

    def webhook(suffix):
        # The test webhook, when set, overrides every per-project webhook.
        return os.environ.get("SLACK_WEBHOOK_ALERTS_TEST") \
            or os.environ[f"SLACK_WEBHOOK_REPORTING_{suffix}"]

    slack_webhooks = {
        "ncov-reporting": webhook("HCOV19"),
    }

    # Study-specific routing: a record whose collection set matches also goes
    # to that project's channel, in addition to the main channel above.
    projects = [
        {
            "collection_sets": {"collections-childcare"},
            "slack_channel_name": "ncov-reporting-childcare",
            "slack_webhook": webhook("HCOV19_CHILDCARE"),
        },

        {
            "collection_sets": {"collections-apple-respiratory"},
            "slack_channel_name": "ncov-reporting-apple",
            "slack_webhook": webhook("HCOV19_APPLE"),
        },

        {
            "collection_sets": {"collections-school-testing-home",
              "collections-school-testing-observed",
              "collections-radxup-yakima-schools-home",
              "collections-radxup-yakima-schools-observed"},
            "slack_channel_name": "ncov-reporting-schools",
            "slack_webhook": webhook("HCOV19_SCHOOLS"),
        },

        {
            "collection_sets": {"collections-adult-family-home-outbreak"},
            "slack_channel_name": "ncov-reporting-afh",
            "slack_webhook": webhook("HCOV19_AFH"),
        },

        {
            "collection_sets": {"collections-workplace-outbreak"},
            "slack_channel_name": "ncov-reporting-workplace",
            "slack_webhook": webhook("HCOV19_WORKPLACE"),
        }
    ]

    # Fetch and iterate over reportable condition records that aren't processed
    #
    # Rows we fetch are locked for update so that two instances of this
    # command don't try to process the same reportable condition records.
    LOG.debug("Fetching unprocessed reportable conditions records")

    reportable_conditions = db.cursor("reportable_conditions")
    reportable_conditions.execute("""
        select reportable_condition_v1.*, presence_absence_id as id
            from shipping.reportable_condition_v1
            join warehouse.presence_absence using (presence_absence_id)
        where details @> %s is not true
        order by id
            for update of presence_absence;
        """, (Json({"reporting_log":[{ "revision": REVISION }]}),))

    processed_without_error = None

    try:
        for record in reportable_conditions:
            # Savepoint per record so one failure doesn't poison the whole
            # transaction for records already processed.
            with db.savepoint(f"reportable condition presence_absence_id {record.id}"):
                LOG.info(f"Processing reportable condition, presence_absence_id «{record.id}»")

                if not record.site:
                    LOG.info(f"No site found for presence_absence_id «{record.id}». " +
                        "Inferring site from manifest data.")

                responses = {'ncov-reporting': send_slack_post_request(record, slack_webhooks['ncov-reporting'])}

                # Also send study-specific results to their respective channels
                for project in projects:
                    if (record.collection_set_name in project['collection_sets']):
                        responses[project['slack_channel_name']] = send_slack_post_request(
                            record, project['slack_webhook'])

                # Check all POSTs to Slack were successful to mark as processed
                # This does mean that if one fails but others succeed, there
                # will be duplicate POSTs to the already succeeded channels.
                # The chance of this happening is pretty small, but we can
                # revisit this if it becomes a common problem
                #   -Jover, 21 October 2020
                if all(response.status_code == 200 for response in responses.values()):
                    mark_processed(db, record.id, {"status": "sent Slack notification"})
                    LOG.info(f"Finished processing presence_absence_id «{record.id}»")

                else:
                    for channel, response in responses.items():
                        if response.status_code != 200:
                            LOG.error(("Error: A Slack notification could not " \
                            f"be sent to the channel «{channel}» for "
                            f"presence_absence_id «{record.id}».\n" \
                            f"Slack API returned status code {response.status_code}: "\
                            f"{response.text}"))

    except Exception as error:
        processed_without_error = False

        # Include the exception itself — the previous message was a
        # placeholder-less f-string that dropped it.
        LOG.error(f"Aborting with error: {error}")
        raise error from None

    else:
        processed_without_error = True

    finally:
        if action == "prompt":
            ask_to_commit = \
                "Commit all changes?" if processed_without_error else \
                "Commit successfully processed reportable condition records up to this point?"

            commit = click.confirm(ask_to_commit)
        else:
            commit = action == "commit"

        if commit:
            LOG.info(
                "Committing all changes" if processed_without_error else \
                "Committing successfully processed reportable condition records up to this point")
            db.commit()

        else:
            LOG.info("Rolling back all changes; the database will not be modified")
            db.rollback()