def upload(table_name, document_file):
    """
    Upload documents into a receiving table.

    <table> must be the name of a table in the receiving schema which has a
    "document" column.  Every other column of the table must be optional on
    insert, since "document" is the only value provided.

    <documents.ndjson> must be a newline-delimited JSON file with one
    document per line; each line becomes one table row.
    """
    session = DatabaseSession()

    # Target (schema, table, column) triple for the NDJSON copy.
    destination = ("receiving", table_name, "document")

    try:
        LOG.info(f"Copying documents from {document_file.name}")

        copied = session.copy_from_ndjson(destination, document_file)

        LOG.info(f"Received {copied:,} {table_name} records")
        LOG.info("Committing all changes")
        session.commit()

    except:
        # Bare except on purpose: roll back on *any* interruption
        # (including KeyboardInterrupt) before re-raising.
        LOG.info("Rolling back all changes; the database will not be modified")
        session.rollback()
        raise
def upload(manifest_file):
    """
    Upload manifest records into the database receiving area.

    <manifest.ndjson> must be a newline-delimited JSON file produced by this
    command's sibling commands.

    Once records are uploaded, the manifest ETL routine will reconcile the
    manifest records with known identifiers and existing samples.
    """
    db = DatabaseSession()

    try:
        # Fix: file objects expose ``.name``, not ``.path`` — the sibling
        # upload commands all log ``.name``; ``.path`` raised AttributeError.
        LOG.info(f"Copying sample manifest records from {manifest_file.name}")

        row_count = db.copy_from_ndjson(
            ("receiving", "manifest", "document"), manifest_file)

        LOG.info(f"Received {row_count:,} manifest records")
        LOG.info("Committing all changes")
        db.commit()

    except:
        # Roll back on any failure (bare except is deliberate so even
        # KeyboardInterrupt triggers a rollback), then re-raise.
        LOG.info("Rolling back all changes; the database will not be modified")
        db.rollback()
        raise
def upload(consensus_genome_file):
    """
    Upload consensus genomes and summary statistics to the warehouse
    receiving area.

    Input must be newline-delimited JSON whose documents match those
    generated by the assembly pipeline.
    """
    session = DatabaseSession()

    try:
        LOG.info(f"Copying consensus genome records from {consensus_genome_file.name}")

        copied = session.copy_from_ndjson(
            ("receiving", "consensus_genome", "document"),
            consensus_genome_file)

        LOG.info(f"Received {copied:,} consensus genome records")
        LOG.info("Committing all changes")
        session.commit()

    except:
        # Bare except on purpose: roll back on *any* interruption
        # before re-raising.
        LOG.info("Rolling back all changes; the database will not be modified")
        session.rollback()
        raise
def decorated(*args, action, **kwargs):
    """
    Run the wrapped ``command`` inside a database session, then commit or
    roll back according to *action*.

    Opens a DatabaseSession and injects it as the ``db`` keyword argument
    (and, when ``pass_action`` is set in the enclosing scope, also passes
    ``action`` through).  The commit/rollback decision lives in the
    ``finally`` block so it runs on success AND on failure.

    NOTE(review): ``pass_action``, ``command``, and ``DatabaseSessionAction``
    are closure variables from an enclosing decorator factory not visible
    here — confirm their semantics against the factory definition.
    """
    db = DatabaseSession()
    kwargs["db"] = db

    if pass_action:
        kwargs["action"] = action

    # Tri-state flag: None = command never completed the try block,
    # True = clean run, False = aborted by an exception.
    processed_without_error = None

    try:
        command(*args, **kwargs)
    except Exception as error:
        processed_without_error = False
        LOG.error(f"Aborting with error: {error}")
        # ``from None`` suppresses exception chaining so the re-raise
        # doesn't bury the original traceback under this handler.
        raise error from None
    else:
        processed_without_error = True
    finally:
        # Decide commit vs. rollback even while an exception propagates.
        if action is DatabaseSessionAction.PROMPT:
            # Ask the operator; phrase the question based on whether the
            # run finished cleanly or was cut short.
            ask_to_commit = \
                "Commit all changes?" if processed_without_error else \
                "Commit successfully processed records up to this point?"

            commit = click.confirm(ask_to_commit)
        else:
            commit = action is DatabaseSessionAction.COMMIT

        if commit:
            LOG.info(
                "Committing all changes" if processed_without_error else \
                "Committing successfully processed records up to this point")

            db.commit()
        else:
            LOG.info(
                "Rolling back all changes; the database will not be modified"
            )
            db.rollback()
def upload(det_file):
    """
    Upload REDCap DET notifications into database receiving area.

    <det.ndjson> must be a newline-delimited JSON file produced by this
    command's sibling command.
    """
    session = DatabaseSession()

    try:
        LOG.info(f"Copying REDCap DET records from {det_file.name}")

        received = session.copy_from_ndjson(
            ("receiving", "redcap_det", "document"), det_file)

        LOG.info(f"Received {received:,} DET records")
        LOG.info("Committing all changes")
        session.commit()

    except:
        # Bare except on purpose: roll back on *any* interruption
        # before re-raising.
        LOG.info("Rolling back all changes; the database will not be modified")
        session.rollback()
        raise
def mint(set_name, count, *, labels, layout, quiet, dry_run):
    """
    Mint new identifiers and make barcode labels.

    <set name> must be an existing identifier set, e.g. as listed by the
    `id3c identifier set ls` command.

    <count> is how many new identifiers to mint.

    When --labels is given, a PDF of printable barcode labels is produced
    via the Lab Labels¹ instance <https://backoffice.seattleflu.org/labels/>.
    Point the LABEL_API environment variable at another instance's URL to
    use it instead.

    When --layout is given, labels are rendered with that layout version,
    if available.

    ¹ https://github.com/MullinsLab/Lab-Labels
    """
    db_session = DatabaseSession()

    with db_session:
        new_identifiers = db.mint_identifiers(db_session, set_name, count)

        if dry_run:
            # Minting happened inside the transaction, so a rollback
            # discards it entirely.
            LOG.info("Rolling back all changes; the database will not be modified")
            db_session.rollback()

    if not quiet:
        # One identifier per line: barcode, tab, UUID.
        for new_id in new_identifiers:
            print(f"{new_id.barcode}\t{new_id.uuid}")

    if labels:
        pdf = labelmaker.generate_pdf(
            labelmaker.layout_identifiers(set_name, new_identifiers, layout))
        labels.write(pdf)
def notify(*, action: str):
    """
    Send Slack notifications for unprocessed reportable condition records.

    Each record is POSTed to the main "ncov-reporting" channel and, when its
    collection set matches a configured project, to that project's channel
    as well.  Records whose every POST succeeds are marked processed.

    *action* controls the final transaction outcome: "prompt" asks the
    operator, "commit" commits, anything else rolls back.
    """
    LOG.debug(f"Starting the reportable conditions notification routine, revision {REVISION}")

    db = DatabaseSession()

    # Prefer the test webhook (if set) over the real per-channel webhook,
    # so test runs never post to production channels.
    def webhook(suffix):
        return os.environ.get("SLACK_WEBHOOK_ALERTS_TEST") \
            or os.environ[f"SLACK_WEBHOOK_REPORTING_{suffix}"]

    # Every record goes to this channel regardless of project.
    slack_webhooks = {
        "ncov-reporting": webhook("HCOV19"),
    }

    # Project-specific routing: a record whose collection set appears in
    # "collection_sets" is additionally posted to that project's channel.
    projects = [
        {
            "collection_sets": {"collections-childcare"},
            "slack_channel_name": "ncov-reporting-childcare",
            "slack_webhook": webhook("HCOV19_CHILDCARE"),
        },
        {
            "collection_sets": {"collections-apple-respiratory"},
            "slack_channel_name": "ncov-reporting-apple",
            "slack_webhook": webhook("HCOV19_APPLE"),
        },
        {
            "collection_sets": {"collections-school-testing-home",
                "collections-school-testing-observed",
                "collections-radxup-yakima-schools-home",
                "collections-radxup-yakima-schools-observed"},
            "slack_channel_name": "ncov-reporting-schools",
            "slack_webhook": webhook("HCOV19_SCHOOLS"),
        },
        {
            "collection_sets": {"collections-adult-family-home-outbreak"},
            "slack_channel_name": "ncov-reporting-afh",
            "slack_webhook": webhook("HCOV19_AFH"),
        },
        {
            "collection_sets": {"collections-workplace-outbreak"},
            "slack_channel_name": "ncov-reporting-workplace",
            "slack_webhook": webhook("HCOV19_WORKPLACE"),
        }
    ]

    # Fetch and iterate over reportable condition records that aren't processed
    #
    # Rows we fetch are locked for update so that two instances of this
    # command don't try to process the same reportable condition records.
    LOG.debug("Fetching unprocessed reportable conditions records")

    reportable_conditions = db.cursor("reportable_conditions")
    # "Unprocessed" = details has no reporting_log entry for this REVISION;
    # the containment test (@>) keys on the revision number.
    reportable_conditions.execute("""
        select reportable_condition_v1.*, presence_absence_id as id
          from shipping.reportable_condition_v1
          join warehouse.presence_absence using (presence_absence_id)
         where details @> %s is not true
         order by id
           for update of presence_absence;
        """, (Json({"reporting_log":[{ "revision": REVISION }]}),))

    # Tri-state flag: None = loop never completed, True = clean run,
    # False = aborted by an exception.
    processed_without_error = None

    try:
        for record in reportable_conditions:
            # A savepoint per record so one bad record doesn't poison the
            # whole transaction.
            with db.savepoint(f"reportable condition presence_absence_id {record.id}"):
                LOG.info(f"Processing reportable condition, presence_absence_id «{record.id}»")

                if not record.site:
                    LOG.info(f"No site found for presence_absence_id «{record.id}». " +
                        "Inferring site from manifest data.")

                responses = {'ncov-reporting': send_slack_post_request(record, slack_webhooks['ncov-reporting'])}

                # Also send study-specific results to their respective channels
                for project in projects:
                    if (record.collection_set_name in project['collection_sets']):
                        responses[project['slack_channel_name']] = send_slack_post_request(
                            record, project['slack_webhook'])

                # Check all POSTs to Slack were successful to mark as processed
                # This does mean that if one fails but others succeed, there
                # will be duplicate POSTs to the already succeeded channels.
                # The chance of this happening is pretty small, but we can
                # revisit this if it becomes a common problem
                # -Jover, 21 October 2020
                if all(response.status_code == 200 for response in responses.values()):
                    mark_processed(db, record.id, {"status": "sent Slack notification"})
                    LOG.info(f"Finished processing presence_absence_id «{record.id}»")
                else:
                    # Log every failed channel; the record stays unprocessed
                    # and will be retried on the next run.
                    for channel, response in responses.items():
                        if response.status_code != 200:
                            LOG.error(("Error: A Slack notification could not " \
                                f"be sent to the channel «{channel}» for " \
                                f"presence_absence_id «{record.id}».\n" \
                                f"Slack API returned status code {response.status_code}: " \
                                f"{response.text}"))

    except Exception as error:
        processed_without_error = False
        LOG.error(f"Aborting with error")
        # ``from None`` suppresses chaining so only the original error shows.
        raise error from None
    else:
        processed_without_error = True
    finally:
        # Decide commit vs. rollback even while an exception propagates.
        if action == "prompt":
            ask_to_commit = \
                "Commit all changes?" if processed_without_error else \
                "Commit successfully processed reportable condition records up to this point?"

            commit = click.confirm(ask_to_commit)
        else:
            commit = action == "commit"

        if commit:
            LOG.info(
                "Committing all changes" if processed_without_error else \
                "Committing successfully processed reportable condition records up to this point")

            db.commit()
        else:
            LOG.info("Rolling back all changes; the database will not be modified")
            db.rollback()