def associate_sample(self, account_id, source_id, sample_id):
    """Attach a sample to a source, refusing if it is already assigned.

    Raises NotFound when the sample id does not exist, and RepoException
    when the sample is already attached to any source (same account or
    another account -- the caller must dissociate first either way).
    """
    lookup_query = (
        "SELECT "
        "ag_kit_barcode_id, "
        "source.account_id, "
        "source.id "
        "FROM "
        "ag_kit_barcodes "
        "LEFT OUTER JOIN source "
        "ON ag_kit_barcodes.source_id = source.id "
        "WHERE "
        "ag_kit_barcode_id = %s")
    with self._transaction.cursor() as cur:
        cur.execute(lookup_query, (sample_id, ))
        row = cur.fetchone()
        if row is None:
            raise werkzeug.exceptions.NotFound("No sample ID: %s"
                                               % sample_id)

        current_source_id = row[2]
        if current_source_id is None:
            # Unassigned sample: safe to associate with the source.
            self._update_sample_association(sample_id, source_id)
        else:
            # Already assigned -- whether within this account or another
            # one, the caller sees the same error. (The original code had
            # two branches for the two cases but raised identically.)
            raise RepoException("Sample is already assigned")
def dissociate_sample(self, account_id, source_id, sample_id,
                      override_locked=False):
    """Detach a sample from its source after wiping user-entered fields.

    Raises NotFound when the sample does not exist for this
    account/source, and RepoException when the sample is edit- or
    remove-locked (unless override_locked is True).
    """
    sample = self.get_sample(account_id, source_id, sample_id)
    if sample is None:
        raise werkzeug.exceptions.NotFound("No sample ID: %s" % sample_id)

    if sample.edit_locked and not override_locked:
        raise RepoException(
            "Sample information locked: Sample already evaluated for "
            "processing")

    # Blank out every user-entered field before detaching.
    blank_info = SampleInfo(sample_id, None, None, None)
    self.update_info(account_id, source_id, blank_info,
                     override_locked=override_locked)

    if sample.remove_locked and not override_locked:
        raise RepoException(
            "Sample association locked: Sample already received")

    # Finally, sever the sample/source association.
    self._update_sample_association(sample_id, None,
                                    override_locked=override_locked)
def scrub(self, account_id, source_id):
    """Overwrite identifying fields of a human source with placeholders.

    Parameters
    ----------
    account_id : str, uuid
        The account that owns the source
    source_id : str, uuid
        The source to scrub

    Raises
    ------
    RepoException
        If the source does not exist, is not human, or the update
        affected anything other than exactly one row

    Returns
    -------
    True if the record was updated, raises otherwise
    """
    source = self.get_source(account_id, source_id)
    if source is None:
        raise RepoException("Source not found")
    if source.source_type != Source.SOURCE_TYPE_HUMAN:
        raise RepoException("Source is not human")

    placeholder = "scrubbed"
    masked_email = "*****@*****.**"
    # Same timestamp is written to both date_revoked and update_time.
    revocation_time = datetime.datetime.now()

    with self._transaction.cursor() as cur:
        cur.execute(
            """UPDATE source
               SET source_name = %s,
               participant_email = %s,
               description = %s,
               parent_1_name = %s,
               parent_2_name = %s,
               date_revoked = %s,
               assent_obtainer = %s,
               update_time = %s
               WHERE
               id = %s""",
            (placeholder, masked_email, placeholder, placeholder,
             placeholder, revocation_time, placeholder, revocation_time,
             source_id))
        if cur.rowcount != 1:
            raise RepoException("Invalid source relation")
    return True
def verify_address(self, interested_user_id):
    """Validate and normalize the stored address of an interested user.

    Only addresses that have not been checked yet and have non-empty
    address_1, postal_code and country are eligible.

    Returns
    -------
    True if the address verified, False if it did not, None if there was
    no eligible address to check.

    Raises
    ------
    RepoException
        If the external verification call fails for any reason.
    """
    with self._transaction.dict_cursor() as cur:
        cur.execute(
            "SELECT address_1, address_2, city, state, postal_code, "
            "country "
            "FROM campaign.interested_users WHERE interested_user_id = %s "
            "AND address_checked = false AND address_1 != '' AND "
            "postal_code != '' AND country != ''",
            (interested_user_id, ))
        r = cur.fetchone()
        if r is None:
            return None

        # The original code had four except clauses (KeyError, ValueError,
        # RepoException, Exception) that all did exactly
        # `raise RepoException(e)`; a single Exception handler is
        # equivalent, and chaining the cause preserves the traceback.
        try:
            melissa_response = verify_address(r['address_1'],
                                              r['address_2'],
                                              r['city'],
                                              r['state'],
                                              r['postal_code'],
                                              r['country'])
        except Exception as e:
            raise RepoException(e) from e

        if melissa_response['valid'] is True:
            # For valid addresses, we append the latitude/longitude
            # and silently update the address to the Melissa-verified
            # version. However, we leave country alone to maintain
            # consistency with internal country names
            cur.execute(
                "UPDATE campaign.interested_users "
                "SET address_checked = true, address_valid = true, "
                "address_1 = %s, address_2 = %s, city = %s, "
                "state = %s, postal_code = %s, "
                "latitude = %s, longitude = %s "
                "WHERE interested_user_id = %s",
                (
                    melissa_response['address_1'],
                    melissa_response['address_2'],
                    melissa_response['city'],
                    melissa_response['state'],
                    melissa_response['postal'],
                    melissa_response['latitude'],
                    melissa_response['longitude'],
                    interested_user_id,
                ))
            return True
        else:
            # Record that the check happened and failed.
            cur.execute(
                "UPDATE campaign.interested_users "
                "SET address_checked = true, address_valid = false "
                "WHERE interested_user_id = %s",
                (interested_user_id, ))
            return False
def delete_source(self, account_id, source_id):
    """Delete a source owned by the given account.

    Returns True when exactly one row was removed. Raises RepoException
    when samples still reference the source, or on any other foreign-key
    violation during the delete.
    """
    try:
        with self._transaction.cursor() as cur:
            cur.execute(
                "DELETE FROM source WHERE source.id = %s AND "
                "source.account_id = %s",
                (source_id, account_id))
            return cur.rowcount == 1
    except psycopg2.errors.ForeignKeyViolation as e:
        # Map the specific "samples still attached" constraint to a
        # user-meaningful message; anything else is a generic failure.
        if e.diag.constraint_name == "fk_ag_kit_barcodes_sources":
            msg = ("A source cannot be deleted while samples "
                   "are associated with it")
        else:
            msg = "Error deleting source"
        raise RepoException(msg) from e
def scrub(self, account_id, source_id, survey_id):
    """Overwrite a survey's free-text answers with placeholder text."""
    with self._transaction.cursor() as cur:
        # Confirm the survey really belongs to this account and source.
        cur.execute(
            """SELECT survey_id
               FROM ag.ag_login_surveys
               WHERE ag_login_id = %s
               AND source_id = %s
               AND survey_id = %s""",
            (account_id, source_id, survey_id))
        if cur.fetchone() is None:
            raise RepoException("Invalid account / source / survey "
                                "relation")

        placeholder = 'scrubbed'
        # Only TEXT-typed questions carry free text; replace exactly those.
        cur.execute(
            """UPDATE survey_answers_other
               SET response=%s
               WHERE survey_id=%s
               AND survey_question_id IN (
                   SELECT survey_question_id
                   FROM survey_answers_other
                   JOIN survey_question_response_type
                   USING (survey_question_id)
                   WHERE survey_response_type='TEXT'
                   AND survey_id=%s
               )""",
            (placeholder, survey_id, survey_id))
def _create_daklapack_order(order_dict):
    """Submit one Daklapack order and persist it on success.

    Returns a status dict: on API failure it carries the error message
    and code; on success it carries the new order id.
    """
    order_dict[ORDER_ID_KEY] = str(uuid.uuid4())

    with Transaction() as t:
        try:
            daklapack_order = DaklapackOrder.from_api(**order_dict)
        except ValueError as e:
            raise RepoException(e)

        # The Daklapack API wants a *list* of orders, and we are submitting
        # one at a time, so we have a list of one item :)
        post_response = post_daklapack_orders(
            [daklapack_order.order_structure])

        address = daklapack_order.order_structure[ADDR_DICT_KEY]
        if post_response.status_code >= 400:
            # For now, very basic error handling: surface the Daklapack
            # API error verbatim.
            return {"order_address": address,
                    "order_success": False,
                    "daklapack_api_error_msg":
                        post_response.get_data(as_text=True),
                    "daklapack_api_error_code": post_response.status_code}

        # Persist the accepted order.
        admin_repo = AdminRepo(t)
        order_id = admin_repo.create_daklapack_order(daklapack_order)
        t.commit()

        return {"order_address": address,
                "order_success": True,
                "order_id": order_id}
def update_campaign(self, **kwargs):
    """Update an existing campaign's editable fields.

    'campaign_id' and 'title' are required; associated projects cannot
    be modified here. Returns the refreshed campaign record.
    """
    # required parameters to update a campaign
    campaign_id = kwargs['campaign_id']
    title = kwargs['title']

    # not permitted to update associated projects
    if 'associated_projects' in kwargs:
        raise RepoException("Modifying associated projects not allowed")

    # optional parameters to update a campaign
    optional_names = ('instructions', 'permitted_countries',
                      'language_key', 'accepting_participants',
                      'language_key_alt', 'title_alt',
                      'instructions_alt', 'extension')
    (instructions, permitted_countries, language_key,
     accepting_participants, language_key_alt, title_alt,
     instructions_alt, extension) = (kwargs.get(n) for n in optional_names)

    with self._transaction.cursor() as cur:
        cur.execute(
            "UPDATE campaign.campaigns SET title = %s, instructions = %s, "
            "permitted_countries = %s, language_key = %s, "
            "accepting_participants = %s, language_key_alt = %s, "
            "title_alt = %s, instructions_alt = %s "
            "WHERE campaign_id = %s",
            (title, instructions, permitted_countries, language_key,
             accepting_participants, language_key_alt, title_alt,
             instructions_alt, campaign_id))

    self.update_header_image(campaign_id, extension)
    return self.get_campaign_by_id(campaign_id)
def update_info(self, account_id, source_id, sample_info,
                override_locked=False):
    """Update end-user-writable information about an assigned sample.

    Parameters
    ----------
    account_id : str, uuid
        Account owning the source
    source_id : str, uuid
        Source the sample is assigned to
    sample_info : SampleInfo
        New values; datetime_collected, site and notes are written
    override_locked : bool, optional
        When True, skip the received-sample lock check. Required by
        administrative callers (e.g. dissociate_sample with
        override_locked=True), which previously crashed with a
        TypeError because this parameter did not exist.

    Raises
    ------
    werkzeug.exceptions.NotFound
        If the sample is not assigned to this account/source
    RepoException
        If the sample is locked and override_locked is False
    """
    existing_sample = self.get_sample(account_id, source_id,
                                      sample_info.id)
    if existing_sample is None:
        raise werkzeug.exceptions.NotFound("No sample ID: %s"
                                           % sample_info.id)
    if existing_sample.is_locked and not override_locked:
        raise RepoException("Sample edits locked: Sample already received")

    sample_date = None
    sample_time = None
    if sample_info.datetime_collected is not None:
        sample_date = sample_info.datetime_collected.date()
        sample_time = sample_info.datetime_collected.time()
    # TODO: Need to get the exact policy on which fields user is allowed
    # to set. For starters: I think: datetime_collected, site, notes
    with self._transaction.cursor() as cur:
        cur.execute(
            "UPDATE "
            "ag_kit_barcodes "
            "SET "
            "sample_date = %s, "
            "sample_time = %s, "
            "site_sampled = %s, "
            "notes = %s "
            "WHERE "
            "ag_kit_barcode_id = %s",
            (sample_date, sample_time, sample_info.site,
             sample_info.notes, sample_info.id))
def insert_interested_user(self, interested_user):
    """Insert an interested-user record and return its generated id."""
    u = interested_user
    # Column order below matches the INSERT column list exactly.
    values = (u.campaign_id, u.acquisition_source, u.first_name,
              u.last_name, u.email, u.phone, u.address_1, u.address_2,
              u.city, u.state, u.postal_code, u.country, u.latitude,
              u.longitude, u.confirm_consent, u.ip_address,
              u.address_checked, u.address_valid, u.over_18)
    with self._transaction.cursor() as cur:
        cur.execute(
            "INSERT INTO campaign.interested_users ("
            "campaign_id, acquisition_source, first_name, last_name, "
            "email, phone, address_1, address_2, city, state, "
            "postal_code, country, latitude, longitude, confirm_consent, "
            "ip_address, address_checked, address_valid, over_18, "
            "creation_timestamp) "
            "VALUES ("
            "%s, %s, %s, %s, "
            "%s, %s, %s, %s, %s, %s, "
            "%s, %s, %s, %s, %s, "
            "%s, %s, %s, %s, "
            "NOW()) RETURNING interested_user_id",
            values)
        interested_user_id = cur.fetchone()[0]

    if interested_user_id is None:
        raise RepoException("Error inserting interested user")
    return interested_user_id
def create_project(body, token_info):
    """Create a (possibly banked) project; admin access required."""
    validate_admin_access(token_info)

    project_name = body['project_name']
    is_microsetta = body['is_microsetta']
    bank_samples = body['bank_samples']
    plating_start_date = body.get('plating_start_date')

    # Parse the optional plating date up front so a malformed value is
    # rejected before any other validation.
    if plating_start_date is not None:
        try:
            plating_start_date = datetime.datetime.strptime(
                plating_start_date, "%Y-%m-%d")
        except ValueError:
            raise BadRequest(
                "plating start date '{0}' is not a valid date in YYYY-MM-DD "
                "format".format(plating_start_date))

    if len(project_name) == 0:
        return jsonify(code=400, message="No project name provided"), 400

    if not bank_samples and plating_start_date is not None:
        raise RepoException("Plating start date cannot be set for"
                            " unbanked projects")

    with Transaction() as t:
        admin_repo = AdminRepo(t)
        admin_repo.create_project(project_name, is_microsetta,
                                  bank_samples, plating_start_date)
        t.commit()

    return {}, 201
def scan_barcode(self, sample_barcode, scan_info):
    """Record a scan event for a barcode; returns the new scan's uuid."""
    with self._transaction.cursor() as cur:
        # not actually using the result, just checking there IS one
        # to ensure this is a valid barcode
        cur.execute(
            "SELECT barcode FROM barcodes.barcode WHERE barcode=%s",
            (sample_barcode, ))
        matches = cur.rowcount
        if matches == 0:
            raise NotFound("No such barcode: %s" % sample_barcode)
        if matches > 1:
            # Note: This "can't" happen.
            raise RepoException("ERROR: Multiple barcode entries would be "
                                "affected by scan; failing out")

        # Append a new row to the scan history.
        new_uuid = str(uuid.uuid4())
        cur.execute(
            "INSERT INTO barcodes.barcode_scans "
            "(barcode_scan_id, barcode, scan_timestamp, "
            "sample_status, technician_notes) "
            "VALUES (%s, %s, %s, %s, %s)",
            (new_uuid, sample_barcode, datetime.datetime.now(),
             scan_info['sample_status'], scan_info['technician_notes']))

        return new_uuid
def claim_legacy_account(self, email, auth_iss, auth_sub):
    """Claim an unclaimed legacy account matching this email, if any.

    Returns the now-claimed legacy account, or None when either no
    account matches the email or the matching account is already fully
    claimed. (NON-legacy accounts should be found with
    find_linked_account instead.) Raises RepoException when the stored
    auth info is inconsistent with the provided values.
    """
    found_account = self._find_account_by_email(email)
    if found_account is None:
        # No account under this email at all.
        return None

    auth = found_account.account_matches_auth(email, auth_iss, auth_sub)
    if auth == AuthorizationMatch.FULL_MATCH:
        # Already claimed by this identity; nothing to do.
        return None
    if auth == AuthorizationMatch.LEGACY_MATCH:
        # Account predates the external authorization provider: attach
        # the provider identity to it now.
        found_account.auth_issuer = auth_iss
        found_account.auth_sub = auth_sub
        self.update_account(found_account)
        return found_account
    if auth == AuthorizationMatch.NO_MATCH:
        # Shouldn't happen: e.g. only one of auth_iss/auth_sub is null in
        # the db, or the stored values disagree with the provided ones
        # for this email. May be more edge cases as well.
        raise RepoException("Inconsistent data found for provided email.")
    raise ValueError("Unknown authorization match value")
def scrub(self, account_id, source_id, sample_id):
    """Blank out the free-text notes for a sample.

    Parameters
    ----------
    account_id : str, uuid
        Account expected to own the source
    source_id : str, uuid
        Source expected to own the sample
    sample_id : str, uuid
        The sample whose notes are replaced

    Raises
    ------
    RepoException
        If the account/source or source/sample relation is bad, or the
        update fails for any reason

    Returns
    -------
    True if the sample was scrubbed, will raise otherwise
    """
    placeholder = "scrubbed"
    with self._transaction.cursor() as cur:
        # Sanity check: the source must belong to the account.
        cur.execute(
            """SELECT id
               FROM ag.source
               WHERE id=%s AND account_id=%s""",
            (source_id, account_id))
        if cur.fetchone() is None:
            raise RepoException("Invalid account / source relation")

        cur.execute(
            """UPDATE ag_kit_barcodes
               SET notes=%s
               WHERE source_id=%s AND ag_kit_barcode_id=%s""",
            (placeholder, source_id, sample_id))
        if cur.rowcount != 1:
            raise RepoException("Invalid source / sample relation")
    return True
def migrate_sample(self, sample_id, source_id_src, source_id_dst,
                   areyousure=False):
    """Move a sample from one source to another.

    WARNING !!!

    This is NOT intended for general use. This is an administrative
    method for correcting unusual circumstances, and the person issuing
    this call is expected to know exactly what they are doing -- hence
    the explicit areyousure flag.

    WARNING !!!
    """
    if not areyousure:
        raise RepoException("You aren't sure you want to do this")

    if source_id_src == source_id_dst:
        # Nothing to migrate.
        return

    with self._transaction.cursor() as cur:
        # The destination source must exist...
        cur.execute("SELECT id FROM ag.source where id=%s",
                    (source_id_dst, ))
        if len(cur.fetchall()) != 1:
            raise RepoException(f"source ({source_id_dst}) does not exist")

        # ...and the sample must currently belong to the origin source.
        cur.execute(
            """SELECT ag_kit_barcode_id
               FROM ag.ag_kit_barcodes
               WHERE source_id=%s
               AND ag_kit_barcode_id=%s""",
            (source_id_src, sample_id))
        hits = cur.fetchall()
        if len(hits) != 1:
            raise RepoException(f"{len(hits)} entries associated with "
                                f"source ({source_id_src}) and sample "
                                f"({sample_id})")

    self._update_sample_association(sample_id, source_id_dst,
                                    override_locked=True)
def build_condition(top_level_obj):
    """Translate a querybuilder JSON tree into a psycopg2 SQL condition.

    Returns a (sql.SQL condition, parameter-values list) pair. Raises
    RepoException for invalid queries, unknown group conditions, or
    unsupported rule operators.
    """
    def _walk(node, params, is_top_level=False):
        # Groups carry a 'condition' key; anything else is a leaf rule.
        if "condition" in node:
            return _group(node, params, is_top_level=is_top_level)
        return _rule(node, params)

    def _group(node, params, is_top_level=False):
        group_condition = node['condition']
        rules = node['rules']
        if group_condition == 'AND':
            joiner = " and "
        elif group_condition == 'OR':
            joiner = " or "
        else:
            raise RepoException("Unknown condition: "
                                + str(group_condition))
        combined = sql.SQL(joiner)\
            .join([_walk(child, params) for child in rules])
        if not is_top_level:
            # Parenthesize nested groups to preserve precedence.
            combined = sql.SQL('({cond})').format(cond=combined)
        return combined

    def _rule(node, params):
        id = node['id']
        operator = node['operator']
        value = node['value']
        if operator not in supported_operators:
            raise RepoException("Unsupported query operator: "
                                + str(operator))
        if operator in ["is_null", "is_not_null"]:
            # Null tests take no right-hand value.
            template = "{id}" + supported_operators[operator]
        else:
            template = "{id} " + supported_operators[operator] + " {value}"
            params.append(value)
        return sql.SQL(template).format(id=sql.Identifier(id),
                                        value=sql.Placeholder())

    if not top_level_obj['valid']:
        raise RepoException("Query is invalid")

    out_values = []
    return _walk(top_level_obj, out_values, is_top_level=True), out_values
def dissociate_sample(self, account_id, source_id, sample_id):
    """Detach a sample from its source after wiping user-entered fields.

    Raises NotFound when the sample does not exist for this
    account/source, and RepoException when the sample is locked.
    """
    sample = self.get_sample(account_id, source_id, sample_id)
    if sample is None:
        raise werkzeug.exceptions.NotFound("No sample ID: %s" % sample_id)
    if sample.is_locked:
        raise RepoException("Sample edits locked: Sample already received")

    # Clear every user-editable field first...
    self.update_info(account_id, source_id,
                     SampleInfo(sample_id, None, None, None))
    # ...then break the sample/source association.
    self._update_sample_association(sample_id, None)
def put_campaign_information(body, token_info):
    """Update a campaign's details; admin access required."""
    validate_admin_access(token_info)
    with Transaction() as t:
        repo = CampaignRepo(t)
        try:
            res = repo.update_campaign(**body)
        except ValueError as e:
            # Surface validation problems as repo-level errors.
            raise RepoException(e)
        t.commit()
    return jsonify(res), 200
def _update_sample_association(self, sample_id, source_id,
                               override_locked=False):
    """Point a sample's barcode row at the given source (or None).

    Parameters
    ----------
    sample_id : str, uuid
        The sample (ag_kit_barcode_id) to re-point
    source_id : str, uuid, or None
        New owning source; None detaches the sample
    override_locked : bool, optional
        When True, skip the received-sample lock check. Callers such as
        dissociate_sample(..., override_locked=...) and
        migrate_sample(..., override_locked=True) pass this keyword;
        previously it did not exist and those calls raised TypeError.

    Raises
    ------
    RepoException
        If the sample is locked and override_locked is False
    """
    with self._transaction.cursor() as cur:
        existing_sample = self._get_sample_by_id(sample_id)
        if existing_sample.is_locked and not override_locked:
            raise RepoException(
                "Sample edits locked: Sample already received")
        cur.execute(
            "UPDATE "
            "ag_kit_barcodes "
            "SET "
            "source_id = %s "
            "WHERE "
            "ag_kit_barcode_id = %s",
            (source_id, sample_id))
def update_account(self, account):
    """Persist all editable fields of an account.

    Returns True when exactly one row was updated.

    Raises
    ------
    RepoException
        When the email or the (issuer, sub) pair is already taken by
        another account.
    """
    with self._transaction.cursor() as cur:
        row = AccountRepo._account_to_row(account)
        # Shift id to end since it appears in the WHERE clause
        row_id = row[0:1]
        row_email_to_cc = row[1:]
        final_row = row_email_to_cc + row_id
        try:
            cur.execute(
                "UPDATE account "
                "SET "
                "email = %s, "
                "account_type = %s, "
                "auth_issuer = %s, "
                "auth_sub = %s, "
                "first_name = %s, "
                "last_name = %s, "
                "street = %s, "
                "city = %s, "
                "state = %s, "
                "post_code = %s, "
                "country_code = %s, "
                "created_with_kit_id = %s "
                "WHERE "
                "account.id = %s",
                final_row)
            return cur.rowcount == 1
        except psycopg2.errors.UniqueViolation as e:
            if e.diag.constraint_name == 'idx_account_email':
                # TODO: Ugh. Localization of error messages is needed.
                raise RepoException("Email %s is not available"
                                    % account.email) from e
            if e.diag.constraint_name == 'idx_account_issuer_sub':
                # Ugh. This is really difficult to explain to an end user.
                # Chain the cause for consistency with the email branch
                # above (it was previously dropped here).
                raise RepoException("Cannot claim more than one "
                                    "account") from e
            # Unknown constraint violation; re-raise with the original
            # traceback intact.
            raise
def create_account(self, account):
    """Insert a new account row.

    Returns True when the insert affected exactly one row.

    Raises
    ------
    RepoException
        When the email or the (issuer, sub) pair already belongs to an
        existing account.
    """
    try:
        with self._transaction.cursor() as cur:
            cur.execute(
                "INSERT INTO account (" + AccountRepo.write_cols + ") "
                "VALUES("
                "%s, %s, "
                "%s, %s, %s, "
                "%s, %s, "
                "%s, %s, %s, %s, %s, %s)",
                AccountRepo._account_to_row(account))
            return cur.rowcount == 1
    except psycopg2.errors.UniqueViolation as e:
        if e.diag.constraint_name == 'idx_account_email':
            # TODO: Ugh. Localization of error messages is needed someday.
            raise RepoException("Email %s is not available"
                                % account.email) from e
        if e.diag.constraint_name == 'idx_account_issuer_sub':
            # Ugh. This is really difficult to explain to an end user.
            # Chain the cause for consistency with the email branch above
            # (it was previously dropped here).
            raise RepoException("Cannot create two accounts on the same "
                                "email") from e
        # Unknown constraint violation; re-raise with the original
        # traceback intact.
        raise
def build_condition_helper_group(root_obj, out_values, is_top_level=False):
    """Combine a querybuilder group's rules into one SQL condition.

    Child rules/groups are rendered recursively and joined with the
    group's AND/OR condition; nested (non-top-level) groups are
    parenthesized to preserve precedence.
    """
    group_condition = root_obj['condition']
    group_rules = root_obj['rules']

    if group_condition == 'AND':
        joiner = " and "
    elif group_condition == 'OR':
        joiner = " or "
    else:
        raise RepoException("Unknown condition: " + str(group_condition))

    rendered_children = [build_condition_helper(child, out_values)
                         for child in group_rules]
    combined = sql.SQL(joiner).join(rendered_children)
    if not is_top_level:
        combined = sql.SQL('({cond})').format(cond=combined)
    return combined
def create_project(body, token_info):
    """Create a project from the request body; admin access required.

    Responds 201 with the new project's Location header.
    """
    validate_admin_access(token_info)

    try:
        project = Project.from_dict(body)
    except ValueError as e:
        # Surface validation problems as repo-level errors.
        raise RepoException(e)

    with Transaction() as t:
        admin_repo = AdminRepo(t)
        proj_id = admin_repo.create_project(project)
        t.commit()

    response = Response()
    response.status_code = 201
    response.headers['Location'] = '/api/admin/projects/%s' % (proj_id,)
    return response
def create_source(account_id, body, token_info):
    """Create a new source under an account; responds 201 with JSON."""
    _validate_account_access(token_info, account_id)

    with Transaction() as t:
        source_repo = SourceRepo(t)
        source_id = str(uuid.uuid4())
        name = body["source_name"]
        source_type = body['source_type']

        if source_type == Source.SOURCE_TYPE_HUMAN:
            # TODO: Unfortunately, humans require a lot of special
            # handling, and we started mixing Source calls used for
            # transforming to/from the database with source calls
            # to/from the api. Would be nice to split this out better.
            source_info = HumanInfo.from_dict(body,
                                              consent_date=date.today(),
                                              date_revoked=None)
            # The "legacy" age_range enum value is only valid on
            # preexisting sources, never on a newly created one. (Source
            # updates need no such check since only name and description
            # may be changed there.)
            if source_info.age_range == "legacy":
                raise RepoException("Age range may not be set to legacy.")
        else:
            source_info = NonHumanInfo.from_dict(body)

        new_source = Source(source_id, account_id, source_type,
                            name, source_info)
        source_repo.create_source(new_source)

        # Re-read from the db so creation_time/update_time are populated.
        s = source_repo.get_source(account_id, new_source.id)
        t.commit()

    response = jsonify(s.to_api())
    response.status_code = 201
    response.headers['Location'] = '/api/accounts/%s/sources/%s' % \
        (account_id, source_id)
    return response
def build_condition_helper_rule(root_obj, out_values):
    """Render a single querybuilder rule as a SQL fragment.

    Appends the rule's value to out_values unless the operator is a
    null test, which takes no right-hand side.
    """
    id = root_obj['id']
    operator = root_obj['operator']
    value = root_obj['value']

    if operator not in supported_operators:
        raise RepoException("Unsupported query operator: " + str(operator))

    op_sql = supported_operators[operator]
    if operator in ["is_null", "is_not_null"]:
        template = "{id}" + op_sql
        # no value placeholder needed for null tests
    else:
        template = "{id} " + op_sql + " {value}"
        out_values.append(value)

    return sql.SQL(template).format(id=sql.Identifier(id),
                                    value=sql.Placeholder())
def scan_barcode(self, sample_barcode, scan_info):
    """Apply a scan result to a barcode, bumping scan_date when valid."""
    with self._transaction.cursor() as cur:
        cur.execute(
            "SELECT scan_date FROM barcodes.barcode WHERE barcode=%s",
            (sample_barcode,)
        )
        row = cur.fetchone()
        if row is None:
            raise NotFound("No such barcode: %s" % sample_barcode)

        # Only a 'sample-is-valid' scan refreshes the scan date;
        # otherwise the existing date is kept.
        new_scan_date = row[0]
        if scan_info['sample_status'] == 'sample-is-valid':
            new_scan_date = date.today()

        cur.execute(
            "UPDATE barcodes.barcode "
            "SET "
            "sample_status = %s, "
            "technician_notes = %s, "
            "scan_date = %s "
            "WHERE "
            "barcode = %s",
            (scan_info['sample_status'],
             scan_info['technician_notes'],
             new_scan_date,
             sample_barcode)
        )
        if cur.rowcount == 0:
            raise NotFound("No such barcode: %s" % sample_barcode)
        if cur.rowcount > 1:
            # Note: This "can't" happen.
            raise RepoException("ERROR: Multiple barcode entries would be "
                                "updated by scan, failing out")
def create_campaign(self, **kwargs):
    """Create a campaign plus its project links; returns the new record.

    'title' and 'associated_projects' are required; the remaining
    campaign attributes are optional.
    """
    # required parameters to create a campaign
    title = kwargs['title']
    associated_projects = kwargs['associated_projects']

    # optional parameters when creating a campaign
    opts = {name: kwargs.get(name)
            for name in ('instructions', 'permitted_countries',
                         'language_key', 'accepting_participants',
                         'language_key_alt', 'title_alt',
                         'instructions_alt', 'extension')}

    with self._transaction.cursor() as cur:
        cur.execute(
            "INSERT INTO campaign.campaigns (title, instructions, "
            "permitted_countries, language_key, accepting_participants, "
            "language_key_alt, title_alt, "
            "instructions_alt) "
            "VALUES (%s, %s, %s, %s, %s, %s, %s, %s) "
            "RETURNING campaign_id",
            (title, opts['instructions'], opts['permitted_countries'],
             opts['language_key'], opts['accepting_participants'],
             opts['language_key_alt'], opts['title_alt'],
             opts['instructions_alt']))
        campaign_id = cur.fetchone()[0]

        if campaign_id is None:
            raise RepoException("Error inserting campaign into database")

        # Link the new campaign to each associated project.
        cur.executemany(
            "INSERT INTO campaign.campaigns_projects ("
            "campaign_id,project_id"
            ") VALUES (%s, %s) ",
            [(campaign_id, pid) for pid in associated_projects])

        self.update_header_image(campaign_id, opts['extension'])
        return self.get_campaign_by_id(campaign_id)
def verify_address(address_1, address_2=None, city=None, state=None,
                   postal=None, country=None):
    """Verify and normalize an address via the Melissa API.

    Required parameters: address_1, postal, country
    Optional parameters: address_2, city, state

    Note - postal and country default to None as you can't have
    non-default arguments after default arguments, and preserving
    structural order makes sense for addresses.

    Returns a dict with the normalized address fields, lat/long, and a
    'valid' flag (plus 'duplicate': True when served from a previous
    lookup). Raises KeyError on missing required fields, RepoException
    if the local record cannot be created, ValueError if results cannot
    be stored, and a generic Exception on Melissa API failure.
    """
    if address_1 is None or len(address_1) < 1 or postal is None or\
            len(postal) < 1 or country is None or len(country) < 1:
        raise KeyError("Must include address_1, postal, and country fields")

    with Transaction() as t:
        # The response codes we can treat as deliverable
        GOOD_CODES = ["AV25", "AV24", "AV23", "AV22"]

        melissa_repo = MelissaRepo(t)

        dupe_status = melissa_repo.check_duplicate(address_1, address_2,
                                                   postal, country)
        if dupe_status is not False:
            # duplicate record - return result with an added field
            # noting the duplication
            return {
                "address_1": dupe_status["result_address_1"],
                "address_2": dupe_status['result_address_2'],
                "city": dupe_status['result_city'],
                "state": dupe_status['result_state'],
                "postal": dupe_status['result_postal'],
                "country": dupe_status['result_country'],
                "latitude": dupe_status['result_latitude'],
                "longitude": dupe_status['result_longitude'],
                "valid": dupe_status['result_good'],
                "duplicate": True
            }

        record_id = melissa_repo.create_record(address_1, address_2, city,
                                               state, postal, country)
        if record_id is None:
            raise RepoException("Failed to create record in database.")

        url_params = {
            "id": SERVER_CONFIG["melissa_license_key"],
            "opt": "DeliveryLines:ON",
            "format": "JSON",
            "t": record_id,
            "a1": address_1,
            "a2": address_2,
            "loc": city,
            "admarea": state,
            "postal": postal,
            "ctry": country
        }
        url = SERVER_CONFIG["melissa_url"] + "?%s" % \
            urllib.parse.urlencode(url_params)

        response = requests.get(url)
        if response.ok is False:
            # BUG FIX: response.status_code is an int; concatenating it
            # directly to a str raised TypeError and masked the real
            # API error. Convert explicitly.
            exception_msg = "Error connecting to Melissa API."
            exception_msg += " Status Code: " + str(response.status_code)
            exception_msg += " Status Text: " + response.reason
            raise Exception(exception_msg)

        response_raw = response.text
        response_obj = json.loads(response_raw)

        if "Records" in response_obj.keys():
            """
            Note: Melissa's Global Address API allows batch requests.
            However, our usage is on a single-record basis. Therefore,
            we can safely assume that the response will only include one
            record to parse and use.
            """
            record_obj = response_obj["Records"][0]

            r_formatted_address = record_obj["FormattedAddress"]
            r_codes = record_obj["Results"]
            r_good = False
            codes = r_codes.split(",")
            for code in codes:
                if code in GOOD_CODES:
                    r_good = True
                    break

            r_address_1 = record_obj["AddressLine1"]
            r_address_2 = record_obj["AddressLine2"]
            r_city = record_obj["Locality"]
            r_state = record_obj["AdministrativeArea"]
            r_postal = record_obj["PostalCode"]
            r_country = record_obj["CountryName"]
            r_latitude = record_obj["Latitude"]
            r_longitude = record_obj["Longitude"]

            u_success = melissa_repo.update_results(
                record_id, url, response_raw, r_codes, r_good,
                r_formatted_address, r_address_1, r_address_2, r_city,
                r_state, r_postal, r_country, r_latitude, r_longitude)
            t.commit()

            if u_success is False:
                exception_msg = "Failed to update results for Melissa "
                exception_msg += "Address Query " + record_id
                raise ValueError(exception_msg)

            return {
                "address_1": r_address_1,
                "address_2": r_address_2,
                "city": r_city,
                "state": r_state,
                "postal": r_postal,
                "country": r_country,
                "latitude": r_latitude,
                "longitude": r_longitude,
                "valid": r_good
            }
        else:
            # Commit so the failed lookup record is preserved for review.
            t.commit()
            exception_msg = "Melissa Global Address API failed on "
            exception_msg += record_id
            raise Exception(exception_msg)
def submit_answered_survey(self, ag_login_id, source_id, language_tag,
                           survey_template_id, survey_model,
                           survey_answers_id=None):
    """Persist a participant's survey responses; return the answers id.

    Parameters
    ----------
    ag_login_id : str, uuid
        Same as account_id (legacy naming)
    source_id : str, uuid
        The source the survey was answered for
    language_tag : str
        Locale of the submitted answers; used to unlocalize responses
    survey_template_id : int
        Template the answers belong to
    survey_model : dict
        Maps question id (as str) to the answer value; may be mutated
        here (an empty-string answer is injected when it is empty)
    survey_answers_id : str, uuid, optional
        Pre-chosen answers id; a new uuid4 is generated when None

    Raises
    ------
    BadRequest
        If a SINGLE/MULTIPLE answer violates the allowed responses
    RepoException
        If no answer at all could be recorded (see hack note below)
    """
    # note that "ag_login_id" is the same as account_id
    # This is actually pretty complicated in the current schema:
    # We need to filter the model down to questions that are in the
    # template
    # We need to ensure that the account has access to write the given
    # participant
    # We need to generate a survey_answer id
    # We need to log that the user submitted this survey
    # We need to write each answer to one or more rows
    # TODO: We need to ensure that the account has access to write the
    #  given participant!?!
    if survey_answers_id is None:
        survey_answers_id = str(uuid.uuid4())

    survey_template_repo = SurveyTemplateRepo(self._transaction)
    survey_template = survey_template_repo.get_survey_template(
        survey_template_id, language_tag)
    with self._transaction.cursor() as cur:
        # Log that the user submitted this survey
        cur.execute(
            "INSERT INTO ag_login_surveys "
            "(ag_login_id, survey_id, source_id) "
            "VALUES(%s, %s, %s)",
            (ag_login_id, survey_answers_id, source_id))

        # Write each answer
        for survey_template_group in survey_template.groups:
            for survey_question in survey_template_group.questions:
                survey_question_id = survey_question.id
                q_type = survey_question.response_type

                # TODO FIXME HACK: Modify DB to make this unnecessary!
                # We MUST record at least ONE answer for each survey
                # (even if the user answered nothing)
                # or we can't properly track the survey template id later.
                # Therefore, if the user answered NOTHING, store an empty
                # string for the first string or text question in the
                # survey, just so something is recorded.
                if len(survey_model) == 0 and \
                        (q_type == "STRING" or q_type == "TEXT"):
                    survey_model[str(survey_question_id)] = ""

                if str(survey_question_id) not in survey_model:
                    # TODO: Is this supposed to leave the question blank
                    #  or write Unspecified?
                    continue
                answer = survey_model[str(survey_question_id)]

                if q_type == "SINGLE":
                    # Normalize localized answer
                    normalized_answer = self._unlocalize(
                        answer, language_tag)
                    try:
                        cur.execute(
                            "INSERT INTO survey_answers "
                            "(survey_id, "
                            "survey_question_id, "
                            "response) "
                            "VALUES(%s, %s, %s)",
                            (survey_answers_id,
                             survey_question_id,
                             normalized_answer))
                    except psycopg2.errors.ForeignKeyViolation:
                        raise BadRequest("Invalid survey response: %s"
                                         % answer)

                if q_type == "MULTIPLE":
                    # Each selected option becomes its own answer row.
                    for ans in answer:
                        normalized_answer = self._unlocalize(
                            ans, language_tag)
                        try:
                            cur.execute(
                                "INSERT INTO survey_answers "
                                "(survey_id, "
                                "survey_question_id, "
                                "response) "
                                "VALUES(%s, %s, %s)",
                                (survey_answers_id,
                                 survey_question_id,
                                 normalized_answer))
                        except psycopg2.errors.ForeignKeyViolation:
                            raise BadRequest(
                                "Invalid survey response: %s" % ans)

                if q_type == "STRING" or q_type == "TEXT":
                    # Note: Can't convert language on free text...
                    cur.execute(
                        "INSERT INTO survey_answers_other "
                        "(survey_id, "
                        "survey_question_id, "
                        "response) "
                        "VALUES(%s, %s, %s)",
                        (survey_answers_id,
                         survey_question_id,
                         answer))

    if len(survey_model) == 0:
        # we should not have gotten to the end without recording at least
        # ONE answer (even an empty one) ... but it could happen if this
        # survey template includes NO text or string questions AND the
        # user doesn't answer any of the questions it does include. Not
        # worth making the code robust to this case, as this whole
        # "include one empty answer" is a temporary hack, but at least
        # ensure we know this problem occurred if it ever does
        raise RepoException("No answers provided for survey template %s "
                            "and not able to add an empty string default"
                            % survey_template_id)
    return survey_answers_id
def get_survey_metadata(self, sample_barcode, survey_template_id=None):
    """Assemble the metadata pulldown for a barcode's surveys.

    Parameters
    ----------
    sample_barcode : str
        Barcode to pull metadata for
    survey_template_id : int, optional
        When given, restrict results to the single answered survey
        matching this template

    Raises
    ------
    NotFound
        If the barcode is unknown, or no answered survey matches the
        requested template
    RepoException
        If the barcode has no source, or more than one answered survey
        matches the requested template

    Returns
    -------
    dict with sample_barcode, host_subject_id, account, source, sample
    and the list of survey answers
    """
    ids = self._get_ids_relevant_to_barcode(sample_barcode)
    if ids is None:
        raise NotFound("No such barcode")

    account_id = ids.get('account_id')
    source_id = ids.get('source_id')
    sample_id = ids.get('sample_id')
    account = None
    source = None
    sample = None
    if sample_id is not None:
        sample_repo = SampleRepo(self._transaction)
        sample = sample_repo._get_sample_by_id(sample_id)
    if source_id is not None and account_id is not None:
        source_repo = SourceRepo(self._transaction)
        account_repo = AccountRepo(self._transaction)
        account = account_repo.get_account(account_id)
        source = source_repo.get_source(account_id, source_id)
    if source is None:
        raise RepoException("Barcode is not associated with a source")

    # TODO: This is my best understanding of how the data must be
    #  transformed to get the host_subject_id, needs verification that it
    #  generates the expected values for preexisting samples.
    prehash = account_id + source.name.lower()
    host_subject_id = sha512(prehash.encode()).hexdigest()

    survey_answers_repo = SurveyAnswersRepo(self._transaction)
    answer_ids = survey_answers_repo.list_answered_surveys_by_sample(
        account_id, source_id, sample_id)

    # Map each answered-survey id to the template it was answered from.
    answer_to_template_map = {}
    for answer_id in answer_ids:
        template_id = survey_answers_repo.find_survey_template_id(
            answer_id)
        answer_to_template_map[answer_id] = template_id

    # if a survey template is specified, filter the returned surveys
    if survey_template_id is not None:
        # TODO: This schema is so awkward for this type of query...
        answers = []
        for answer_id in answer_ids:
            if answer_to_template_map[answer_id] == survey_template_id:
                answers.append(answer_id)

        if len(answers) == 0:
            raise NotFound("This barcode is not associated with any "
                           "surveys matching this template id")
        if len(answers) > 1:
            # I really hope this can't happen. (x . x)
            raise RepoException("This barcode is associated with more "
                                "than one survey matching this template"
                                " id")
        answer_ids = answers

    metadata_map = survey_answers_repo.build_metadata_map()

    all_survey_answers = []
    for answer_id in answer_ids:
        answer_model = survey_answers_repo.get_answered_survey(
            account_id, source_id, answer_id, "en-US")

        # Re-key each answer by its metadata column name alongside the
        # raw response value.
        survey_answers = {}
        for k in answer_model:
            new_k = metadata_map[int(k)]
            survey_answers[k] = [new_k, answer_model[k]]

        all_survey_answers.append({
            "template": answer_to_template_map[answer_id],
            "response": survey_answers
        })

    pulldown = {
        "sample_barcode": sample_barcode,
        "host_subject_id": host_subject_id,
        "account": account,
        "source": source,
        "sample": sample,
        "survey_answers": all_survey_answers
    }
    return pulldown