def get_package_specific_scenario_data_from_db(package_id):
    """Gather per-package scenario inputs (counter, citation, authorship data).

    Returns a dict with keys "timing" (list of (label, seconds) tuples),
    "counter_dict", "citation_dict" and "authorship_dict"; the latter two map
    issn_l -> {year: rounded count}.
    """
    timing = []
    section_time = time()

    counter_dict = get_counter_totals_from_db(package_id)
    timing.append(("time from db: counter", elapsed(section_time, 2)))

    section_time = time()
    # Parameterized (was string .format interpolation) so a hostile or odd
    # package_id cannot alter the SQL.
    command = """
        select citing.issn_l, citing.year::int, sum(num_citations) as num_citations
        from jump_citing citing
        join jump_grid_id institution_grid on citing.grid_id = institution_grid.grid_id
        join jump_account_package institution_package on institution_grid.institution_id = institution_package.institution_id
        where citing.year < 2019
        and institution_package.package_id = %(package_id)s
        and (citing.issn_l in (select distinct issn_l from jump_counter where package_id = %(package_id)s))
        group by citing.issn_l, year"""
    with get_db_cursor() as cursor:
        cursor.execute(command, {"package_id": package_id})
        citation_rows = cursor.fetchall()
    citation_dict = defaultdict(dict)
    for row in citation_rows:
        citation_dict[row["issn_l"]][row["year"]] = round(row["num_citations"])
    timing.append(("time from db: citation_rows", elapsed(section_time, 2)))

    section_time = time()
    command = """
        select authorship.issn_l, authorship.year::int, sum(num_authorships) as num_authorships
        from jump_authorship authorship
        join jump_grid_id institution_grid on authorship.grid_id = institution_grid.grid_id
        join jump_account_package institution_package on institution_grid.institution_id = institution_package.institution_id
        where authorship.year < 2019
        and institution_package.package_id = %(package_id)s
        and (authorship.issn_l in (select distinct issn_l from jump_counter where package_id = %(package_id)s))
        group by authorship.issn_l, year"""
    with get_db_cursor() as cursor:
        cursor.execute(command, {"package_id": package_id})
        authorship_rows = cursor.fetchall()
    authorship_dict = defaultdict(dict)
    for row in authorship_rows:
        authorship_dict[row["issn_l"]][row["year"]] = round(row["num_authorships"])
    timing.append(("time from db: authorship_rows", elapsed(section_time, 2)))

    data = {
        "timing": timing,
        "counter_dict": counter_dict,
        "citation_dict": citation_dict,
        "authorship_dict": authorship_dict
    }
    return data
def recompute_journal_metadata(): journals_raw = JournalsDBRaw.query.all() print len(journals_raw) new_computed_journals = [] print "making backups and getting tables ready to run" with get_db_cursor() as cursor: cursor.execute("drop table journalsdb_raw_bak_yesterday;") cursor.execute("drop table journalsdb_computed_bak_yesterday;") cursor.execute( "create table journalsdb_raw_bak_yesterday as (select * from journalsdb_raw);" ) cursor.execute( "create table journalsdb_computed_bak_yesterday as (select * from journalsdb_computed);" ) # do it as its own to force commit with get_db_cursor() as cursor: # don't truncate raw! is populated by xplenty. # further more truncate hangs, so do truncation this way instead cursor.execute("delete from journalsdb_computed;") print "tables ready for insertion" for journal_raw in journals_raw: new_journal_metadata = JournalMetadata(journal_raw) new_computed_journals.append(new_journal_metadata) print "starting commits" start_time = time() insert_values_list = [j.get_insert_values() for j in new_computed_journals] command_start = u"""INSERT INTO journalsdb_computed ({}) VALUES """.format( ",".join(JournalMetadata.get_insert_column_names())) with get_db_cursor() as cursor: i = 0 for short_values_list in chunks(insert_values_list, 1000): values_list_string = u",".join(short_values_list) q = u"{} {};".format(command_start, values_list_string) cursor.execute(q) i += 1 print i print u"done committing journals, took {} seconds total".format( elapsed(start_time)) print u"now refreshing flat view" with get_db_cursor() as cursor: cursor.execute("refresh materialized view journalsdb_computed_flat;") cursor.execute("analyze journalsdb_computed;") print u"done writing to db, took {} seconds total".format( elapsed(start_time))
def consortium_calculate():
    """Worker loop: recompute consortium journal dicts for queued scenarios.

    Polls jump_scenario_computed_update_queue forever; for each incomplete row
    it recomputes the scenario's journal dicts, marks the queue row completed,
    and optionally emails the requester.  Never returns — run as a daemon.
    """
    # command = "truncate jump_scenario_computed_update_queue"
    # print command
    # with get_db_cursor() as cursor:
    #     cursor.execute(command)
    while True:
        # random order so concurrent workers are less likely to pick the same row
        command = "select * from jump_scenario_computed_update_queue where completed is null order by random()"
        # print command
        with get_db_cursor() as cursor:
            cursor.execute(command)
            rows = cursor.fetchall()

        for row in rows:
            start_time = time()
            print "in consortium_calculate, starting recompute_journal_dicts for scenario_id {}".format(
                row["scenario_id"])
            my_consortium = Consortium(row["scenario_id"])
            # the expensive step: rebuilds the cached journal dicts for this scenario
            my_consortium.recompute_journal_dicts()
            print "in consortium_calculate, done recompute_journal_dicts for scenario_id {} took {}s".format(
                row["scenario_id"], elapsed(start_time))

            print "updating jump_scenario_computed_update_queue with completed"
            # "and completed is null" so a row another worker already finished isn't re-stamped
            command = "update jump_scenario_computed_update_queue set completed=sysdate where scenario_id='{}' and completed is null".format(
                row["scenario_id"])
            # print command
            with get_db_cursor() as cursor:
                cursor.execute(command)

            if row["email"]:
                # notify the requester that their update finished
                print "SENDING EMAIL"
                done_email = create_email(row["email"], u'Unsub update complete', 'update_done', {
                    'data': {
                        'consortium_name': row.get("consortium_name", ""),
                        'package_name': row.get("package_name", ""),
                        'start_time': row.get("created", ""),
                        'end_time': datetime.datetime.utcnow().isoformat(),
                        'institution_id': row.get("institution_id", ""),
                        'package_id': row.get("package_id", ""),
                        'scenario_id': row["scenario_id"]
                    }})
                send(done_email, for_real=True)
                print "SENT EMAIL DONE"

            print "DONE UPDATING", row["scenario_id"]

        # short randomized pause between polls to avoid hammering the queue table
        sleep(2 * random.random())
def food_retrieve(food_id):
    """Fetch one food item via the get_food_item SQL function and wrap it
    in the standard success envelope."""
    query = '''
        SELECT get_food_item(%s) AS response
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, (food_id,))
        item = cur.fetchone()['response']
    return success_response({'data': item})
def to_dict_journal_zoom(self, issn_l):
    """Per-institution usage/cpu rows for one journal in this scenario.

    Returns only rows whose package_id is in
    self.member_institution_included_list, ordered by usage descending.
    """
    # Parameterized (was string .format interpolation): issn_l can come from
    # request input, so it must be a bind parameter, not spliced into SQL.
    command = """
        select i.id as institution_id,
               i.display_name as institution_name,
               s.member_package_id as package_id,
               s.usage as usage,
               s.cpu as cpu
        from jump_scenario_computed s
        join jump_account_package p on s.member_package_id = p.package_id
        join jump_institution i on i.id = p.institution_id
        where s.scenario_id = %s and s.issn_l = %s
        order by usage desc
        """
    with get_db_cursor(use_realdictcursor=True) as cursor:
        cursor.execute(command, (self.scenario_id, issn_l))
        rows = cursor.fetchall()

    response = []
    if self.scenario_id is not None:
        for row in rows:
            if row["package_id"] in self.member_institution_included_list:
                response.append(row)
    return response
def set_to_delete(self, package_id, report_name=None): with get_db_cursor() as cursor: command = "update jump_raw_file_upload_object set to_delete_date=sysdate where package_id = '{}' and file = '{}'".format( package_id, self.file_type_label()) print command cursor.execute(command) return u"Queued to delete"
def warm_the_cache():
    """Prime the permissions API cache with 1000 random recent journal DOIs.

    Fetches DOIs in chunks of 10, hitting each URL from a 10-thread pool with
    a long max-age header so the CDN/cache retains the responses.
    """
    q = """select doi from unpaywall where genre='journal-article' and year > 2017 order by random() limit 1000"""
    with get_db_cursor() as cursor:
        cursor.execute(q)
        rows = cursor.fetchall()
    dois = [row["doi"] for row in rows]
    chunked_dois = chunks(dois, 10)
    for chunk in chunked_dois:
        start_time = time.time()
        # NOTE(review): workaround for ThreadPool failing inside threads that
        # lack the _children bookkeeping attr — presumably this runs off the
        # main thread; confirm before removing.
        if not hasattr(threading.current_thread(), "_children"):
            threading.current_thread()._children = weakref.WeakKeyDictionary()
        my_thread_pool = ThreadPool(10)

        def cache_it(doi):
            # worker: GET the permissions URL so intermediaries cache it
            url = "https://api.greenoait.org/permissions/doi/{}".format(doi)
            # 12*30*24*60*60 = 31104000
            headers = {"Cache-Control": "public, max-age=31104000"}
            # sleep part of a second so not all at once
            sleep(random.random())
            r = requests.get(url, headers=headers)
            return r.status_code

        responses = my_thread_pool.imap_unordered(cache_it, chunk)
        my_thread_pool.close()
        my_thread_pool.join()
        my_thread_pool.terminate()
        # list() drains the lazy imap iterator (after join, all results are ready)
        print list(responses)
        print "took {} seconds".format(elapsed(start_time))
def food_record(account_id, food_id, servings):
    """Record that an account ate `servings` of a food item."""
    params = (account_id, food_id, servings)
    with get_db_cursor(commit=True) as cur:
        cur.execute('''
            INSERT INTO food_history(account_id, food_id, food_servings)
            VALUES (%s, %s, %s)
        ''', params)
    return success_response()
def copy_package(old_package_id, new_package_id, new_institution_id):
    """Deep-copy a package and all its associated data to a new package id.

    Duplicates rows from every package-scoped table (counter, counter input,
    perpetual access + input, journal prices + input, raw file uploads, APC
    authorships) and finally the jump_account_package row itself, re-pointed
    at new_institution_id with created=sysdate.  Executed as one
    multi-statement SQL batch.

    NOTE(review): the ids are spliced into the SQL with .format(); assumes the
    callers pass trusted, well-formed ids — verify before exposing this to
    user input.
    """
    command = """
        insert into jump_counter (issn_l, package_id, journal_name, total, report_year, report_name, report_version, metric_type, yop, access_type, created) (
            select issn_l, '{new_package_id}', journal_name, total, report_year, report_name, report_version, metric_type, yop, access_type, created
            from jump_counter where package_id = '{old_package_id}'
        );

        insert into jump_counter_input (issn, journal_name, total, package_id, report_year, report_name, report_version, metric_type, yop, access_type) (
            select issn, journal_name, total, '{new_package_id}', report_year, report_name, report_version, metric_type, yop, access_type
            from jump_counter_input where package_id = '{old_package_id}'
        );

        insert into jump_perpetual_access (package_id, issn_l, start_date, end_date, created) (
            select '{new_package_id}', issn_l, start_date, end_date, created
            from jump_perpetual_access where package_id = '{old_package_id}'
        );

        insert into jump_perpetual_access_input (package_id, issn, start_date, end_date) (
            select '{new_package_id}', issn, start_date, end_date
            from jump_perpetual_access_input where package_id = '{old_package_id}'
        );

        insert into jump_journal_prices (package_id, publisher, title, issn_l, price, created) (
            select '{new_package_id}', publisher, title, issn_l, price, created
            from jump_journal_prices where package_id = '{old_package_id}'
        );

        insert into jump_journal_prices_input (package_id, publisher, issn, price) (
            select '{new_package_id}', publisher, issn, price
            from jump_journal_prices_input where package_id = '{old_package_id}'
        );

        insert into jump_raw_file_upload_object (package_id, file, bucket_name, object_name, created, num_rows, error_details, error, to_delete_date) (
            select '{new_package_id}', file, bucket_name, object_name, created, num_rows, error_details, error, to_delete_date
            from jump_raw_file_upload_object where package_id = '{old_package_id}'
        );

        insert into jump_apc_authorships (package_id, doi, publisher, num_authors_total, num_authors_from_uni, journal_name, issn_l, year, oa_status, apc) (
            select '{new_package_id}', doi, publisher, num_authors_total, num_authors_from_uni, journal_name, issn_l, year, oa_status, apc
            from jump_apc_authorships where package_id = '{old_package_id}'
        );

        insert into jump_account_package (package_id, publisher, package_name, created, consortium_package_id, institution_id, is_demo, big_deal_cost, big_deal_cost_increase, is_deleted, updated, default_to_no_perpetual_access, currency) (
            select '{new_package_id}', publisher, package_name, sysdate, consortium_package_id, '{new_institution_id}', is_demo, big_deal_cost, big_deal_cost_increase, is_deleted, updated, default_to_no_perpetual_access, currency
            from jump_account_package where package_id = '{old_package_id}'
        );
        """.format(new_package_id=new_package_id,
                   old_package_id=old_package_id,
                   new_institution_id=new_institution_id)
    print command
    with get_db_cursor() as cursor:
        cursor.execute(command)
def readings_blood_sugar(account_id, reading):
    """Store one blood-sugar reading for an account."""
    with get_db_cursor(commit=True) as cur:
        cur.execute('''
            INSERT INTO readings(account_id, reading)
            VALUES (%s, %s)
        ''', (account_id, reading))
    response = success_response()
    return response
def account_profile(account_id):
    """Return the full profile for an account as a JSON object.

    Raises InvalidUsage when no account matches account_id.
    """
    query = '''
        SELECT json_build_object(
            'first_name', first_name, 'last_name', last_name,
            'height', height, 'weight', weight, 'sex', sex, 'dob', dob,
            'email', email, 'diabetes_type', diabetes_type,
            'high_blood_pressure', high_blood_pressure, 'pregnant', pregnant,
            'insulin_tdd', insulin_tdd, 'background_dose', background_dose,
            'pre_meal_target', pre_meal_target, 'post_meal_target', post_meal_target,
            'basal_corr_factor', basal_corr_factor, 'bolus_corr_factor', bolus_corr_factor,
            'grams_carb_per_unit', grams_carb_per_unit)::JSONB AS response
        FROM accounts
        WHERE account_id = %s
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, (account_id,))
        account = cur.fetchone()['response']

    if account is None:
        raise InvalidUsage('There are no accounts with '
                           'the provided id.', 'account_not_found')
    return success_response({'data': account})
def get_latest_scenario(scenario_id, my_jwt=None):
    """Build a Scenario from the most recently saved JSON for scenario_id.

    Falls back to DEMO_PACKAGE_ID when no SavedScenario row exists; demo
    scenarios ("demo..." ids) are stored in their own details table.
    """
    my_saved_scenario = SavedScenario.query.get(scenario_id)
    if my_saved_scenario:
        package_id = my_saved_scenario.package_id
    else:
        package_id = DEMO_PACKAGE_ID

    if scenario_id.startswith("demo"):
        tablename = "jump_scenario_details_demo"
    else:
        tablename = "jump_scenario_details_paid"

    with get_db_cursor() as cursor:
        # table name is one of two fixed literals above, so only scenario_id
        # needs to be a bind parameter (was string .format interpolation)
        command = u"""select scenario_json from {} where scenario_id = %s order by updated desc limit 1;""".format(
            tablename)
        # print command
        cursor.execute(command, (scenario_id,))
        rows = cursor.fetchall()

    scenario_data = None
    if rows:
        scenario_data = json.loads(rows[0]["scenario_json"])

    my_scenario = Scenario(package_id, scenario_data, my_jwt=my_jwt)
    return my_scenario
def readings_insulin(account_id, units):
    """Store one insulin dose for an account."""
    params = (account_id, units)
    with get_db_cursor(commit=True) as cur:
        cur.execute('''
            INSERT INTO doses(account_id, dose_units)
            VALUES (%s, %s)
        ''', params)
    return success_response()
def get_column_values(column): print u"getting values for column {}".format(column) (column_table, column_solo) = column.split(".") if (column_table == "a"): table = "mag_authors_paperid3 a" else: table = "ricks_fast_pub_affil_journal v" with get_db_cursor() as cursor: q = "select {column} from {table} where {column} is not null order by random() limit 100".format( column=column, table=table) cursor.execute(q) rows = cursor.fetchall() values = [] for row in rows: if isinstance(row[column_solo], bool) or isinstance( row[column_solo], int) or isinstance(row[column_solo], long): value = row[column_solo] values.append(value) else: value = row[column_solo].decode('utf-8') value = value.replace("'", "''") if value: value = u"'{}'".format(value) values.append(value) # don't include empty strings return values
def food_search(query, results=10):
    """Search food items via the food_search SQL function, returning at most
    `results` matches."""
    sql = '''
        SELECT food_search(%s, %s) AS response
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(sql, (query, results))
        matches = cur.fetchone()['response']
    return success_response({'data': matches})
def to_dict_institutions(self):
    """Summarize member institutions for this consortium scenario.

    One row per member package with aggregate usage, journal count, tags and
    an "included" flag; rows with an outstanding feedback request also get
    sent/return dates and the member scenario's last-edited timestamp.
    Ordered by usage descending.
    """
    from saved_scenario import get_latest_scenario_raw

    start_time = time()
    command = """with tags as (select institution_id, listagg(tag_string, ', ') as tag_listagg from jump_tag_institution group by institution_id)
        select max(i.id) as institution_id,
            max(i.old_username) as institution_short_name,
            max(i.display_name) as institution_name,
            s.member_package_id as package_id,
            sum(s.usage) as usage,
            count(s.member_package_id) as num_journals,
            max(t.tag_listagg) as tags,
            false as included
        from jump_scenario_computed s
        join jump_account_package p on s.member_package_id = p.package_id
        join jump_institution i on i.id = p.institution_id
        left join tags t on t.institution_id=p.institution_id
        where s.scenario_id='{scenario_id}'
        group by s.member_package_id
        order by usage desc
        """.format(scenario_id=self.scenario_id)
    with get_db_cursor(use_realdictcursor=True) as cursor:
        cursor.execute(command)
        rows = cursor.fetchall()

    command = """select * from jump_consortium_feedback_requests where consortium_scenario_id='{scenario_id}' """.format(
        scenario_id=self.scenario_id)
    with get_db_cursor() as cursor:
        cursor.execute(command)
        rows_for_feedback = cursor.fetchall()

    if self.scenario_id is not None:
        for row in rows:
            # mark packages the scenario has actually included
            if row["package_id"] in self.member_institution_included_list:
                row["included"] = True
            # attach feedback-request metadata where one exists for this package
            for row_for_feedback in rows_for_feedback:
                if row_for_feedback["member_package_id"] == row["package_id"]:
                    row["sent_date"] = row_for_feedback["sent_date"]
                    row["return_date"] = row_for_feedback["return_date"]
                    (updated, scenario_data) = get_latest_scenario_raw(
                        row_for_feedback["member_scenario_id"])
                    # only count it as edited if the member actually made changes
                    if not "member_added_subrs" in scenario_data:
                        updated = None
                    row["last_edited_date"] = updated
    return rows
def get_pmh_dates(start_date=None):
    """Harvest PMC OAI-PMH identifier/datestamp pairs into the db, forever.

    Starts from start_date, or the max date already stored (or 2001-02-27 if
    the table is empty), and walks forward one day at a time, batch-inserting
    every 500 records.  Runs until externally stopped.
    """
    my_sickle = None
    if not start_date:
        # resume from the newest date we've already harvested
        command = """select max(pmh_date) as max_pmh_date from unpaywall_pmc_pmh_dates"""
        with get_db_cursor() as cursor:
            cursor.execute(command)
            rows = cursor.fetchall()
        if rows[0]["max_pmh_date"]:
            start_date = rows[0]["max_pmh_date"]
        else:
            start_date = parser.parse("2001-02-27")

    new_objects = []
    while True:
        (header, my_sickle, start_date) = call_pmh_endpoint(
            start_date, my_sickle)  # can iterate up if no records
        xml_content = header.next()
        while xml_content:
            matches = re.findall(
                ur"<identifier>(.+?)</identifier><datestamp>(.+?)</datestamp>",
                str(xml_content))
            (pmh_id, pmh_date) = matches[0]
            my_obj = PmcPmhDates(pmh_id=pmh_id, pmh_date=pmh_date)
            print ".",
            new_objects.append(my_obj)
            if len(new_objects) > 500:
                # batch-insert accumulated records in one statement
                # print "committing"
                with get_db_cursor() as cursor:
                    command = u"""INSERT INTO unpaywall_pmc_pmh_dates (pmh_id, pmh_date, updated) values """
                    insert_strings = []
                    for obj in new_objects:
                        insert_string = u"""('{}', '{}', '{}')""".format(
                            obj.pmh_id, obj.pmh_date, obj.updated)
                        insert_strings.append(insert_string)
                    command = command + u",".join(insert_strings) + u";"
                    print "*",
                    cursor.execute(command)
                new_objects = []
            try:
                xml_content = header.next()
            # NOTE(review): bare except — presumably meant to catch
            # StopIteration at end of the day's records, but it will also
            # swallow network/parse errors; consider narrowing.
            except:
                xml_content = None
        # advance to the next harvest day
        start_date = start_date + datetime.timedelta(days=1)
def get_core_list_from_db(input_package_id):
    """Map issn_l -> core-journal row for a package.

    Each value is the full row dict (issn_l, baseline_access).
    """
    # Parameterized (was string .format interpolation)
    command = "select issn_l, baseline_access from jump_core_journals where package_id = %s"
    with get_db_cursor() as cursor:
        cursor.execute(command, (input_package_id,))
        rows = cursor.fetchall()
    my_dict = dict((row["issn_l"], row) for row in rows)
    return my_dict
def food_calculate(account_id, food_id, servings):
    """Compute the insulin units required for a given food and servings,
    rounded to the nearest integer."""
    sql = '''
        SELECT round(food_insulin_units_required(%s, %s, %s))::INTEGER AS response
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(sql, (account_id, food_id, servings))
        units = cur.fetchone()['response']
    return success_response({'data': {'units': units}})
def update_dest_table(self, package_id):
    """Replace the destination table's rows for a package with the current
    contents of the import view.

    Delete-then-insert inside one cursor block so both run in one transaction.
    """
    with get_db_cursor() as cursor:
        # table/view names come from class methods (fixed strings, not user
        # input); package_id is the only value that needs a bind parameter
        cursor.execute(
            "delete from {} where package_id = %s".format(self.destination_table()),
            (package_id,))
        cursor.execute(
            "insert into {} (select * from {} where package_id = %s)".format(
                self.destination_table(), self.import_view_name()),
            (package_id,))
def all_member_package_ids(self):
    """All member package ids belonging to this consortium package.

    Returns an empty list when the package has no members.
    """
    # Parameterized (was string .format interpolation); the redundant
    # "if rows" branch is gone — a comprehension over no rows is already [].
    q = u"select member_package_id from jump_consortium_members where consortium_package_id = %s"
    with get_db_cursor() as cursor:
        cursor.execute(q, (self.package_id,))
        rows = cursor.fetchall()
    return [row["member_package_id"] for row in rows]
def get_consortium_package_ids(package_id):
    """Package ids of all packages whose consortium_package_id is package_id."""
    # Parameterized (was string .format interpolation)
    command = """select package_id from jump_account_package where consortium_package_id = %s"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (package_id,))
        rows = cursor.fetchall()
    package_ids = [row["package_id"] for row in rows]
    return package_ids
def save_raw_member_institutions_included_to_db(scenario_id, member_institutions_list, ip):
    """Persist the scenario's included-member-institutions list (as JSON).

    Inserts a new row with updated=sysdate; history is append-only.
    """
    scenario_members = json.dumps(member_institutions_list)
    # Parameterized: the JSON blob (and potentially ip/scenario_id) can
    # contain quote characters that would break — or inject into — an
    # interpolated statement, which is what the old .format() version did.
    command = u"""INSERT INTO jump_consortium_member_institutions
        (scenario_id, updated, ip, scenario_members)
        values (%s, sysdate, %s, %s);"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (scenario_id, ip, scenario_members))
def get_num_papers_from_db():
    """issn_l -> {year: num_papers} for every journal in jump_num_papers."""
    command = "select issn_l, year, num_papers from jump_num_papers"
    with get_db_cursor() as cursor:
        cursor.execute(command)
        rows = cursor.fetchall()

    papers_by_journal = defaultdict(dict)
    for r in rows:
        papers_by_journal[r["issn_l"]][r["year"]] = r["num_papers"]
    return papers_by_journal
def feedback_rows(self):
    """All feedback-request rows where this package is the member package.

    Returns [] immediately when this isn't a feeder package.
    """
    if not self.is_feeder_package:
        return []
    # Parameterized (was string .format interpolation)
    command = """select * from jump_consortium_feedback_requests where member_package_id = %s"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (self.package_id,))
        rows_for_feedback = cursor.fetchall()
    return rows_for_feedback
def get_society_data_from_db():
    """issn_l -> is_society_journal flag, for journals where the flag is set.

    Returned as a defaultdict(list) so unknown issn_l keys yield [] (falsy).
    """
    command = "select issn_l, is_society_journal from jump_society_journals_input where is_society_journal is not null"
    with get_db_cursor() as cursor:
        cursor.execute(command)
        rows = cursor.fetchall()

    lookup_dict = defaultdict(list)
    lookup_dict.update({r["issn_l"]: r["is_society_journal"] for r in rows})
    return lookup_dict
def get_unpaywall_downloads_from_db():
    """issn_l -> full downloads row, for journals that appear in jump_counter."""
    command = "select * from jump_unpaywall_downloads where issn_l in (select distinct issn_l from jump_counter)"
    with get_db_cursor() as cursor:
        cursor.execute(command)
        big_view_rows = cursor.fetchall()
    return {row["issn_l"]: row for row in big_view_rows}
def get_embargo_data_from_db():
    """issn_l -> rounded embargo length for journals with active delayed OA."""
    command = "select issn_l, embargo from journal_delayed_oa_active"
    with get_db_cursor() as cursor:
        cursor.execute(command)
        embargo_rows = cursor.fetchall()
    return {row["issn_l"]: round(row["embargo"]) for row in embargo_rows}
def get_social_networks_data_from_db():
    """issn_l -> ASN-only rate (float) from the mturk ASN rates table."""
    command = """select issn_l, asn_only_rate::float from jump_mturk_asn_rates """
    with get_db_cursor() as cursor:
        cursor.execute(command)
        rows = cursor.fetchall()
    return {r["issn_l"]: r["asn_only_rate"] for r in rows}
def history_blood_sugar(account_id):
    """Summarized blood-sugar readings for the last 31 days."""
    query = '''
        SELECT json_agg(summarize_readings(readings))::jsonb AS response
        FROM readings
        WHERE account_id = %s
          AND reading_timestamp >= (now() - interval '31 day');
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, (account_id,))
        readings = cur.fetchone()['response']
    return success_response({'data': readings})
def history_meals(account_id):
    """Summarized meal history for the last 31 days."""
    query = '''
        SELECT json_agg(summarize_food_history(food_history))::jsonb AS response
        FROM food_history
        WHERE account_id = %s
          AND food_timestamp >= (now() - interval '31 day');
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, (account_id,))
        meals = cur.fetchone()['response']
    return success_response({'data': meals})
def check_if_to_delete(package_id, file):
    """True if this package's upload `file` is queued for deletion.

    (`file` shadows the builtin, but the name is part of the public
    interface so it is kept.)
    """
    # Filter on file in SQL too (the old version fetched every flagged row
    # for the package and re-checked in Python), and use bind parameters
    # instead of string interpolation.
    command = u"""select 1 from jump_raw_file_upload_object
        where package_id = %s and file = %s and to_delete_date is not null"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (package_id, file))
        rows = cursor.fetchall()
    return bool(rows)
def update_notification_email(self):
    """Email address waiting on this scenario's pending update, or None."""
    command = "select email from jump_scenario_computed_update_queue where completed is null and scenario_id='{}'".format(
        self.scenario_id)
    # print command
    with get_db_cursor() as cursor:
        cursor.execute(command)
        rows = cursor.fetchall()
    return rows[0]["email"] if rows else None
def get_counter_journals_by_report_name_from_db(package_id):
    """Distinct-journal counts per (report_version, report_name) for a package."""
    command = """select report_version, report_name, count(distinct issn_l) as num_journals
        from jump_counter
        where package_id='{}'
        group by report_version, report_name """.format(package_id)
    with get_db_cursor() as cursor:
        cursor.execute(command)
        report_rows = cursor.fetchall()
    return report_rows
def consortium_get_issns(scenario_id):
    """Distinct issn_l values computed for a consortium scenario."""
    # Parameterized (was string .format interpolation); also dropped the
    # unused start_time local the original assigned and never read.
    command = """select distinct issn_l from jump_scenario_computed where scenario_id = %s"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (scenario_id,))
        rows = cursor.fetchall()
    return [row["issn_l"] for row in rows]
def stats_insulin(account_id):
    """Summarized insulin doses for the last 48 hours."""
    query = '''
        SELECT json_agg(summarize_doses(doses))::jsonb AS response
        FROM doses
        WHERE account_id = %s
          AND dose_timestamp >= (now() - interval '48 hour');
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, (account_id,))
        doses = cur.fetchone()['response']
    return success_response({'data': doses})
def get_all_rows(cls, oa_column):
    """Load every table row (class columns plus oa_column) as cls instances."""
    q = "select {}, {} from {}".format(cls.__columns__, oa_column,
                                       cls.__tablename__)
    # print q
    with get_db_cursor() as cursor:
        cursor.execute(q)
        rows = cursor.fetchall()
    return [cls(row) for row in rows]
def row_for_feedback(self):
    """The feedback-request row where this scenario is the member scenario.

    Returns [] when this isn't a feedback scenario, and None when no request
    row exists (the original indexed [0] unconditionally and raised
    IndexError in that case).
    """
    if not self.is_feedback_scenario:
        return []
    # Parameterized (was string .format interpolation)
    command = """select * from jump_consortium_feedback_requests where member_scenario_id = %s"""
    with get_db_cursor() as cursor:
        cursor.execute(command, (self.scenario_id,))
        rows_for_feedback = cursor.fetchall()
    if not rows_for_feedback:
        return None
    return rows_for_feedback[0]
def account_email_available(email):
    """Report whether an email address is free to register.

    Raises InvalidUsage when the (lowercased) address already exists.
    """
    with get_db_cursor(commit=True) as cur:
        cur.execute('''
            SELECT email FROM accounts WHERE email = lower(%s)
        ''', (email,))
        existing = cur.fetchone()

    if existing is not None:
        raise InvalidUsage('Email not available.', 'email_not_available')
    return success_response({'data': {'available': email}})
def get_consortium_ids():
    """All (institution, package, scenario) rows for consortium institutions."""
    q = """select institution_id, i.display_name as consortium_name,
        i.old_username as consortium_short_name, p.package_id, p.publisher, s.scenario_id
        from jump_package_scenario s
        join jump_account_package p on p.package_id = s.package_id
        join jump_institution i on i.id = p.institution_id
        where is_consortium = true"""
    with get_db_cursor() as cursor:
        cursor.execute(q)
        consortium_rows = cursor.fetchall()
    return consortium_rows
def consortia_scenario_ids_who_own_this_package(self):
    """Scenario ids of every consortium that lists this package as a member."""
    q = u"""
        select consortium_package_id, scenario_id as consortium_scenario_id
        from jump_consortium_members cm
        join jump_package_scenario ps on cm.consortium_package_id=ps.package_id
        where member_package_id='{}'
        """.format(self.package_id)
    with get_db_cursor() as cursor:
        cursor.execute(q)
        rows = cursor.fetchall()
    return [r["consortium_scenario_id"] for r in rows]
def account_login():
    """Authenticate a login request and return the matching account_id.

    Reads email/password from the request payload; raises InvalidUsage when
    no account matches.

    NOTE(review): this compares the password directly against the stored
    column — i.e. passwords appear to be stored/compared in plaintext.
    Should be replaced with salted hashing (e.g. bcrypt); confirm the schema.
    """
    req = get_request_data()
    with get_db_cursor(commit=True) as cur:
        cur.execute('''
            SELECT account_id FROM accounts
            WHERE email = %(email)s AND password = %(password)s
        ''', req)
        res = cur.fetchone()
    # deliberately the same error for unknown email and wrong password,
    # so callers can't probe which emails exist
    if res is None:
        raise InvalidUsage('Email not found or password invalid.',
                           'email_or_password_invalid')
    else:
        account_id = res['account_id']
    return success_response({'data': {'account_id': account_id}})
def account_signup():
    """Create a new account via the account_signup SQL function.

    Raises InvalidUsage when the function reports failure; otherwise returns
    the new account_id.
    """
    req = get_request_data()
    query = '''
        SELECT account_signup(%(first_name)s, %(last_name)s, %(height)s,
            %(weight)s, %(sex)s, %(dob)s, %(email)s, %(password)s,
            %(diabetes_type)s, %(high_blood_pressure)s, %(pregnant)s,
            %(insulin_tdd)s, %(background_dose)s, %(pre_meal_target)s,
            %(post_meal_target)s) AS response
    '''
    with get_db_cursor(commit=True) as cur:
        cur.execute(query, req)
        result = cur.fetchone()['response']

    if not result['success']:
        raise InvalidUsage(result['message'], result['status'])
    return success_response({'data': {'account_id': result['account_id']}})