def new_entry():
    print("new_entry")
    # Next row id: one more than the number of rows already in the treeview.
    elementcnt = len(tv.get_children()) + 1
    database.insert_row(elementcnt, website_text.get(), username_text.get(),
                        password_text.get(), email_text.get(),
                        company_text.get(), security1_text.get(),
                        security2_text.get(), security3_text.get())
    view_all()
def food():
    if request.method == 'GET':
        return render_template('add_food.html')
    else:
        food_data = dict()
        food_data['name'] = request.form.get('food-name')
        food_data['protein'] = request.form.get('protein')
        food_data['carb'] = request.form.get('carb')
        food_data['fat'] = request.form.get('fat')
        database.insert_row(food_data, 'food')
        list_food_data = database.query_all('food')
        return render_template('add_food.html', list_food_data=list_food_data)
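
# The `database` module is not shown above; the following is a minimal
# sqlite3-backed sketch (an assumption for illustration, not the project's
# actual helper) of what a dict-based insert_row(food_data, 'food') could
# look like, assuming the table's column names match the dict keys:
import sqlite3

DB_PATH = 'nutrition.db'  # hypothetical path; the real module defines its own

def insert_row(data, table):
    # Builds e.g. "INSERT INTO food (name, protein, carb, fat) VALUES (?, ?, ?, ?)"
    columns = ', '.join(data)
    placeholders = ', '.join('?' for _ in data)
    with sqlite3.connect(DB_PATH) as conn:
        conn.execute(f'INSERT INTO {table} ({columns}) VALUES ({placeholders})',
                     tuple(data.values()))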
def test_adding_10000_rows_sequentially_then_100_rows_in_parallel():
    create_table()
    assert row_count() == 0
    for example_row in generate_example_rows(10000):
        insert_row(example_row)
    assert row_count() == 10000
    insert_rows_in_parallel(generate_example_rows(100))
    assert row_count() == 10100
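
# insert_rows_in_parallel is not defined in this snippet; one plausible shape
# for it, sketched here as an assumption, fans the work out over a thread
# pool and relies on insert_row being safe to call concurrently:
from concurrent.futures import ThreadPoolExecutor

def insert_rows_in_parallel(rows, max_workers=8):
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # list() drains the iterator so any worker exception surfaces here.
        list(pool.map(insert_row, rows))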
def index():
    if request.method == 'POST':
        # Normalize the form's ISO date (YYYY-MM-DD) to the compact
        # YYYYMMDD key stored in the database.
        rawdate = request.form.get('date')
        entry_date = datetime.datetime.strptime(rawdate, '%Y-%m-%d')
        entry_date = datetime.datetime.strftime(entry_date, '%Y%m%d')
        database.insert_row(entry_date, 'log_date')
    # GET and POST render the same page, so the query/format logic is shared.
    all_dates = database.query_dates()
    total_nutri = [total_nutri_all_dates(d['entry_date']) for d in all_dates]
    # Re-parse each stored YYYYMMDD key and render it as e.g. "March 07 2021".
    pretty_dates = [
        datetime.datetime.strptime(str(d['entry_date']),
                                   '%Y%m%d').strftime('%B %d %Y')
        for d in all_dates
    ]
    return render_template('home.html', all_dates=pretty_dates,
                           total_nutri=total_nutri)
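
# The two date formats round-trip like this (stdlib only, runnable as-is):
import datetime

raw = '2021-03-07'  # what the form posts
stored = datetime.datetime.strptime(raw, '%Y-%m-%d').strftime('%Y%m%d')
print(stored)   # '20210307'      -> the key written via database.insert_row
pretty = datetime.datetime.strptime(stored, '%Y%m%d').strftime('%B %d %Y')
print(pretty)   # 'March 07 2021' -> the label rendered in home.html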
def main():
    conn = db.create_connection(db.DB_FILE)
    with conn:
        db.create_db(conn)
    while True:
        temperature = read_temperature()
        fever_event = get_fever_event(temperature)
        current_time = get_current_time()
        send_to_firebase(fever_event, current_time)
        write_in_plotly(TEMP_UNDER_THRESHOLD, temperature, current_time)
        with conn:
            db.insert_row(conn, current_time, temperature, fever_event,
                          db.TABLE_NAME)
        log.debug("temperature:{}".format(temperature))
        log.debug("TEMP_UNDER_THRESHOLD:{}".format(TEMP_UNDER_THRESHOLD))
        log.debug("TEMP_OVER_THRESHOLD:{}".format(TEMP_OVER_THRESHOLD))
        log.debug("fever_event:{}".format(fever_event))
        time.sleep(SLEEP_DURATION)
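
# get_fever_event is not shown in this snippet; a minimal sketch, assuming a
# boolean "reading crossed the fever cutoff" convention. The threshold values
# are placeholders, not the project's real configuration; TEMP_UNDER_THRESHOLD
# appears above only as a baseline passed to write_in_plotly.
TEMP_UNDER_THRESHOLD = 35.0   # assumed plot baseline, degrees Celsius
TEMP_OVER_THRESHOLD = 37.5    # assumed fever cutoff, degrees Celsius

def get_fever_event(temperature):
    # True once the reading crosses the fever cutoff.
    return temperature > TEMP_OVER_THRESHOLD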
def save_resource_handler():
    id_token = request.cookies.get("token")
    claims = None
    error_message = None
    try:
        claims = google.oauth2.id_token.verify_firebase_token(
            id_token, firebase_request_adapter)
    except ValueError as exc:
        error_message = str(exc)
    # Bail out if token verification failed; otherwise claims['email']
    # below would raise a TypeError on the None value.
    if claims is None:
        return error_message, 401
    email = claims['email']
    resource_name = request.form["name"]
    description = request.form['description']
    input_type = request.form["input_type"]
    output_type = request.form["output_type"]
    input_language = request.form["input_language"]
    output_language = request.form["output_language"]
    # Text resources arrive as form fields; audio/image resources as uploads.
    input_resource = None
    output_resource = None
    if input_type == "text":
        input_resource = request.form["input_resource"]
    if input_type == "audio" or input_type == "image":
        input_resource = request.files['input_resource'].read()
    if output_type == "text":
        output_resource = request.form["output_resource"]
    if output_type == "audio":
        output_resource = request.files['output_resource'].read()
    database.insert_row(email, resource_name, description, input_type,
                        output_type, input_language, output_language)
    storage.store_file(email, resource_name, "input", input_resource)
    storage.store_file(email, resource_name, "output", output_resource)
    return "Received"
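
# storage.store_file is not shown; a plausible sketch using the
# google-cloud-storage client (the bucket name and object layout here are
# assumptions for illustration, not the project's actual code):
from google.cloud import storage as gcs

BUCKET_NAME = "resources-bucket"  # hypothetical

def store_file(email, resource_name, direction, data):
    # Skip the upload when no resource of this direction was provided.
    if data is None:
        return
    blob = gcs.Client().bucket(BUCKET_NAME).blob(
        f"{email}/{resource_name}/{direction}")
    blob.upload_from_string(data)  # accepts both str and bytes payloads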
def update_db(date, tablename):
    """
    Start by creating a backup of the existing DB. Then update the DB by
    comparing rows in the new CSV export to rows in the existing DB. Add new
    rows from the CSV into the DB, and remove rows from the DB if they do not
    exist in the new CSV.
    """
    config = cfg.get_config()
    rootpath = config['paths']['rootpath']
    csvpath = config['paths']['csvpath']
    clean_csv = date + "_" + "gor_diva_merged_cleaned.csv"

    if not os.path.isfile(os.path.join(rootpath, 'database.db')):
        return

    try:
        # Back up the DB before touching it.
        shutil.copy2(
            os.path.join(rootpath, 'database.db'),
            os.path.join(rootpath, 'database_BKP_' + date + '.db'))

        update_db_msg = "BEGIN DB UPDATE"
        logger.info(update_db_msg)
        print(update_db_msg)

        cca.crosscheck_assets(tablename)

        os.chdir(csvpath)
        with open(clean_csv, mode='r', encoding='utf-8-sig') as c_csv:
            df = pd.read_csv(c_csv, header=0)

        update_count = 0
        update_index = []
        drop_count = 0
        drop_index = []
        insert_count = 0
        insert_index = []
        mismatch_count = 0
        mismatch_index = []
        total_count = 0
        none_count = 0

        for index, row in df.iterrows():
            os.chdir(rootpath)
            guid = str(row['GUID'])
            titletype = str(row['TITLETYPE'])
            datatapeid = str(row['DATATAPEID'])

            update_db_msg_01 = f"Updating DB for (Index, GUID): ({index}, {guid})"
            logger.info(update_db_msg_01)
            print(str(index) + " " + guid)

            # Fetch the DB row by GUID; the CSV index may not match the DB row id.
            db_row_id = db.fetchone_guid(guid)
            if db_row_id is not None:
                db_row = db.select_row(db_row_id[0])
            else:
                db_row_msg = f"No DB row found for GUID {guid}, skipping this row."
                logger.warning(db_row_msg)
                none_count += 1
                continue

            db_datatapeid = db_row[4]
            db_aoid = db_row[24]
            db_titletype = db_row[14]

            if (guid == db_row[1] and db_datatapeid == "NULL"
                    and db_aoid == "NULL"):
                db.update_row("assets", index, row)
                update_count += 1
                update_index.append(index)

            if guid != db_row[1] and db.fetchone_guid(guid) is None:
                db.drop_row('assets', index, guid)
                drop_count += 1
                drop_index.append(index)

            if db_row is None and row['_merge'] == 'both':
                db.insert_row(index, row)
                insert_count += 1
                insert_index.append(index)

            if titletype != db_titletype:
                db.update_column("assets", 'TITLETYPE', titletype, index)
                update_count += 1
                update_index.append(index)

            if guid != db_row[1] and db.fetchone_guid(guid) is not None:
                mismatch_msg = f"Mismatch in the db update: {db_row[1]} != {guid}"
                logger.error(mismatch_msg)
                mismatch_count += 1
                mismatch_index.append(index)
            else:
                nochange_msg = f"No change to {guid} at row index {index}."
                logger.debug(nochange_msg)

            total_count += 1

        update_summary_msg = (
            f"\n"
            f"Update Count: {update_count}\n"
            f"Drop Count: {drop_count}\n"
            f"Insert Count: {insert_count}\n"
            f"Mismatch Count: {mismatch_count}\n"
            f"No Change Count: "
            f"{total_count - (update_count + drop_count + insert_count + mismatch_count)}\n"
            f"Total Count: {total_count}\n")
        index_summary_msg = (
            f"\n"
            f"None Value Count = {none_count}\n"
            f"update index: {update_index}\n"
            f"drop index: {drop_index}\n"
            f"insert index: {insert_index}\n"
            f"mismatch index: {mismatch_index}\n")

        logger.info(update_summary_msg)
        logger.info(index_summary_msg)
        print(update_summary_msg)
        print("")
        print(index_summary_msg)

        logger.info("DB UPDATE COMPLETE")
    except Exception:
        logger.exception("Error updating the DB.")
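
# db.fetchone_guid is called twice per row above but not shown; a minimal
# sqlite3 sketch of its likely shape (the table and column names here are
# assumptions, not the project's actual schema):
import sqlite3

def fetchone_guid(guid, db_path='database.db'):
    # Return (rowid,) for the asset with this GUID, or None if absent.
    with sqlite3.connect(db_path) as conn:
        cur = conn.execute('SELECT rowid FROM assets WHERE guid = ?', (guid,))
        return cur.fetchone()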
def write_data():
    db.insert_row(db.DEFAULT_PATH, request.get_json())
    return 'Success!'
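
# A hypothetical way to exercise the endpoint with Flask's test client,
# assuming write_data is registered as a POST route on `app` (the route
# decorator and URL are not shown in the snippet):
with app.test_client() as client:
    resp = client.post('/write_data', json={'name': 'example', 'value': 42})
    assert resp.get_data(as_text=True) == 'Success!'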