def copy_db(db_test='gma_bank_db_test', db_prod='gma_bank_db'):
    """Mirror the production schema into the test database.

    Reflects every table name from the production engine and issues a
    ``CREATE TABLE IF NOT EXISTS <table> LIKE <db_prod>.<table>`` on the
    test connection.  On any failure the transaction is rolled back and
    the process exits.

    Args:
        db_test: name of the test database.  NOTE(review): currently
            unused in the SQL — the test engine presumably already points
            at it; confirm.
        db_prod: name of the production database to copy definitions from.
    """
    engine_prod = helpers.db_engine()
    engine_test = helpers.db_engine(test=1)
    tables = reflection.Inspector.from_engine(engine_prod).get_table_names()
    conn = engine_test.connect()
    trans = conn.begin()
    print(tables)
    try:
        for table in tables:
            try:
                db_prod_sql = db_prod + "." + table
                sql = "CREATE TABLE IF NOT EXISTS {} LIKE {};".format(table, db_prod_sql)
                conn.execute(sql)
                print('Table {} done'.format(table))
            except Exception as e:
                print(e)
                trans.rollback()
                sys.exit()
        trans.commit()
    finally:
        # Fix: the original leaked the test connection on every path.
        conn.close()
def insert_ftos(fto_stages, test):
    """Replace the ``fto_queue`` table with the given FTO stage frame.

    Commits only when ``test == 0``; on failure logs the exception,
    reports error code '3', rolls back and exits.

    NOTE(review): ``to_sql`` is handed ``con=engine``, not the connection
    that owns ``trans`` — the explicit transaction therefore does not
    cover the write.  Confirm whether that is intentional.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    trans = conn.begin()
    column_types = {
        'fto_no': String(100),
        'fto_type': String(15),
        'done': SmallInteger(),
        'stage': String(15),
    }
    try:
        fto_stages.to_sql('fto_queue', con=engine, index=False,
                          if_exists='replace', chunksize=100,
                          dtype=column_types)
        if test == 0:
            trans.commit()
    except Exception as e:
        print(e)
        er.handle_error(error_code='3', data={})
        trans.rollback()
        sys.exit()
def get_camp_table_from_db():
    """Return a DataFrame of every ``id`` in ``enrolment_record``."""
    engine = helpers.db_engine()
    ids = pd.read_sql("SELECT id FROM enrolment_record;", con=engine)
    return ids
def put_new_trainees(new_df):
    """Append new trainee rows to ``enrolment_record``.

    No-op on an empty frame.  Reports error code '23' and exits on
    failure.

    Args:
        new_df: DataFrame whose columns match the enrolment_record schema.
    """
    engine = helpers.db_engine()
    # Fix: the original opened (and leaked) a connection it never used.
    if new_df.empty:
        return
    column_types = {
        'id': Integer(),
        'phone': String(50),
        'jcn': String(50),
        'jc_status': Integer(),
        'time_pref': String(50),
        'time_pref_label': String(50),
        'file_name_s3': String(50),
        'file_upload_to_s3_date': String(50),
        'breastfeeding': String(50),
        'pregnant': String(50),
        'children_under6': String(50),
        'teenage_girls': String(50),
        'nocategory': String(50),
        'health_category': String(50),
        'insert_date': String(50),
        'enrolment_date': String(50),
        'pilot': TINYINT(2),
    }
    try:
        new_df.to_sql('enrolment_record', if_exists='append', con=engine,
                      index=False, chunksize=100, dtype=column_types)
    except Exception as e:
        er.handle_error(error_code='23', data={})
        sys.exit()
    return
def check_if_camp_table_exists():
    """Return whether the ``enrolment_record`` table exists in the DB."""
    db_name = helpers.sql_connect()['db']
    engine = helpers.db_engine()
    exists = update.check_table_exists(engine, db_name, 'enrolment_record')
    return exists
def get_transactions(start_date, end_date, table_name='transactions_alt'):
    """Fetch transactions joined with their FTO queue stage.

    Args:
        start_date, end_date: inclusive ``transact_date`` bounds.
        table_name: transactions table to read (default 'transactions_alt';
            interpolated into the SQL — must come from trusted code).

    Returns:
        DataFrame of transaction columns plus ``stage``.

    Reports error code '27' and exits on any read failure.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    # Fix: the dates were string-formatted into the SQL; bind them as
    # parameters instead.  Table names cannot be bound.
    query = '''SELECT a.jcn, a.transact_ref_no, a.transact_date,
        a.processed_date, a.credit_amt_due, a.credit_amt_actual,
        a.status, a.rejection_reason, a.fto_no, b.stage
        FROM {} a INNER JOIN fto_queue b ON a.fto_no = b.fto_no
        WHERE a.transact_date BETWEEN %(start)s AND %(end)s;'''.format(table_name)
    try:
        chunks = pd.read_sql(query, con=conn, chunksize=1000,
                             params={'start': start_date, 'end': end_date})
        transactions = pd.concat([chunk for chunk in chunks])
    except Exception as e:
        er.handle_error(error_code='27', data={})
        sys.exit()
    finally:
        # Fix: the original closed twice on success and not at all on
        # failure; a finally closes exactly once on every path.
        conn.close()
    return transactions
def main():
    """FTO update pipeline: load scraped and target FTOs, reconcile,
    then report progress."""
    engine = helpers.db_engine()
    scraped = get_scraped_ftos(engine)
    targets = get_target_ftos(engine)
    total, done, progress = update_ftos(engine, scraped, targets)
    send_progress(total, done, progress)
def put_scripts(scripts):
    """Replace the ``scripts`` table with the given DataFrame.

    No-op on an empty frame.  Reports error code '23' and exits on
    failure.
    """
    engine = helpers.db_engine()
    # Fix: the original opened (and leaked) a connection it never used.
    if scripts.empty:
        return
    column_types = {
        'id': Integer(),
        'phone': String(50),
        'time_pref': String(50),
        'time_pref_label': String(50),
        'amount': Integer(),
        'transact_date': String(50),
        'rejection_reason': String(50),
        'day1': String(50),
        'file_name_s3': String(50),
        'file_upload_to_s3_date': String(50),
        'insert_date': String(50),
    }
    try:
        scripts.to_sql('scripts', if_exists='replace', con=engine,
                       index=False, chunksize=100, dtype=column_types)
    except Exception as e:
        er.handle_error(error_code='23', data={})
        sys.exit()
    return
def check_data_from_db(file_name_s3_source, file_name_s3_target, var):
    """Compare an S3 CSV against rows already in ``enrolment_record``.

    Args:
        file_name_s3_source: S3 object key of the CSV to load.
        file_name_s3_target: ``file_name_s3`` value to filter DB rows by.
        var: extra column to select alongside ``id`` (interpolated into
            the SQL — must come from trusted code, not user input).

    Returns:
        (inner-merged frame with ``_merge`` indicator, DB frame, CSV frame)
    """
    engine = helpers.db_engine()
    df = pd.read_csv(helpers.get_object_s3(file_name_s3_source))
    # Fix: bind the file name as a parameter instead of formatting it
    # into the SQL string; the column name cannot be bound.
    query = "SELECT id, {} FROM enrolment_record where file_name_s3 = %(fname)s".format(var)
    df_db = pd.read_sql(query, con=engine,
                        params={'fname': file_name_s3_target})
    df_check = pd.merge(df, df_db, how='inner', on=['id'], indicator=True)
    return df_check, df_db, df
def main():
    """CLI entry point: push ./output/fto_queue.csv into the FTO queue.

    Takes one positional argument, ``if_exists`` ('append' or
    'replace'), and forwards it to ``put_fto_nos``.
    """
    parser = argparse.ArgumentParser(
        description='Append to or replace the queue?')
    parser.add_argument('if_exists', type=str, help='Append or replace?')
    args = parser.parse_args()
    csv_path = os.path.abspath('./output/fto_queue.csv')
    engine = helpers.db_engine()
    put_fto_nos(engine, csv_path, args.if_exists)
def check_static_nrega_script(df, static_script_code="P0 P1 P2 P3 Q A P0 Z1 Z2", ind='got_static_nrega'):
    """Flag rows whose id received the static NREGA script.

    Left-merges the ids from ``scripts`` whose ``day1`` equals
    ``static_script_code`` into ``df``; the merge-indicator column is
    named ``ind``.  Prints and exits on a read failure.
    """
    engine = helpers.db_engine()
    try:
        # Fix: bind the script code as a parameter instead of formatting
        # it into the SQL string.
        static_nrega = pd.read_sql(
            "SELECT id FROM scripts WHERE day1 = %(code)s",
            con=engine, params={'code': static_script_code})
    except Exception as e:
        print(e)
        sys.exit()
    df = pd.merge(df, static_nrega, how='left', on='id', indicator=ind)
    return df
def db_execute(branch):
    """Create the scraper's database schema.

    ``branch == 1`` creates the branch-level transactions table,
    ``branch == 0`` the bank-level one; every other table is created
    unconditionally.
    """
    engine = helpers.db_engine()
    transaction_creators = {
        1: schema.create_branch_transactions,
        0: schema.create_bank_transactions,
    }
    creator = transaction_creators.get(branch)
    if creator is not None:
        creator(engine)
    for step in (schema.create_wage_list,
                 schema.create_accounts,
                 schema.create_banks,
                 schema.create_fto_queue,
                 schema.create_fto_current_stage,
                 schema.send_keys_to_file):
        step(engine)
def check_welcome_script(df, welcome_code="P0 P1 P2 00", welcome_code_alt="P0 P1 P2 00 P0"):
    """Flag rows whose id received either welcome-script variant.

    Left-merges matching ids from ``scripts`` into ``df`` with a
    ``got_welcome`` merge-indicator column.  Prints and exits on a read
    failure.
    """
    # Fix: bind both codes as parameters instead of formatting them into
    # the SQL string.
    query = "SELECT id FROM scripts where day1 = %(code)s OR day1 = %(alt)s;"
    engine = helpers.db_engine()
    try:
        welcome_script = pd.read_sql(
            query, con=engine,
            params={'code': welcome_code, 'alt': welcome_code_alt})
    except Exception as e:
        print(e)
        sys.exit()
    df = pd.merge(df, welcome_script, how='left', on='id',
                  indicator='got_welcome')
    return df
def open_spider(self, spider):
    """Scrapy pipeline hook: before the alternate-transactions spider
    runs, announce the scrape by email and clear the tables it will
    repopulate.

    Fix: the first parameter was misspelled ``selif``; Scrapy passes the
    pipeline instance positionally, so renaming it to ``self`` is safe.
    """
    if spider.name == 'fto_branch':
        subject = 'GMA Update: The alternate transactions scrape is starting...'
        message = ''
        send_email(subject, message)
        engine = db_engine()
        tables = ['transactions_alt', 'wage_lists_alt', 'banks_alt']
        for table in tables:
            delete_data(engine, table)
    return
def make_script_primary_key():
    """Add the (id, file_upload_to_s3_date) primary key to ``scripts``.

    Reports error code '25' and exits if the ALTER fails (e.g. the key
    already exists).
    """
    engine = helpers.db_engine()
    # Fix: the original opened (and leaked) a connection it never used;
    # the ALTER already runs through engine.execute directly.
    add_primary_key = "ALTER TABLE scripts ADD PRIMARY KEY(id, file_upload_to_s3_date(50));"
    try:
        engine.execute(add_primary_key)
    except Exception as e:
        er.handle_error(error_code='25', data={})
        sys.exit()
    return
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine and binds a live connection to the alembic context
    before running the migrations inside a transaction.
    """
    engine = helpers.db_engine()
    with engine.connect() as conn:
        context.configure(connection=conn,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the alembic context with a URL only — no DBAPI needed;
    calls to context.execute() emit the SQL to the script output.

    NOTE(review): ``helpers.db_engine()`` appears to return an Engine,
    while offline mode normally takes a URL — confirm whether
    ``str(engine.url)`` was intended.
    """
    url = helpers.db_engine()
    context.configure(url=url,
                      target_metadata=target_metadata,
                      literal_binds=True)
    with context.begin_transaction():
        context.run_migrations()
def add_fto_type(fto_stages):
    """Attach each FTO's type from ``fto_queue`` to the stages frame.

    Returns the frame restricted to fto_no/done/fto_type/stage.  The
    rename of ``fto_type_y`` implies both frames carry an ``fto_type``
    column before the merge.
    """
    get_fto_types = "SELECT fto_no, fto_type FROM fto_queue;"
    engine = helpers.db_engine()
    conn = engine.connect()
    trans = conn.begin()
    try:
        fto_queue = pd.read_sql(get_fto_types, con=engine)
    except Exception as e:
        trans.rollback()
        print(e)
        # Fix: the original fell through after the rollback and crashed
        # with a NameError on the undefined fto_queue; exit explicitly.
        sys.exit()
    finally:
        # Fix: close the connection (the original leaked it).
        conn.close()
    fto_stages = pd.merge(fto_stages, fto_queue, on=['fto_no'], how='left')
    fto_stages.rename(columns={'fto_type_y': 'fto_type'}, inplace=True)
    fto_stages = fto_stages[['fto_no', 'done', 'fto_type', 'stage']]
    return fto_stages
def upgrade():
    """Seed the ``states`` table with Chhattisgarh and the ``blocks``
    table with its four blocks."""
    engine = helpers.db_engine()
    metadata = MetaData(bind=engine, reflect=True)
    op.bulk_insert(metadata.tables['states'],
                   [{'state_code': 33, 'state_name': 'Chhattisgarh'}])
    block_rows = [
        {"block_code": 15, "block_name": "Arang"},
        {"block_code": 12, "block_name": "Dharsiwa"},
        {"block_code": 8, "block_name": "Abhanpur"},
        {"block_code": 7, "block_name": "Tilda"},
    ]
    op.bulk_insert(metadata.tables['blocks'], block_rows)
def download_transactions(transactions, to_dropbox, to_s3, file_to, file_from):
    """Upload a transactions file to Dropbox and/or S3.

    Args:
        transactions: unused here; kept for interface compatibility.
        to_dropbox, to_s3: 1 to upload to the respective destination.
        file_to: destination path/key.
        file_from: local source path.

    Upload failures are reported (error codes '8' / '9') but do not
    abort the other upload.
    """
    # Fix: the original created an engine and a connection it never used,
    # leaking the connection.
    if to_dropbox == 1:
        try:
            helpers.upload_dropbox(file_from, file_to)
        except Exception as e:
            er.handle_error(error_code='8', data={})
    if to_s3 == 1:
        try:
            helpers.upload_s3(file_from, file_to)
        except Exception as e:
            er.handle_error(error_code='9', data={})
def write_new_calls(new_df_field):
    """Append new call rows to the ``call_report`` table and return them.

    Fix: the body referenced ``df_report``, a name not defined in this
    function — a NameError at runtime unless a module global of that
    name exists (none is visible here).  The parameter ``new_df_field``
    is the frame being written, so it is used and returned instead.
    A failed write is logged but not fatal.
    """
    data_types = {
        'recipient_id': Integer(),
        'recipient_phone': String(50),
        'identifier': String(50),
        'script_pushed': String(50),
        'calltime': String(50),
        'call_status': String(50),
        'call_duration': String(50),
        'call_complete': Integer(),
        'gender_classification': String(50),
        'confidence_score': Float(),
        'audio_files': String(50),
    }
    engine = helpers.db_engine()
    try:
        new_df_field.to_sql('call_report', if_exists='append', con=engine,
                            index=False, chunksize=100, dtype=data_types)
    except Exception as e:
        print(e)
    return new_df_field
def insert_ftos_history(fto_stages, test):
    """Append newly-seen (fto_no, stage) pairs to ``fto_queue_history``.

    Stamps each row with today's date, anti-joins against the existing
    history, and appends the remainder.  Commits only when ``test == 0``;
    on write failure reports error code '3', rolls back and exits.

    NOTE(review): ``to_sql`` uses ``con=engine``, so the transaction held
    on ``conn`` does not cover the write — confirm intent.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    trans = conn.begin()
    fto_stages['action_time'] = str(datetime.datetime.now().date())
    try:
        fto_stages_history = pd.read_sql('fto_queue_history', con=engine)
    except Exception as e:
        print(e)
        # Fix: the original fell through after a failed read and crashed
        # with a NameError on fto_stages_history; exit explicitly.
        sys.exit()
    fto_stages = update.anti_join(fto_stages, fto_stages_history,
                                  on=['fto_no', 'stage'])
    fto_stages = fto_stages[['fto_no', 'stage', 'action_time_x']]
    fto_stages.rename(columns={'action_time_x': 'action_time'}, inplace=True)
    try:
        fto_stages.to_sql('fto_queue_history', con=engine, index=False,
                          if_exists='append', chunksize=100,
                          dtype={'fto_no': String(100),
                                 'stage': String(15),
                                 'action_time': String(50)})
        if test == 0:
            trans.commit()
    except Exception as e:
        er.handle_error(error_code='3', data={})
        trans.rollback()
        sys.exit()
def get_camp_data(pilot):
    """Fetch enrolment-camp rows for the given pilot flag.

    Returns:
        DataFrame with id, phone, jcn, jc_status, health_category,
        time_pref and time_pref_label.

    Reports error code '26' and exits on any read failure.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    # Fix: bind pilot as a query parameter instead of formatting it into
    # the SQL string.
    get_field_data = '''SELECT id, phone, jcn, jc_status, health_category,
        time_pref, time_pref_label FROM enrolment_record
        WHERE pilot = %(pilot)s;'''
    try:
        chunks = pd.read_sql(get_field_data, con=conn, chunksize=1000,
                             params={'pilot': pilot})
        df_field = pd.concat([chunk for chunk in chunks])
    except Exception as e:
        er.handle_error(error_code='26', data={})
        sys.exit()
    finally:
        # Fix: the original closed twice on success and not at all on
        # failure; a finally closes exactly once on every path.
        conn.close()
    return df_field
def upgrade():
    """Seed the ``fto_stages`` lookup table with the stage codes."""
    engine = helpers.db_engine()
    metadata = MetaData(bind=engine, reflect=True)
    stage_rows = [
        {"stage_id": 6, "stage": "P"},
        {"stage_id": 2, "stage": "fst_sig"},
        {"stage_id": 1, "stage": "fst_sig_not"},
        {"stage_id": 8, "stage": "pb"},
        {"stage_id": 7, "stage": "pp"},
        {"stage_id": 5, "stage": "sb"},
        {"stage_id": 4, "stage": "sec_sig"},
        {"stage_id": 3, "stage": "sec_sig_not"},
    ]
    op.bulk_insert(metadata.tables['fto_stages'], stage_rows)
def make_camp_primary_key():
    """Ensure ``enrolment_record`` has ``id`` as its primary key.

    Checks for an existing key first, creating one only if absent.
    Reports error code '24' (check failed) or '25' (creation failed) and
    exits on error.
    """
    engine = helpers.db_engine()
    # Fix: the original opened (and leaked) a connection it never used.
    try:
        has_primary_key = update.check_primary_key(engine, 'enrolment_record')
    except Exception as e:
        er.handle_error(error_code='24', data={})
        sys.exit()
    try:
        if has_primary_key == 0:
            update.create_primary_key(engine, "enrolment_record", "id")
    except Exception as e:
        er.handle_error(error_code='25', data={})
        sys.exit()
    return
def get_transactions(start_date, end_date):
    """Fetch the transactions (date-bounded), banks and accounts tables.

    Returns:
        (transactions, banks, accounts) DataFrames.

    Reports error code '5' and exits on any read failure.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    # Fix: bind the dates as parameters instead of formatting them into
    # the SQL string.
    transactions_sql = ("SELECT * FROM transactions WHERE transact_date "
                        "BETWEEN %(start)s AND %(end)s;")
    try:
        transactions = pd.read_sql(transactions_sql, con=conn,
                                   params={'start': start_date,
                                           'end': end_date})
        banks = pd.read_sql("SELECT * FROM banks;", con=conn)
        accounts = pd.read_sql("SELECT * from accounts;", con=conn)
    except Exception as e:
        er.handle_error(error_code='5', data={})
        # Fix: exit like the sibling loaders do — the original fell
        # through and hit a NameError at the return statement.
        sys.exit()
    finally:
        # Fix: close exactly once on every path (original double-closed
        # on success).
        conn.close()
    return transactions, banks, accounts
def test_db_engine():
    """Smoke test: the DB engine factory constructs without raising."""
    helpers.db_engine()
def stage_names_execute():
    """Create the stage-name lookup tables.

    Fix: the original built an engine it never passed anywhere (unused
    local).  NOTE(review): ``db_schema.create_stage_table_names()`` takes
    no arguments here — confirm it obtains its own connection, or whether
    the engine was meant to be passed in.
    """
    db_schema.create_stage_table_names()
def get_report_table_from_db():
    """Return a DataFrame of every ``identifier`` in ``reports``."""
    engine = helpers.db_engine()
    identifiers = pd.read_sql("SELECT identifier FROM reports;", con=engine)
    return identifiers