def get_transactions(start_date, end_date, table_name='transactions_alt'):
    """Fetch transactions joined with their FTO queue stage for a date window.

    Parameters:
        start_date, end_date: inclusive bounds for ``transact_date``
            (interpolated into a SQL BETWEEN clause).
        table_name: transactions table to read; defaults to 'transactions_alt'.

    Returns:
        pandas.DataFrame of joined transaction/stage rows.

    Exits the process (after reporting error code '27') if the query fails.

    NOTE(review): table name and dates are interpolated directly into the SQL
    string -- assumed to come from trusted internal callers; confirm.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    get_joined_tables = '''SELECT a.jcn, a.transact_ref_no, a.transact_date, a.processed_date, a.credit_amt_due, a.credit_amt_actual, a.status, a.rejection_reason, a.fto_no, b.stage FROM {} a INNER JOIN fto_queue b ON a.fto_no = b.fto_no WHERE a.transact_date BETWEEN '{}' and '{}';'''.format(
        table_name, start_date, end_date)
    try:
        # Chunked read keeps memory bounded for large date windows.
        gens_transactions = pd.read_sql(get_joined_tables, con=conn, chunksize=1000)
        transactions = pd.concat([gen for gen in gens_transactions])
    except Exception as e:
        er.handle_error(error_code='27', data={})
        sys.exit()
    finally:
        # Fix: original closed the connection twice on success and not at all
        # on the error path; close exactly once on every path.
        conn.close()
    return (transactions)
def insert_ftos(fto_stages, test):
    """Replace the ``fto_queue`` table with the given FTO stages frame.

    Parameters:
        fto_stages: DataFrame with fto_no, fto_type, done, stage columns.
        test: when 0 the transaction is committed; any other value performs
            the write without committing (dry-run semantics).

    On failure: prints the exception, reports error code '3', rolls back the
    transaction, and exits the process.

    NOTE(review): ``to_sql`` writes via ``engine`` (its own pooled
    connection) while ``trans`` was begun on ``conn`` -- the commit/rollback
    here likely does not govern the write; confirm against the DB driver.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    trans = conn.begin()
    try:
        fto_stages.to_sql('fto_queue',
                          con=engine,
                          index=False,
                          if_exists='replace',
                          chunksize=100,
                          dtype={
                              'fto_no': String(100),
                              'fto_type': String(15),
                              'done': SmallInteger(),
                              'stage': String(15)
                          })
        if test == 0:
            trans.commit()
    except Exception as e:
        print(e)
        er.handle_error(error_code='3', data={})
        trans.rollback()
        sys.exit()
def put_new_trainees(new_df):
    """Append new trainee enrolment rows to the ``enrolment_record`` table.

    Parameters:
        new_df: DataFrame of trainee records (id, phone, jcn, ...). Nothing
            is written when it is empty.

    Exits the process (after reporting error code '23') if the insert fails.
    """
    engine = helpers.db_engine()
    # Fix: original opened engine.connect() here and never used or closed
    # it, leaking a pooled connection; to_sql takes the engine directly.
    if not new_df.empty:
        try:
            new_df.to_sql('enrolment_record',
                          if_exists='append',
                          con=engine,
                          index=False,
                          chunksize=100,
                          dtype={'id': Integer(),
                                 'phone': String(50),
                                 'jcn': String(50),
                                 'jc_status': Integer(),
                                 'time_pref': String(50),
                                 'time_pref_label': String(50),
                                 'file_name_s3': String(50),
                                 'file_upload_to_s3_date': String(50),
                                 'breastfeeding': String(50),
                                 'pregnant': String(50),
                                 'children_under6': String(50),
                                 'teenage_girls': String(50),
                                 'nocategory': String(50),
                                 'health_category': String(50),
                                 'insert_date': String(50),
                                 'enrolment_date': String(50),
                                 'pilot': TINYINT(2)})
        except Exception as e:
            er.handle_error(error_code='23', data={})
            sys.exit()
    return
def put_scripts(scripts):
    """Replace the ``scripts`` table with the given call-script frame.

    Parameters:
        scripts: DataFrame of call scripts. Nothing is written when empty.

    Exits the process (after reporting error code '23') if the write fails.
    """
    engine = helpers.db_engine()
    # Fix: original opened engine.connect() here and never used or closed
    # it, leaking a pooled connection; to_sql takes the engine directly.
    if not scripts.empty:
        try:
            scripts.to_sql('scripts',
                           if_exists='replace',
                           con=engine,
                           index=False,
                           chunksize=100,
                           dtype={
                               'id': Integer(),
                               'phone': String(50),
                               'time_pref': String(50),
                               'time_pref_label': String(50),
                               'amount': Integer(),
                               'transact_date': String(50),
                               'rejection_reason': String(50),
                               'day1': String(50),
                               'file_name_s3': String(50),
                               'file_upload_to_s3_date': String(50),
                               'insert_date': String(50)
                           })
        except Exception as e:
            er.handle_error(error_code='23', data={})
            sys.exit()
    return
def send_progress(total, done, progress):
    """Email a scrape progress report; failures are routed to error code '11'."""
    try:
        subject = 'GMA FTO Scrape: Progress Report'
        template = 'There are a total of {} FTOs. The code has done {} FTOs. The code is {} done.'
        helpers.send_email(subject, template.format(total, done, progress))
    except Exception as e:
        er.handle_error(error_code='11', data={})
    return
def test(*args, **kwargs):
    """Run ``func`` with the given arguments and report success as a flag.

    NOTE(review): ``func`` is not defined in this block -- this reads like
    the inner wrapper of a decorator whose outer ``def`` is outside the
    visible chunk; confirm before refactoring.

    Returns:
        1 when func(*args, **kwargs) completes, 0 when it raises (the error
        is reported via er.handle_error with code '1').
    """
    success = 1
    try:
        func(*args, **kwargs)
    except Exception as e:
        success = 0
        er.handle_error(error_code='1')
    return (success)
def make_script_primary_key():
    """Add a composite primary key (id, file_upload_to_s3_date) to ``scripts``.

    Exits the process (after reporting error code '25') if the ALTER fails,
    e.g. when the key already exists.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    try:
        add_primary_key = "ALTER TABLE scripts ADD PRIMARY KEY(id, file_upload_to_s3_date(50));"
        # Fix: execute on the explicit connection (engine.execute is the
        # deprecated implicit-connection form, and conn was otherwise an
        # unused leak); matches update_ftos's conn.execute usage.
        conn.execute(add_primary_key)
    except Exception as e:
        er.handle_error(error_code='25', data={})
        sys.exit()
    finally:
        # Always release the connection, even on the sys.exit path.
        conn.close()
    return
def test(*args, **kwargs):
    """Run ``func`` with the given arguments, returning its result and a flag.

    NOTE(review): ``func`` is not defined in this block -- like the sibling
    wrapper above it appears to be a decorator inner function whose outer
    ``def`` is not visible in this chunk; confirm before refactoring.

    Returns:
        (data, success): data is func's return value (None if it raised);
        success is 1 on completion, 0 on exception (reported via
        er.handle_error with code '1').
    """
    data = None
    success = 1
    try:
        data = func(*args, **kwargs)
    except Exception as e:
        success = 0
        er.handle_error(error_code='1')
    return (data, success)
def update_ftos(engine, scraped_ftos, target_ftos):
    """Mark scraped and material FTOs done, rewrite ``fto_queue``, report progress.

    Parameters:
        engine: SQLAlchemy engine for the tracker database.
        scraped_ftos: DataFrame of FTOs found by the scrape (keyed by fto_no).
        target_ftos: DataFrame of FTOs the tracker is targeting.

    Returns:
        (total, done, progress): queue size, completed count, and done/total.

    Exits the process (after reporting error code '10' and rolling back) on
    failure.
    """
    conn = engine.connect()
    trans = conn.begin()
    try:
        # Right join excludes FTOs scraped for previous districts/blocks
        # that are not in the current target set.
        all_ftos = pd.merge(scraped_ftos, target_ftos, how='right',
                            on=['fto_no'], indicator=True)
        # FTOs present in both frames have scraped transactions -> done.
        all_ftos.loc[(all_ftos['_merge'] == 'both'), 'done'] = 1
        all_ftos.drop(['_merge'], axis=1, inplace=True)
        # Material FTOs yield no transactions in the wage-list scrape, so
        # they are marked done unconditionally.
        all_ftos.loc[(all_ftos['fto_type'] == 'Material'), 'done'] = 1
        all_ftos.to_sql('fto_queue',
                        con=conn,
                        index=False,
                        if_exists='replace',
                        chunksize=100,
                        dtype={
                            'fto_no': String(100),
                            'fto_type': String(15),
                            'done': SmallInteger(),
                            'stage': String(15)
                        })
        conn.execute("ALTER TABLE fto_queue ADD PRIMARY KEY (fto_no(100));")
        trans.commit()
    except Exception as e:
        er.handle_error(error_code='10', data={})
        trans.rollback()
        # Fix: the original fell through here and raised NameError on
        # all_ftos when the merge itself failed; exit explicitly, matching
        # the rest of the module's error handling.
        sys.exit()
    finally:
        conn.close()
    total = len(all_ftos)
    done = len(all_ftos.loc[all_ftos['done'] == 1])
    progress = done / total
    return (total, done, progress)
def join_camp_data(transactions, df_field_data):
    """Attach camp enrolment details to transactions by job-card number.

    Outer-merges on 'jcn' and drops rows only present in ``transactions``
    ('left_only'), keeping matched rows and enrolment-only rows so that
    enrolled people with no transactions remain visible.

    Parameters:
        transactions: DataFrame with jcn, transact/credit/status/fto columns.
        df_field_data: enrolment DataFrame with jcn, id, phone, time_pref...

    Returns:
        The merged DataFrame including the '_merge' indicator column.
        Reports error code '29' (without exiting) when the result is empty.
    """
    df = pd.merge(transactions, df_field_data, on='jcn', how='outer', indicator=True)
    # Exclude transactions that have no matching enrolment record.
    df = df.loc[df['_merge'] != 'left_only']
    df = df[['jcn', 'transact_date', 'processed_date', 'credit_amt_due',
             'credit_amt_actual', 'status', 'rejection_reason', 'fto_no',
             'stage', 'id', 'phone', 'time_pref', 'time_pref_label', '_merge']]
    # Fix: the original re-assigned df.columns to the identical list of
    # names right after this selection -- a dead no-op, removed.
    if df.empty:
        er.handle_error(error_code='29', data={})
    return (df)
def prep_csv(stage):
    """Load ./output/<stage>.csv and tag every row with the stage name.

    When the CSV exists but has no data, reports error code '2' and falls
    back to an empty frame carrying the expected column layout.
    """
    csv_path = './output/{}.csv'.format(stage)
    try:
        frame = pd.read_csv(csv_path)
        frame['stage'] = stage
    except pd.errors.EmptyDataError as e:
        er.handle_error(error_code='2', data={'stage': stage})
        frame = pd.DataFrame([], columns=[
            'block_code', 'district_code', 'fto_no', 'state_code',
            'transact_date', 'url', 'stage'
        ])
    return (frame)
def download_transactions(transactions, to_dropbox, to_s3, file_to, file_from):
    """Upload the transactions file to Dropbox and/or S3.

    Parameters:
        transactions: unused; kept for backward-compatible call signature.
        to_dropbox, to_s3: 1 to perform the corresponding upload.
        file_to: destination path on the remote service.
        file_from: local source file path.

    Upload failures are reported (codes '8' Dropbox, '9' S3) without exiting.
    """
    # Fix: original opened a DB engine/connection it never used or closed,
    # leaking a pooled connection on every call; this function only uploads.
    if to_dropbox == 1:
        try:
            helpers.upload_dropbox(file_from, file_to)
        except Exception as e:
            er.handle_error(error_code='8', data={})
    if to_s3 == 1:
        try:
            helpers.upload_s3(file_from, file_to)
        except Exception as e:
            er.handle_error(error_code='9', data={})
def insert_ftos_history(fto_stages, test):
    """Append newly-seen (fto_no, stage) pairs to ``fto_queue_history``.

    Parameters:
        fto_stages: DataFrame of FTO stages; an 'action_time' column is
            stamped with today's date before writing.
        test: when 0 the transaction is committed; otherwise left
            uncommitted (dry-run).

    On write failure: reports error code '3', rolls back, and exits.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    trans = conn.begin()
    fto_stages['action_time'] = str(datetime.datetime.now().date())
    try:
        fto_stages_history = pd.read_sql('fto_queue_history', con=engine)
    except Exception as e:
        print(e)
        # Fix: the original continued with fto_stages_history unbound and
        # crashed with NameError in anti_join; an unreadable/missing history
        # table now means "no prior history", so everything is appended.
        fto_stages_history = None
    if fto_stages_history is not None:
        # Keep only (fto_no, stage) pairs not already recorded.
        fto_stages = update.anti_join(fto_stages, fto_stages_history,
                                      on=['fto_no', 'stage'])
        # NOTE(review): anti_join appears to merge with _x/_y suffixes;
        # recover the stamped action_time column -- confirm against helper.
        fto_stages = fto_stages[['fto_no', 'stage', 'action_time_x']]
        fto_stages.rename(columns={'action_time_x': 'action_time'}, inplace=True)
    try:
        fto_stages.to_sql('fto_queue_history',
                          con=engine,
                          index=False,
                          if_exists='append',
                          chunksize=100,
                          dtype={
                              'fto_no': String(100),
                              'stage': String(15),
                              'action_time': String(50)
                          })
        if test == 0:
            trans.commit()
    except Exception as e:
        er.handle_error(error_code='3', data={})
        trans.rollback()
        sys.exit()
def get_camp_data(pilot):
    """Fetch enrolment records for one pilot cohort.

    Parameters:
        pilot: pilot identifier interpolated into the WHERE clause
            (assumed internal/trusted -- confirm callers).

    Returns:
        DataFrame of enrolment rows (id, phone, jcn, jc_status,
        health_category, time_pref, time_pref_label).

    Exits the process (after reporting error code '26') on query failure.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    get_field_data = '''SELECT id, phone, jcn, jc_status, health_category, time_pref, time_pref_label FROM enrolment_record WHERE pilot = {};'''.format(
        pilot)
    try:
        # Chunked read keeps memory bounded for large cohorts.
        gens_field = pd.read_sql(get_field_data, con=conn, chunksize=1000)
        df_field = pd.concat([gen for gen in gens_field])
    except Exception as e:
        er.handle_error(error_code='26', data={})
        sys.exit()
    finally:
        # Fix: original closed twice on success and never on failure;
        # close exactly once on every path.
        conn.close()
    return (df_field)
def make_camp_primary_key():
    """Ensure ``enrolment_record`` has a primary key on ``id``.

    Checks for an existing key first and only creates one when absent.
    Exits via error code '24' if the check fails, '25' if creation fails.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    try:
        key_exists = update.check_primary_key(engine, 'enrolment_record')
    except Exception as e:
        er.handle_error(error_code='24', data={})
        sys.exit()
    if key_exists == 0:
        try:
            update.create_primary_key(engine, "enrolment_record", "id")
        except Exception as e:
            er.handle_error(error_code='25', data={})
            sys.exit()
    return
def merge_transactions(transactions, banks, accounts, file_from):
    """Left-join bank and account details onto transactions and save as CSV.

    The indicator columns 'banks_merge' and 'accounts_merge' record whether
    each row matched.  Merge failures are reported as error code '6' and
    write failures as '7'; neither aborts the process.
    """
    try:
        transactions = pd.merge(transactions,
                                banks,
                                how='left',
                                on=['ifsc_code'],
                                indicator='banks_merge')
        transactions = pd.merge(transactions,
                                accounts,
                                how='left',
                                on=['jcn', 'acc_no', 'ifsc_code'],
                                indicator='accounts_merge')
    except Exception as e:
        er.handle_error(error_code='6', data={})
    try:
        transactions.to_csv(file_from, index=False)
    except Exception as e:
        er.handle_error(error_code='7', data={})
def get_transactions(start_date, end_date):
    """Load windowed transactions plus the full banks and accounts tables.

    Returns:
        (transactions, banks, accounts) as DataFrames.

    Exits the process (after reporting error code '5') if any read fails.
    """
    engine = helpers.db_engine()
    conn = engine.connect()
    # Local renamed from 'get_transactions', which shadowed the function.
    sql_transactions = "SELECT * FROM transactions WHERE transact_date BETWEEN '{}' AND '{}';".format(
        start_date, end_date)
    sql_banks = "SELECT * FROM banks;"
    sql_accounts = "SELECT * from accounts;"
    try:
        transactions = pd.read_sql(sql_transactions, con=conn)
        banks = pd.read_sql(sql_banks, con=conn)
        accounts = pd.read_sql(sql_accounts, con=conn)
        conn.close()
    except Exception as e:
        er.handle_error(error_code='5', data={})
    conn.close()
    return (transactions, banks, accounts)
def process_log():
    """Upload a log file to Dropbox and S3, optionally emailing its size.

    CLI arguments: file_from (source path), file_to (destination prefix,
    suffixed with today's timestamp and '.csv'), log_size (1 to email the
    size report).  The local file is deleted once the uploads are attempted.
    """
    parser = argparse.ArgumentParser(description='Dropbox upload parser')
    parser.add_argument('file_from', type=str, help='Source file path')
    parser.add_argument('file_to', type=str, help='Destination file path')
    parser.add_argument(
        'log_size',
        type=int,
        help='Whether to send an update about the log size or not')
    args = parser.parse_args()
    source_path = args.file_from
    dest_path = args.file_to + '_' + str(datetime.today()) + '.csv'
    if args.log_size == 1:
        try:
            size_kb = os.path.getsize(source_path) / 1024
            subj = "GMA Update 3: Log size report"
            msg = "The size of the log is {} KB.".format(size_kb)
            helpers.send_email(subj, msg)
        except Exception as e:
            er.handle_error(error_code='12', data={})
    try:
        helpers.upload_dropbox(source_path, dest_path)
    except Exception as e:
        er.handle_error(error_code='13', data={})
    try:
        helpers.upload_s3(source_path, dest_path)
    except Exception as e:
        er.handle_error(error_code='14', data={})
    os.unlink(source_path)
def test_error_handling():
    """Push a synthetic error through the handler; delivery proves the path works."""
    payload = {'traceback': 'This is a test. If you see this the test has passed.'}
    er.handle_error(error_code='1', data=payload)