def api_get_df_proba_sm():
    '''API that returns prediction summary in dataframe in JSON

    Returns:
        j_df_proba_sm (JSON of pandas.DataFrame)
    '''
    db = DataBase([], DIR_DB)
    # || is the SQL concatenation operator; + would coerce the text columns
    # to numbers in SQLite. Keep only the latest update per symbol.
    q = '''
        SELECT * FROM proba
        WHERE sym || datetime_update IN
            (SELECT sym || MAX(datetime_update)
               FROM proba
              GROUP BY sym)
    '''
    df = pd.read_sql(q, db.conn)
    # highest probability seen per symbol
    df1 = (df.sort_values('proba', ascending=False)
             .drop_duplicates(subset=['sym'], keep='first')
             .rename(columns={'proba': 'proba_max'}))
    df1 = df1[['sym', 'proba_max', 'datetime_update']]
    # most recent prediction per symbol
    df2 = (df.sort_values('datetime', ascending=False)
             .drop_duplicates(subset=['sym'], keep='first')
             .rename(columns={'datetime': 'datetime_last',
                              'proba': 'proba_last'}))
    df2 = df2[['sym', 'datetime_last', 'proba_last']]
    df_proba_sm = pd.merge(df1, df2, how='left', on='sym')
    j_df_proba_sm = df_proba_sm.to_json(orient='split')
    db.close()
    return j_df_proba_sm
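# A minimal sketch of how a caller might decode the orient='split' payload
# back into a DataFrame. Hypothetical usage: the URL and route name are
# assumptions, since the Flask route decorators are not shown in this excerpt.
from io import StringIO
import pandas as pd
import requests

resp = requests.get('http://localhost:5000/api_get_df_proba_sm')
# orient='split' round-trips losslessly through pd.read_json
df_proba_sm = pd.read_json(StringIO(resp.text), orient='split')
print(df_proba_sm.head())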
def test_reload_from_backup(backup_db: Path) -> None:
    """This test requires preparing the backup by calling create_db_backup()"""
    delete_files(DB_ROOT)
    for path in backup_db.iterdir():
        (DB_ROOT / path.name).write_bytes(path.read_bytes())
    db = DataBase()
    assert db.num_tables() == 1
    assert db.get_tables_names() == ['Students']
    students = db.get_table('Students')
    assert students.count() == 100
def api_get_df_proba():
    '''API that returns all predictions in dataframe in JSON'''
    global dir_db
    db = DataBase([], dir_db)
    # || is the SQL concatenation operator; + would coerce the text columns
    # to numbers in SQLite. Keep only the latest update per symbol.
    q = '''
        SELECT * FROM proba
        WHERE sym || datetime_update IN
            (SELECT sym || MAX(datetime_update)
               FROM proba
              GROUP BY sym)
    '''
    df_proba = pd.read_sql(q, db.conn)
    j_df_proba = df_proba.to_json(orient='split')
    db.close()
    return j_df_proba
def create_db_backup() -> Path:
    DB_BACKUP_ROOT.mkdir(parents=True, exist_ok=True)
    delete_files(DB_BACKUP_ROOT)
    db = DataBase()
    create_students_table(db, 100)
    for path in DB_ROOT.iterdir():
        path.rename(DB_BACKUP_ROOT / path.name)
    return DB_BACKUP_ROOT
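# The backup_db fixture consumed by test_reload_from_backup is not defined in
# this excerpt; a minimal sketch of one way to provide it, assuming
# create_db_backup() may run once per test session:
import pytest

@pytest.fixture(scope='session')
def backup_db() -> Path:
    # Build the backup once and hand its directory to any test that needs it.
    return create_db_backup()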
def api_get_df_c():
    '''API that returns full cooked price dataframe in JSON for input symbol(s)

    Returns:
        j_df_c (JSON of pandas.DataFrame)
    '''
    global tup_model
    global date_str
    global live_data
    global target_profit
    global target_loss
    global buffer_seconds
    global pause
    pause = 1  # signal the background prediction thread to wait while this request is served
    time.sleep(2)
    db = DataBase([], DIR_DB)
    j_data = request.get_json()
    ls_sym = json.loads(j_data)['ls_sym']
    time_str = json.loads(j_data)['time_str']
    try:
        ls_df = []
        for sym in ls_sym:
            time.sleep(buffer_seconds)
            df = get_df_c(sym, date_str, live_data, db, target_profit, target_loss)
            df = df[df['datetime'].dt.strftime('%H%M') <= time_str]
            df_proba = get_df_proba(df, tup_model)
            if not df_proba.empty:
                df = pd.merge(df, df_proba[['sym', 'datetime', 'proba']],
                              how='left', on=['sym', 'datetime'])
            else:
                df['proba'] = None
            ls_df.append(df)
        df_c = pd.concat(ls_df)
        j_df_c = df_c.to_json(orient='split')
    except Exception as e:
        print(ERROR_EXCEPTION_SYM.format(sym, type(e).__name__, e.args))
        j_df_c = pd.DataFrame().to_json(orient='split')
    db.close()
    pause = 0  # let the prediction thread resume
    return j_df_c
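# Because the handler calls json.loads() on the result of request.get_json(),
# the client must send a JSON-encoded *string* as the request body. A sketch
# of a matching call; the route name and local URL are assumptions:
import json
import requests

payload = json.dumps({'ls_sym': ['AAPL', 'TSLA'], 'time_str': '1030'})
resp = requests.post('http://localhost:5000/api_get_df_c', json=payload)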
def api_get_df_c():
    '''API that returns full price dataframe in JSON for input symbol(s)'''
    global dir_db
    global tup_model
    global date_str
    global live_data
    db = DataBase([], dir_db)
    j_data = request.get_json()
    ls_sym = json.loads(j_data)['ls_sym']
    time_str = json.loads(j_data)['time_str']
    target_profit = 0.011
    target_loss = -0.031
    try:
        ls_df = []
        for sym in ls_sym:
            df = get_df_c(sym, date_str, live_data, db, target_profit, target_loss)
            df = df[df['datetime'].dt.strftime('%H%M') <= time_str]
            df_proba = get_df_proba(df, tup_model)
            if not df_proba.empty:
                df = pd.merge(df, df_proba[['sym', 'datetime', 'proba']],
                              how='left', on=['sym', 'datetime'])
            else:
                df['proba'] = None
            ls_df.append(df)
        df_c = pd.concat(ls_df)
        j_df_c = df_c.to_json(orient='split')
        db.close()
        return j_df_c
    except Exception as e:
        print(sym, type(e).__name__, e.args)
        # traceback.print_exc()
        db.close()
        return pd.DataFrame().to_json(orient='split')
def update_predictions():
    '''Runs an iteration of model predictions on selected symbols
    and saves output in database
    '''
    db = DataBase([], DIR_DB)
    df_sym = get_df_sym_filter(db, LS_SEC, LS_IND)
    c_error = collections.Counter()
    ls_skip = []
    while True:
        dt_error = {}
        idx_not_skip = ~df_sym['sym'].isin(ls_skip)  # ~ inverts a boolean mask; unary - is not supported
        for i, tup in tqdm(df_sym[idx_not_skip].iterrows(),
                           total=df_sym[idx_not_skip].shape[0]):
            while pause:  # wait while an API request is being served
                time.sleep(BUFFER_SECONDS)
            sym = tup['sym']
            try:
                time.sleep(BUFFER_SECONDS)
                df_c = get_df_c(sym, DATE_STR, LIVE_DATA, db,
                                TARGET_PROFIT, TARGET_LOSS)
                df_proba = get_df_proba(df_c, tup_model)
                if not df_proba.empty:
                    df_proba.to_sql('proba', db.conn, if_exists='append', index=False)
            except Exception as e:
                dt_error[sym] = ERROR_EXCEPTION.format(type(e).__name__, e)
                # traceback.print_exc()
                c_error.update([sym])
        if dt_error:
            num_runs = df_sym.shape[0]
            for sym in dt_error:
                print(ERROR_SUMMARY.format(sym, dt_error[sym]))
            print(ERROR_PCT.format(len(dt_error), num_runs,
                                   len(dt_error) / num_runs))
        # skip symbols with too many errors
        ls_skip = [k for k, v in c_error.items() if v > ERROR_THRESHOLD]
        print(MSG_SKIP.format(ls_skip))
def execute(self):
    statement = self.convert_to_statement()
    db = DataBase()
    cursor = db.cursor
    try:
        cursor.execute("BEGIN")
        cursor.execute(statement)
        results_sql = cursor.fetchall()
        if len(results_sql) == 0:
            results_sql = []
        else:
            results_sql = results_sql[0][0]
        if results_sql is None:
            results_sql = []
        cursor.execute("COMMIT")
    except Exception as error:
        cursor.execute("ROLLBACK")  # end the aborted transaction so the connection stays usable
        return {'error': error.pgerror, 'code': error.pgcode}
    finally:
        cursor.close()
    return results_sql
def update_predictions():
    '''Runs an iteration of model predictions on selected symbols
    and saves output in database
    '''
    global dir_db
    global tup_model
    global j_df_proba
    global date_str
    global live_data
    global sym_limit
    global buffer_seconds
    db = DataBase([], dir_db)
    ls_df_proba = []
    target_profit = 0.011
    target_loss = -0.031
    df_sym = get_df_sym_filter(db)
    df_sym = df_sym.iloc[:sym_limit]
    while True:
        for i, tup in df_sym.iterrows():
            if i % 100 == 0:
                print(i, df_sym.shape[0])
            sym = tup['sym']
            try:
                df_c = get_df_c(sym, date_str, live_data, db,
                                target_profit, target_loss)
                df_proba = get_df_proba(df_c, tup_model)
                if not df_proba.empty:
                    df_proba.to_sql('proba', db.conn, if_exists='append', index=False)
            except Exception as e:
                print(sym, type(e).__name__, e.args)
                # traceback.print_exc()
        print(f'Update complete, waiting for {buffer_seconds} seconds till next update...')
        time.sleep(buffer_seconds)
def test_create(new_db: DataBase) -> None:
    db = new_db
    assert db.num_tables() == 0
    with pytest.raises(Exception):
        _ = db.get_table('Students')
    create_students_table(db)
    assert db.num_tables() == 1
    assert db.get_tables_names() == ['Students']
    students = db.get_table('Students')
    add_student(students, 111, Birthday=dt.datetime(1995, 4, 28))
    assert students.count() == 1
    students.delete_record(1_000_111)
    assert students.count() == 0
    with pytest.raises(ValueError):
        students.delete_record(key=1_000_111)
    db1 = DataBase()
    assert db1.num_tables() == 1
    db1.delete_table('Students')
    assert db1.num_tables() == 0
import os
import logging
import matplotlib.pyplot as plt
from requests.exceptions import ConnectionError
from src.db import DataBase
from src.utils_stocks import get_curr_price
from src.utils_general import get_yahoo_link
from src.utils_general import get_google_link
from src.utils_general import suppress_stdout
logging.getLogger().setLevel(logging.CRITICAL)
# demo config
demo = 0
f_demo_df_c = os.path.join(os.getcwd(), 'data', 'demo', 'df_c.parquet')
f_demo_df_proba_sm = os.path.join(os.getcwd(), 'data', 'demo', 'df_proba_sm.parquet')
dir_db = os.path.join(os.getcwd(), 'data', 'db')
if demo:
    dir_db = os.path.join(os.getcwd(), 'data', 'demo')
db = DataBase([], dir_db=dir_db)
# system strings
ERROR_EXCEPTION = '{} - {}'
ERROR_CONNECTION = 'Connection error! Try again in a few seconds.'
TEXT_PAGE_TITLE = 'Five Minute Midas'
TEXT_TITLE = '''# Five Minute Midas 📈
### Predicting profitable day trading positions for *{}*.
---'''
TEXT_ADVICE = '\n ### Try changing the **Profit Probability.**'
TEXT_SYMBOLS_FOUND = '### {} of {} symbols selected.{}\n---'
TEXT_FIG = '''## {} - {} {}
#### {} - {} {}
'''
TEXT_FIG_MULTI = '## All Symbols Summary'
TEXT_LINKS = '''[G-News]({}), [Y-Finance]({})'''
        ON stocks(sym, quote_type)''',
    #stocks_error
    '''CREATE TABLE IF NOT EXISTS stocks_error(
        sym TEXT)''',
    #proba
    '''CREATE TABLE IF NOT EXISTS proba(
        sym TEXT
        ,datetime TEXT
        ,my_index INTEGER
        ,proba REAL
        ,datetime_update TEXT)''',
]
db = DataBase(ls_init_str, DIR_DB)
db_demo = DataBase(ls_init_str, DIR_DB_DEMO)

#################
# Update stocks #
#################
if UPDATE_STOCKS:
    print(MSG_STOCKS)
    ls_sym = get_ls_sym()
    q = '''
        SELECT sym FROM stocks
         UNION ALL
        SELECT sym FROM stocks_error
        --UNION ALL SELECT sym FROM stocks WHERE summary IS NOT NULL
    '''
    ls_sym_exclude = pd.read_sql(q, db.conn)['sym'].to_list()
    ls_sym = [x for x in ls_sym if x not in ls_sym_exclude]
import os
import yfinance as yf
from tqdm import tqdm
from src.db import DataBase
import matplotlib.pyplot as plt
from configparser import ConfigParser
from src.utils_beeps import beeps
from src.utils_stocks import get_df_c
from src.utils_general import get_df_sym
from src.utils_general import plot_divergences
from src.utils_date import get_ls_date_str_from_db
# directories
DIR_DB = os.path.join(os.getcwd(), 'data', 'db')
DIR_TRAIN = os.path.join(os.getcwd(), 'data', 'train')
F_CFG = os.path.join(os.getcwd(), 'config.ini')
# objects
db = DataBase([], DIR_DB)
cfg = ConfigParser()
cfg.read(F_CFG)
# constants
ERROR_EXCEPTION = 'Error: Exception found ({}: {})'
ERROR_SUMMARY = '{} - {}'
ERROR_PCT = 'Errors: {}/{} {:.3f}'
MSG_DATE_RANGE = 'Creating df_train for date range: {} to {}'
MSG_SAVED = '{} saved!'
FILENAME_TRAIN = 'df_train_{}.parquet'
CFG_SECTION = 'get_train_data'
LIVE_DATA = cfg.getint(CFG_SECTION, 'LIVE_DATA')
TARGET_PROFIT = cfg.getfloat(CFG_SECTION, 'TARGET_PROFIT')
TARGET_LOSS = cfg.getfloat(CFG_SECTION, 'TARGET_LOSS')
DATE_START = cfg.get(CFG_SECTION, 'DATE_START')
DATE_END = cfg.get(CFG_SECTION, 'DATE_END')
        ON stocks(sym, quote_type)''',
    #stocks_error
    '''CREATE TABLE IF NOT EXISTS stocks_error(
        sym TEXT)''',
    #proba
    '''CREATE TABLE IF NOT EXISTS proba(
        sym TEXT
        ,datetime TEXT
        ,my_index INTEGER
        ,proba REAL
        ,datetime_update TEXT)''',
]
db = DataBase(ls_init_str, dir_db)
db_demo = DataBase(ls_init_str, dir_db_demo)

#################
# Update stocks #
#################
print('1. Update stocks')
ls_sym = get_ls_sym()
q = '''
    SELECT sym FROM stocks
     UNION ALL
    SELECT sym FROM stocks_error
    --SELECT sym FROM stocks WHERE summary IS NOT NULL
'''
ls_sym_exclude = pd.read_sql(q, db.conn)['sym'].to_list()
ls_sym = [x for x in ls_sym if x not in ls_sym_exclude]
# extract and load
def create_students_table(db: DataBase, num_students: int = 0) -> DBTable:
    table = db.create_table('Students', STUDENT_FIELDS, 'ID')
    for i in range(num_students):
        add_student(table, i)
    return table
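# STUDENT_FIELDS and add_student are used throughout these tests but not
# defined in this excerpt. A hypothetical sketch inferred from usage: keys
# such as 1_000_111 for index 111 imply a 1_000_000 offset on ID, and the
# field/record types depend on the db module's API, which is not shown here.
import datetime as dt

STUDENT_FIELDS = [
    ('ID', int),
    ('First', str),
    ('Last', str),
    ('Birthday', dt.datetime),
]

def add_student(table: DBTable, index: int, **fields) -> None:
    record = {'ID': 1_000_000 + index, 'First': f'First{index}',
              'Last': f'Last{index}', 'Birthday': None}
    record.update(fields)
    table.insert_record(record)  # assumed insert method; adjust to the real API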
def new_db() -> Generator[DataBase, None, None]:
    db = DataBase()
    for table in db.get_tables_names():
        db.delete_table(table)
    delete_files(DB_ROOT)
    yield db
def test_bad_key(new_db: DataBase) -> None:
    with pytest.raises(ValueError):
        _ = new_db.create_table('Students', STUDENT_FIELDS, 'BAD_KEY')
class Launcher:
    def __init__(self):
        self.args = self.create_parser().parse_args()
        self.db = DataBase()
        self.utils = Utils()
        self.face_recognition = FaceRecognition()
        self.camera = Camera(
            int(self.args.camera) if str(self.args.camera).isdigit()
            else self.args.camera)
        self.lock = mraa.Gpio(20)
        self.green_light = mraa.Gpio(32)
        self.is_door_opened = mraa.Gpio(26)
        self.quit = mraa.Gpio(18)
        self.remember_new_face = mraa.Gpio(16)
        self.pin = mraa.Gpio(12)
        self.exit_code = 0
        self.log_folder = None
        self.create_logger()

    @staticmethod
    def create_parser():
        parser = DefaultHelpParser(
            prog='fr3onn',
            description='Face Recognition with 3D imaging, '
                        'OpenCV and Neural Nets',
            formatter_class=argparse.RawTextHelpFormatter,
            add_help=True)
        parser.add_argument('-c', '--camera', metavar='CAMERA',
                            required=False, default=0,
                            help='Device index, 0 by default')
        parser.add_argument('-d', '--db_dir', metavar='DB_DIR',
                            required=False, default=None,
                            help='Path to database, <FR3ONN_DIR>/db by default')
        parser.add_argument('-v', '--version', action='version',
                            help='Show version and exit', version=__version__)
        return parser

    def create_logger(self):
        self.log_folder = os.path.join(os.getcwd(), 'logs',
                                       self.utils.get_formatted_datetime())
        init_logger(self.log_folder)

    def header(self):
        log.info(self.utils.line_double)
        log.info('Face Recognition with 3D imaging, OpenCV and Neural Nets')
        log.info(self.utils.line_double)
        log.info('Log folder: {}'.format(self.log_folder))
        log.info('Python: {}'.format(sys.executable))
        log.info(self.utils.line_double)

    def open_lock(self):
        self.lock.write(1)
        time.sleep(1)
        self.lock.write(0)

    def green_light_on(self):
        self.green_light.write(1)

    def green_light_off(self):
        self.green_light.write(0)

    def main(self):
        try:
            self.header()
            self.db.set_db_dir(self.args.db_dir)
            self.face_recognition.initialize_face_encodings(
                self.db.get_all_persons())
            self.lock.dir(mraa.DIR_OUT)
            self.green_light.dir(mraa.DIR_OUT)
            self.is_door_opened.dir(mraa.DIR_IN)
            self.quit.dir(mraa.DIR_IN)
            self.remember_new_face.dir(mraa.DIR_IN)
            self.pin.dir(mraa.DIR_OUT)
            self.pin.write(1)
            process_this_frame = True
            while True:
                frame = self.camera.get_frame()
                if process_this_frame:
                    name = self.face_recognition.recognize(frame)
                    if name:
                        log.info(self.utils.line_double)
                        log.info('**Access PROVIDED** to {}'.format(
                            self.db.get_formatted_person_name(name)))
                        self.green_light_on()
                        if not self.is_door_opened.read():
                            self.open_lock()
                        log.info(self.utils.line_double)
                    else:
                        log.info('**Access DENIED**')
                        self.green_light_off()
                process_this_frame = not process_this_frame
                if not self.remember_new_face.read() and frame is not None:
                    log.info('Starting to add you to database...')
                    name = 'Registered User'
                    file_name = self.db.add_person(name, frame)
                    self.face_recognition.add_new_face_encoding(file_name)
                    while not self.remember_new_face.read():
                        time.sleep(0.1)
                if not self.quit.read() and frame is not None:
                    log.info('Tumbler switch was pressed. Quit.')
                    break
        except KeyboardInterrupt:
            logger.switch_to_summary()
            log.info(self.utils.line_single)
            log.error('PROGRAM WAS INTERRUPTED')
            log.info(self.utils.line_single)
            self.exit_code = -1
        except Exception as ex:
            logger.switch_to_summary()
            log.info(self.utils.line_single)
            log.error('Something went wrong **FAILED**:\n{}'.format(ex))
            log.error('Error: {}'.format(traceback.format_exc()))
            log.info(self.utils.line_single)
            self.exit_code = -2
        return self.exit_code
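# The original entry point for Launcher is not shown in this excerpt; a
# hypothetical one, assuming the module-level names it uses (sys, log, etc.)
# are already imported:
if __name__ == '__main__':
    sys.exit(Launcher().main())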
if __name__ == '__main__':
    db = DataBase([], dir_db)
    db.execute('DELETE FROM proba')
    x = threading.Thread(target=update_predictions, daemon=True)
    x.start()
    app.run(debug=False, host='0.0.0.0')
import os
import logging
from configparser import ConfigParser
from src.db import DataBase
from src.utils_general import suppress_stdout
logging.getLogger().setLevel(logging.CRITICAL)
# directories
DIR_DB = os.path.join(os.getcwd(), 'data', 'db')
DIR_DEMO = os.path.join(os.getcwd(), 'data', 'demo')
F_CFG = os.path.join(os.getcwd(), 'config.ini')
# constants and objects
cfg = ConfigParser()
cfg.read(F_CFG)
CFG_SECTION = 'deploy_webapp'
IS_DEMO = cfg.getint(CFG_SECTION, 'IS_DEMO')
F_DEMO_DF_C = os.path.join(DIR_DEMO, 'df_c.parquet')
F_DEMO_DF_PROBA_SM = os.path.join(DIR_DEMO, 'df_proba_sm.parquet')
DATI_OLD = '19930417_0000'
if IS_DEMO:
    db = DataBase([], dir_db=DIR_DEMO)
else:
    db = DataBase([], dir_db=DIR_DB)
# system strings
ERROR_EXCEPTION = '{} - {}'
ERROR_CONNECTION = 'Connection error! Try again in a few seconds.'
TEXT_PAGE_TITLE = 'Five Minute Midas'
TEXT_TITLE = '''# Five Minute Midas 📈
### Predicting profitable day trading positions for *{}*.
---'''
TEXT_ADVICE = '\n ### Try changing the **Profit Probability.**'
TEXT_SYMBOLS_FOUND = '### {} of {} symbols selected.{}\n---'
TEXT_FIG = '''## {} - {} {}
#### {} - {} {}
'''
if __name__ == '__main__':
    db = DataBase([], DIR_DB)
    db.execute('DELETE FROM proba')
    x = threading.Thread(target=update_predictions, daemon=True)
    x.start()
    app.run(debug=False, host='0.0.0.0')
    x.join()