def get_stock_squeeze_list():
    """Scan every symbol in the symbol DB for a squeeze breakout.

    Symbols with more than 90 rows of data (~9 months fetched) that
    `breaking_out_of_squeeze` flags are collected and written to
    results/squeeze_list_stocks.json, then a notification e-mail is sent.
    Per-symbol errors are printed and skipped (best-effort scan).
    """
    symbols_df = get_symbol_db()
    squeeze_list = []
    for symbol in symbols_df['symbol']:
        try:
            df = get_stock_data_db(symbol, num_months_ago=9)
            # normalise the single-letter OHLC column names the DB uses
            df.rename(columns={'o': 'open', 'h': 'high',
                               'l': 'low', 'c': 'close'}, inplace=True)
            if len(df) > 90 and breaking_out_of_squeeze(df):
                print(f"{symbol} is coming out the squeeze")
                squeeze_list.append(symbol)
        except Exception as e:
            # one bad symbol must not abort the whole scan
            print(e)
            continue
    if squeeze_list:
        results_path = f"{Path(__file__).parent.absolute()}/results/squeeze_list_stocks.json"
        with open(results_path, 'w') as fp:
            json.dump(squeeze_list, fp)
        # FIX: mail only after the file is actually written -- previously the
        # "successfully generated" e-mail went out before json.dump ran, so a
        # failed dump still announced success.
        send_eri_mail('*****@*****.**',
                      f'stocks squeeze list successfully generated at {datetime.now()}',
                      'squeezed')
def cron_stocks():
    """Run the full stocks pipeline, then e-mail a completion notice."""
    for step in (get_symbols, update_data_db, apply_the_filters):
        step()
    send_eri_mail(recipient='*****@*****.**',
                  message_="<p>cron stocks completed</p>",
                  subject='cron update')
def cron_crypto():
    """Run the three crypto pipeline steps in order, then e-mail a notice."""
    pipeline = (get_all, save_top_coins, get_crypto_squeeze_list)
    for step_no, step in enumerate(pipeline, start=1):
        print(f'running...cron #{step_no}')
        step()
    send_eri_mail(recipient='*****@*****.**',
                  message_="<p>cron crypto completed</p>",
                  subject='cron update')
def save_files_to_db():
    """Load every local ``*-1d-data.zip`` candle file into the ``binance`` table.

    The first file replaces the table, subsequent files append.  Progress is
    printed per file and e-mailed every 1000 files.
    """
    data_dir = Path(__file__).parent.absolute() / 'data'
    count = 0
    for archive in data_dir.glob('*-1d-data.zip'):
        count += 1
        # file names look like SYMBOL-1d-data.zip
        ticker = Path(archive).stem.split('-')[0]
        candles = pd.read_csv(archive, parse_dates=['timestamp'],
                              index_col='timestamp')
        candles['interval'] = '1d'
        candles['symbol'] = ticker
        mode = 'replace' if count == 1 else 'append'
        pg_db.df_to_db(candles, name='binance', if_exists=mode, index=True)
        progress = f'finished {count} symbols'
        print(progress)
        if count % 1000 == 0:
            send_eri_mail('*****@*****.**', progress, 'files 2 db progress')
def fn(*args, **kwargs):
    """Call ``f``, retrying up to ``self.tries`` times on ``self.exceptions``.

    Each failure is printed and e-mailed; the back-off before the next attempt
    grows linearly (``self.delay * attempt``).  After the final failure the
    last exception is re-raised.
    """
    exception = None
    for i in range(self.tries):
        try:
            return f(*args, **kwargs)
        except self.exceptions as e:
            print("Retry, exception: " + str(e))
            send_eri_mail('*****@*****.**',
                          "Retry, exception: " + str(e), 'algo error')
            exception = e
            # FIX: only sleep when another attempt follows -- previously the
            # back-off also ran after the last try, delaying the raise below.
            if i + 1 < self.tries:
                time.sleep(self.delay * (1 + i))
    # if no success after tries, raise last exception
    print(f'unsuccessful after {self.tries} attempts', f.__name__, exception)
    raise exception
def func_wrapper(*args, **kwargs):
    """Run ``func``; on any exception, log it and e-mail an HTML traceback."""
    try:
        return func(*args, **kwargs)
    except Exception as err:
        trace = log_traceback(err)
        logger.exception('Exception..')
        # log_traceback may return a list of lines or a single object
        body = '<br/> '.join(trace) if isinstance(trace, list) else repr(trace)
        send_eri_mail('*****@*****.**', message_=body,
                      subject='algo102 error', message_type='html')
        print(err)
def get_crypto_squeeze_list():
    """Scan cached Binance daily candles for squeeze breakouts.

    Builds candidate pairs by suffixing each CoinMarketCap symbol with the
    USDT/BTC/ETH quote assets, reads each pair's ``data/<pair>-1d-data.zip``
    (missing files are skipped), and writes breakouts to
    results/squeeze_list.json, then e-mails a notification.
    """
    symbols_file = f"{Path(__file__).parent.absolute()}/results/all_tickers_cmc.json"
    # fetch & cache the CMC ticker universe on first run
    if not os.path.isfile(symbols_file):
        top_coins = top_coin_cmc()
        with open(symbols_file, 'w') as fp:
            json.dump(top_coins['data'], fp)
    # FIX: read_json's second positional argument is `orient`, not a file
    # mode -- the original passed 'r', which is not a valid orient value.
    symbols_df = pd.read_json(symbols_file)
    squeeze_list = []
    base_symbols = ['USDT', 'BTC', 'ETH']
    binance_symbols = [{'symbol': symbol + base_symbol, 'quote_symbol': symbol}
                       for base_symbol in base_symbols
                       for symbol in symbols_df['symbol']
                       if symbol != base_symbol]
    for item in binance_symbols:
        symbol = item['symbol']
        try:
            df = pd.read_csv(f'data/{symbol}-1d-data.zip')
            if len(df) > 200 and breaking_out_of_squeeze(df):
                print(f"{symbol} is coming out the squeeze")
                squeeze_list.append(
                    dict(symbol=symbol, quote_symbol=item['quote_symbol']))
        except FileNotFoundError:
            # no cached candles for this pair -- skip it
            continue
    if squeeze_list:
        results_path = f"{Path(__file__).parent.absolute()}/results/squeeze_list.json"
        with open(results_path, 'w') as fp:
            json.dump(squeeze_list, fp)
        # FIX: mail success only after the results file is written
        send_eri_mail('*****@*****.**',
                      f'crypto squeeze list successfully generated at {datetime.now()}',
                      'squeezed')
def get_stock_squeeze_list():
    """Scan all NYSE/NASDAQ/AMEX tickers (Yahoo data) for squeeze breakouts.

    Breakout symbols are written to results/squeeze_list.json and a
    notification e-mail is sent.  Returns True.
    """
    squeeze_list = []
    symbols = gt.get_tickers(NYSE=True, NASDAQ=True, AMEX=True)
    for symbol in symbols:
        try:
            df = MyYahoo.get_stock_data_db(symbol)
            if len(df) > 120 and breaking_out_of_squeeze(df):
                print(f"{symbol} is coming out the squeeze")
                squeeze_list.append(dict(symbol=symbol))
        except Exception as e:
            # ROBUSTNESS FIX: previously one failing symbol aborted the whole
            # multi-thousand-ticker scan; skip it like the finnhub variant does.
            print(symbol, e)
            continue
    if squeeze_list:
        results_path = f"{Path(__file__).parent.absolute()}/results/squeeze_list.json"
        with open(results_path, 'w') as fp:
            json.dump(squeeze_list, fp)
        # mail only after the results file is actually written
        send_eri_mail('*****@*****.**',
                      f'yahoo stock squeeze list successfully generated at {datetime.now()}',
                      'squeezed')
    return True
def forecast(symbols, min_points=120):
    """Fit a Prophet model per symbol and save a 182-day forecast CSV.

    Symbols with <= min_points rows in stocks_finn_hub are skipped.  Weekend
    rows are dropped from the forecast horizon (markets are closed).  A
    summary (also e-mailed) is returned.

    :param symbols: iterable of ticker symbols to forecast
    :param min_points: minimum history rows required to fit a model
    :return: summary message string
    """
    err_symbols = []
    for symbol in symbols:
        try:
            df = pg_db.query_df(
                f"SELECT * FROM stocks_finn_hub WHERE symbol='{symbol}'")
            # FIX: guard clause with an accurate message -- the original
            # printed 'skipped {symbol}' when it was actually processing, and
            # 'skipping {symbol}' after completing it.
            if len(df) <= min_points:
                print(f'skipping {symbol}')
                continue
            # Prophet expects 'ds' (date) and 'y' (value) columns
            df['ds'] = df['date'].dt.date
            df['y'] = df['c']
            model_prophet = Prophet(seasonality_mode='additive')
            model_prophet.add_seasonality(name='monthly', period=30.5,
                                          fourier_order=5)
            model_prophet.fit(df)
            df_future = model_prophet.make_future_dataframe(periods=182)
            # drop Saturdays/Sundays from the forecast horizon
            weekend_index_f = df_future[
                df_future['ds'].dt.dayofweek >= 5].index
            df_future.drop(weekend_index_f, inplace=True)
            df_pred = model_prophet.predict(df_future)
            df_pred.to_csv(f'{APP_PATH}/fbprophet/csv_forecast/{symbol}.zip',
                           compression='zip', index=False)
            print(f'completed {symbol}')
        except Exception as e:
            # FIX: include the exception so failures are diagnosable
            print(f'error on {symbol}', e)
            err_symbols.append(symbol)
    msg = f'completed {len(symbols)} stocks'
    # FIX: separator so the summary does not read 'stockserror on [...]'
    msg += f'; error on {err_symbols}'
    send_eri_mail('*****@*****.**', message_=msg,
                  subject='finhubb fbprophet progress', message_type='html')
    return msg
def cron_yahoo():
    """Run the yahoo pipeline entry point and e-mail a completion notice."""
    main()
    send_eri_mail(recipient='*****@*****.**',
                  message_="<p>cron yahoo completed</p>",
                  subject='cron update')
def apply_the_filters(start_row=0, use_forecast=False, pc_higher_sp=10, min_to_52w_low=1.3, min_52w_high=0.75):
    """Screen symbols against three trend/strength conditions and persist results.

    Conditions per symbol:
      1. price > SMA50 > SMA150 > SMA200 (uptrend stacking)
      2. price/52w-low > min_to_52w_low AND price/52w-high > min_52w_high
      3. all priceRelativeToS&P500 {4,13,26}-week metrics >= pc_higher_sp
    Writes per-symbol SMA/condition columns back onto the symbol frame,
    appends fundamental metrics, saves CSV snapshots, and loads the final
    frame into the 'biz_fin' table.  Failures per symbol are skipped.

    :param start_row: row index to resume scanning from
    :param use_forecast: screen the forecast CSV universe instead of all symbols
    :param pc_higher_sp: threshold for the relative-to-S&P metrics (condition 3)
    :param min_to_52w_low: min ratio of current price to the 52-week low
    :param min_52w_high: min ratio of current price to the 52-week high
    """
    global df_symbol
    # %% 0 get all symbols
    df_symbols = get_symbols()
    df_forecast = pd.read_csv(
        '/home2/eproject/veehuen/python/algo102/fbprophet/growth_stocks.csv')
    # %%
    # choose the screening universe
    df = df_forecast if use_forecast else df_symbols
    df_metric_list = []
    # pre-seed result columns so df.loc[i, ...] assignments below always land
    df['condition_1'] = False
    df['condition_2'] = False
    df['condition_3'] = False
    df['sma_50'] = None
    df['sma_150'] = None
    df['sma_200'] = None
    # df['52WeekHigh'] = None
    # df['52WeekLow'] = None
    # df['priceRelativeToS&P50013Week'] = None
    # df['priceRelativeToS&P50026Week'] = None
    # df['priceRelativeToS&P5004Week'] = None
    # %% 1 get current price per symbol
    filtered_symbol = []
    for i, row in df[start_row:].iterrows():
        symbol = row['symbol']
        try:
            conditions = [False] * 3
            # if i % 1500 == 0 and i > 0:
            #     break
            print(i, symbol, datetime.now())
            # newest 200 daily rows; 200 needed for the longest SMA window
            sql = f"SELECT * FROM stocks_finn_hub WHERE symbol='{symbol}' order by DATE DESC LIMIT 200"
            df_symbol = pg_db.query_df(sql)
            # skip if data less than 200 samples
            if len(df_symbol) < 200:
                continue
            # re-sort ascending so rolling() and iloc[-1] see latest last
            df_symbol.index = df_symbol['date'].dt.date
            df_symbol.sort_index(inplace=True)
            df_symbol[f'sma_50'] = df_symbol['c'].rolling(50).mean()
            df_symbol['sma_150'] = df_symbol['c'].rolling(150).mean()
            df_symbol['sma_200'] = df_symbol['c'].rolling(200).mean()
            current = df_symbol.iloc[-1]['c']
            sma_50 = df_symbol.iloc[-1]['sma_50']
            sma_150 = df_symbol.iloc[-1]['sma_150']
            sma_200 = df_symbol.iloc[-1]['sma_200']
            df.loc[i, 'sma_50'] = sma_50
            df.loc[i, 'sma_150'] = sma_150
            df.loc[i, 'sma_200'] = sma_200
        except Exception as e:
            # any DB/data failure: skip this symbol entirely
            print(symbol, e)
            continue
        try:
            # condition 1: stacked moving averages (classic uptrend template)
            if current > sma_50 > sma_150 > sma_200:
                conditions[0] = True
                df.loc[i, 'condition_1'] = True
        except TypeError:
            # SMA values may still be None -- treat as not passing
            continue
        try:
            # fundamentals snapshot; cached to disk as zipped JSON
            bs = get_basic_financials(symbol, 'price')
            data_folder = '/home2/eproject/vee-h-phan.com/algo102/data_providers/finnhub/data'
            dict_to_json_zipped(bs, f'{data_folder}/bs_{symbol}.json.gzip')
        except Exception as e:
            send_eri_mail('*****@*****.**', e.__repr__(), 'finhubb error: bs')
            continue
        # throttle the financials API every third row
        if i % 3 == 0:
            sleep_time = randint(3, 8)
            print(i, f'sleeping {sleep_time} seconds')
            time.sleep(sleep_time)
        high_52_week = bs.get('metric').get('52WeekHigh')
        low_52_week = bs.get('metric').get('52WeekLow')
        try:
            # condition 2: well off the 52w low and near the 52w high
            if current / low_52_week > min_to_52w_low and current / high_52_week > min_52w_high:
                conditions[1] = True
                df.loc[i, 'condition_2'] = True
        except TypeError:
            # metric missing (None) -- treat as not passing
            continue
        price_relative_to_SP500 = [
            bs.get('metric').get('priceRelativeToS&P50013Week'),
            bs.get('metric').get('priceRelativeToS&P50026Week'),
            bs.get('metric').get('priceRelativeToS&P5004Week'),
            # bs.get('metric').get('priceRelativeToS&P50052Week'),
        ]
        # save metric to df
        row_metric = pd.DataFrame.from_dict(bs.get('metric'), orient='index').T
        row_metric.index = [i]
        df_metric_list.append(row_metric)
        try:
            # condition 3: outperforming the S&P across all lookback windows
            if all(i >= pc_higher_sp for i in price_relative_to_SP500):
                conditions[2] = True
                df.loc[i, 'condition_3'] = True
        except TypeError:
            continue
        if all(conditions):
            filtered_symbol.append(symbol)
            print(filtered_symbol)
            print(df.loc[i])
        # periodic checkpoint CSV and progress e-mail
        if i % 1000 == 0 and i > 0:
            df.to_csv(
                f'/home2/eproject/vee-h-phan.com/algo102/data_providers/finnhub/data/growth_stocks_filtered_{i}.csv',
                index=False)
        if i % 100 == 0 and i > 0:
            send_eri_mail('*****@*****.**', f'processed {i} symbols',
                          'finhubb progress: bs')
    # combine the screened universe with the collected fundamentals and persist
    df_metric = pd.concat(df_metric_list)
    final_df_filtered = pd.concat([df, df_metric], axis=1)
    final_df_filtered.to_csv(
        '/home2/eproject/vee-h-phan.com/algo102/data_providers/finnhub/data/growth_stocks_filtered.csv',
        index=False)
    pg_db.df_to_db(final_df_filtered, name='biz_fin', if_exists='replace',
                   index=False)
def cron_test():
    """Send a test e-mail to verify the cron/mail plumbing works."""
    send_eri_mail(recipient='*****@*****.**',
                  message_="<p>cron test msg</p>",
                  subject='cron update')
def update_data_db():
    """Incrementally refresh stocks_finn_hub with the newest daily candles.

    For each symbol, look up the last stored candle timestamp and fetch only
    candles after it (a full year back when none exist).  Fetches are
    throttled, progress is e-mailed every 1000 symbols, and the mr_meta
    bookkeeping row is stamped on completion.
    """
    # get symbols
    stocks_list = get_symbols()
    stocks_df = pd.DataFrame(stocks_list)
    j = 0               # count of symbols actually fetched (drives throttling)
    last_slept_at = -1  # guard so the same j doesn't trigger two sleeps
    for i, symbol in enumerate(stocks_df['symbol']):
        # last stored candle time (unix seconds) for this symbol
        sql_ = f"SELECT Max(t) as max_date FROM stocks_finn_hub WHERE symbol='{symbol}'"
        df_last = pg_db.query_df(sql_)
        start = one_year_ago_u
        # on Mondays the gap to the last trading day spans the weekend,
        # so require a 3-day gap before refetching (values are in seconds)
        if today.day_of_week == 1:
            min_delta_days = 3 * 24 * 60 * 60
        else:
            min_delta_days = 1 * 24 * 60 * 60
        if len(df_last):
            if df_last.loc[0, 'max_date'] is not None:
                last_day_in_db = df_last.loc[0, 'max_date']
                # resume one day after the newest stored candle
                start = last_day_in_db + 1 * 24 * 60 * 60
        # only hit the API when the data is actually stale
        if today_u - start > min_delta_days:
            candles_df = get_stock_data(symbol, start, tomorrow_u)
            j += 1
            print(f"j={j}")
            # throttle: pause every 5 fetches to stay under the API limit
            if j % 5 == 0 and j > 0 and j != last_slept_at:
                # FIX: message said '5 seconds' while sleeping 10
                print('sleeping for 10 seconds...')
                time.sleep(10)
                last_slept_at = j
            if candles_df is not None and len(candles_df):
                try:
                    pg_db.df_to_db(candles_df, name='stocks_finn_hub',
                                   if_exists='append', index=False)
                except Exception as e:
                    print(e)
                    # FIX: was f'csv/{symbol.csv}', which raised
                    # AttributeError (str has no .csv) inside the handler
                    candles_df.to_csv(f'csv/{symbol}.csv')
            print(f'finished {i} {symbol}')
        else:
            print(f'skipping {symbol}')
        # FIX: removed the `continue` after the fetch branch so this
        # progress e-mail fires every 1000 symbols on both paths (it
        # previously only ran for skipped symbols).
        if i % 1000 == 0 and i > 0:
            msg = f"<p>completed {i} stocks....</p>"
            send_eri_mail('*****@*****.**', message_=msg,
                          subject='finhubb data progress', message_type='html')
    # update meta
    sql_ = f"""UPDATE eproject_fx.public.mr_meta SET last_updated = '{pendulum.now(tz='UTC').strftime('%Y-%m-%d %H:%M UTC')}' WHERE job_name='update stock data'"""
    pg_db.query(sql_)