def insert_earnings_into_db(earnings_list):
    """
    Insert (or update) earnings records in the earnings_calendar table.

    Parameters
    ----------
    earnings_list: list
        List of earnings rows, each shaped as
        [symbol, name, eps_est, eps_act, surprise, earning_date, earning_time].
    """
    for stats in earnings_list:
        symbol, name, eps_est, eps_act, surprise, earning_date, earning_time = stats[:7]
        information = yf.Ticker(symbol).info
        # .get() avoids a KeyError when yfinance omits these fields for a ticker
        # (NULL is then stored instead of aborting the whole batch).
        mkt_cap = information.get("marketCap")
        img = information.get("logo_url")
        # "excluded" is SQLite's pseudo-table holding the row that failed to
        # insert, so the seven bind parameters need not be repeated for UPDATE.
        db.execute(
            """INSERT INTO earnings_calendar
               (name, symbol, mkt_cap, eps_est, eps_act, surprise, img_url, earning_date, earning_time)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
               ON CONFLICT (name, symbol) DO UPDATE SET
                   mkt_cap=excluded.mkt_cap, eps_est=excluded.eps_est,
                   eps_act=excluded.eps_act, surprise=excluded.surprise,
                   img_url=excluded.img_url, earning_date=excluded.earning_date,
                   earning_time=excluded.earning_time
            """, (name, symbol, mkt_cap, eps_est, eps_act, surprise, img,
                  earning_date, earning_time))
        conn.commit()
        print(symbol, earning_date, earning_time)
def sell_ticker(date):
    """
    Sell ticker if ticker is outside the Top 10 popular tickers on r/wallstreetbets

    Note: Run this function after running scheduled_tasks/main.py to get most
    trending tickers on Reddit.

    Side effects: appends the current Top-10 symbols to the module-level
    new_bought_ticker list and closes positions in the reddit_etf table.

    Parameters
    ----------
    date: str
        Format: DD/MM/YYYY HH:MM:SS
    """
    raw_date = date
    latest_date = date
    # Strip the time component, keeping only DD/MM/YYYY for parsing.
    if " " in latest_date:
        latest_date = latest_date.split()[0]
    latest_date = datetime.strptime(latest_date, "%d/%m/%Y")
    db.execute(
        "SELECT * FROM wallstreetbets where date_updated=? ORDER BY total DESC LIMIT 10",
        (raw_date, ))
    rows = db.fetchall()
    # Record today's Top-10 in the module-level list (column 1 is the symbol).
    for ticker in rows:
        symbol = ticker[1]
        new_bought_ticker.append(symbol)
    # Tickers held previously but no longer trending are to be sold.
    sell = list(set(prev_bought_ticker) - set(new_bought_ticker))
    for symbol in sell:
        ticker = yf.Ticker(symbol)
        history = ticker.history(period="1mo", interval="1d")
        try:
            info = history.loc[latest_date]
        except KeyError:
            # No price row for this date -> market closed; abandon all sells.
            print("Market not open today! No tickers sold!")
            break
        # NOTE(review): despite the name, this is the day's OPEN price —
        # positions are closed at market open (see message below).
        close_price = round(info["Open"], 3)
        message = "Ticker {} to be sold on {} at ${} during market open.".format(
            symbol, str(latest_date).split()[0], close_price)
        print(message)
        logging.info(message)
        # Fetch the open position: stats[3] is the buy price, stats[4] the
        # number of shares (matches buy_new_ticker's INSERT order).
        db.execute("SELECT * FROM reddit_etf WHERE ticker=? AND status='Open'",
                   (symbol, ))
        stats = db.fetchone()
        difference = round(close_price - stats[3], 2)
        PnL = round(difference * stats[4], 2)
        percentage_diff = round((difference / stats[3]) * 100, 2)
        db.execute(
            "UPDATE reddit_etf SET close_date=?, close_price=?, PnL=?, percentage=?, status=? "
            "WHERE ticker=? AND status=?",
            (str(latest_date).split()[0], close_price, PnL, percentage_diff,
             "Close", symbol, "Open"))
        conn.commit()
    logging.info("-" * 50)
def get_high_short_interest():
    """
    Scrape www.highshortinterest.com and fully refresh the short_interest table.

    Adapted from
    https://github.com/GamestonkTerminal/GamestonkTerminal/tree/main/gamestonk_terminal
    """
    url_high_short_interested_stocks = "https://www.highshortinterest.com"
    # timeout prevents this scheduled task from hanging forever if the site stalls
    text_soup_high_short_interested_stocks = BeautifulSoup(
        requests.get(url_high_short_interested_stocks, timeout=30).text, "lxml")

    # Table header cells carry class "tblhdr"; keep only the first line of each
    a_high_short_interest_header = [
        header.text.strip("\n").split("\n")[0]
        for header in text_soup_high_short_interested_stocks.find_all(
            "td", {"class": "tblhdr"})
    ]
    df_high_short_interest = pd.DataFrame(columns=a_high_short_interest_header)

    for a_stock in text_soup_high_short_interested_stocks.find_all("tr"):
        a_stock_txt = a_stock.text
        if a_stock_txt == "":
            continue
        shorted_stock_data = a_stock_txt.split("\n")
        # A valid data row splits into exactly 8 parts; the last one is empty
        if len(shorted_stock_data) == 8:
            df_high_short_interest.loc[
                len(df_high_short_interest.index)] = shorted_stock_data[:-1]

    # Full refresh: wipe the table, then re-insert every scraped row
    db.execute("DELETE FROM short_interest")
    for index, row in df_high_short_interest.iterrows():
        information = yf.Ticker(row["Ticker"]).info
        db.execute(
            "INSERT INTO short_interest VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (row['Ticker'], row['Company'], row['Exchange'],
             # .get() avoids a KeyError when yfinance omits a field
             information.get("previousClose"), row['ShortInt'], row['Float'],
             row['Outstd'], row['Industry'], information.get("logo_url")))
        conn.commit()
        print("INSERT {} INTO SHORT INTEREST DATABASE SUCCESSFULLY!".format(
            row['Ticker']))
def buy_new_ticker(date):
    """
    Buy ticker if ticker is inside the Top 10 popular tickers on r/wallstreetbets

    Note: Run this function after running scheduled_tasks/main.py to get most
    trending tickers on Reddit.

    Parameters
    ----------
    date: str
        Format: DD/MM/YYYY HH:MM:SS
    """
    raw_date = date
    # Drop the time component (if any) before parsing the calendar date.
    date_token = date.split()[0] if " " in date else date
    latest_date = datetime.strptime(date_token, "%d/%m/%Y")
    day_str = str(latest_date).split()[0]

    db.execute(
        "SELECT * FROM wallstreetbets where date_updated=? ORDER BY total DESC LIMIT 10",
        (raw_date, ))
    top_rows = db.fetchall()

    for row in top_rows:
        symbol = row[1]
        # Skip anything we already hold from the previous run.
        if symbol in prev_bought_ticker:
            continue
        ticker = yf.Ticker(symbol)
        logo_url = check_img(symbol, ticker.info)
        history = ticker.history(period="1mo", interval="1d")
        try:
            info = history.loc[latest_date]
        except KeyError:
            # No price row for this date -> market closed; abandon all buys.
            print("Market not open today! No tickers bought!")
            break
        open_price = round(info["Open"], 2)
        # Invest a fixed $10,000 notional per new position.
        num_shares = round(10000 / open_price, 2)
        message = "Ticker {} to be bought on {} for ${}.".format(
            symbol, day_str, open_price)
        print(message)
        logging.info(message)
        db.execute(
            "INSERT INTO reddit_etf VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (symbol, logo_url, day_str, open_price, num_shares,
             "N/A", "N/A", "N/A", "N/A", "Open"))
        conn.commit()
def update_bought_ticker_price():
    """
    Update price of ticker inside the Top 10 popular tickers on r/wallstreetbets

    Refreshes close_price, PnL and percentage for every row in reddit_etf with
    status='Open', using the current regular-market price from yfinance.

    Note: Run this function after running scheduled_tasks/main.py to get most
    trending tickers on Reddit.
    """
    print("Updating ticker price now...")
    db.execute("SELECT * FROM reddit_etf WHERE status='Open'")
    open_ticker_list = db.fetchall()
    for row in open_ticker_list:
        # Column layout (see buy_new_ticker's INSERT): 0=ticker symbol,
        # 3=buy price, 4=number of shares.
        symbol = row[0]
        buy_price = row[3]
        num_shares = row[4]
        today_price = round(yf.Ticker(symbol).info["regularMarketPrice"], 2)
        difference = today_price - buy_price
        PnL = round(difference * num_shares, 2)
        percentage_diff = round((difference / buy_price) * 100, 2)
        db.execute(
            "UPDATE reddit_etf SET close_price=?, PnL=?, percentage=? "
            "WHERE ticker=? AND status='Open'",
            (today_price, PnL, percentage_diff, symbol))
        conn.commit()
        # Bug fix: previously formatted the whole DB row tuple into the
        # message; print just the ticker symbol.
        print("Update {} Successful!".format(symbol))
def print_df(df, filename, writesql, writecsv, subreddit):
    """
    Post-process a trending-tickers DataFrame, then persist and chart it.

    Adds rank/date/subreddit columns and normalises values, optionally writes
    the rows to the subreddit's SQL table and/or appends them to a CSV file,
    and saves a 1-month sparkline SVG for each of the top 35 tickers.

    Parameters
    ----------
    df: pandas.DataFrame
        Trending tickers; expected to contain total/recent/previous/rockets/
        posts/upvotes/comments/change/industry/recommend/website columns.
    filename: str
        CSV file name (without extension), relative to the script directory.
    writesql: bool
        If True, insert every row into the table named after the subreddit.
    writecsv: bool
        If True, append the DataFrame to <filename>.csv.
    subreddit: str
        Subreddit name; used both as table name and as a column value.
    """
    # Two resets: the first materialises the old index, the second creates a
    # fresh 0-based index whose 'index' column becomes the 1-based 'rank'.
    df.reset_index(inplace=True)
    df.index += 1
    df.reset_index(inplace=True)
    df.rename(columns={'index': 'rank'}, inplace=True)
    now = datetime.utcnow()
    dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
    df['date_updated'] = dt_string
    df['subreddit'] = subreddit

    # Numeric columns: missing counts become 0.0 so SQL/CSV output is uniform.
    cols_to_change = [
        "rank", "total", "recent", "previous", "rockets", "posts", "upvotes",
        "comments"
    ]
    for col in cols_to_change:
        df[col] = df[col].fillna(0).astype(float)
    df['change'] = df['change'].apply(lambda x: round(x, 2))
    # A change of exactly 0 is displayed as "N/A".
    df['change'] = df['change'].replace(0, "N/A")
    # Normalise em-dash from yfinance industry strings.
    df['industry'] = df['industry'].str.replace("—", "-")
    df['recommend'] = df['recommend'].str.replace("_", " ")
    # Site-specific URL substitutions (presumably for working logos/links
    # downstream — TODO confirm against the front end).
    df['website'] = df['website'].str.replace("alibabagroup.com",
                                              "alibaba.com")
    df['website'] = df['website'].str.replace("tesla.com", "tesla.cn")

    # Save to sql database
    if writesql:
        for row_num in range(len(df)):
            # Table name is interpolated (not parameterised) — safe only
            # because subreddit comes from our own code, never user input.
            db.execute(
                "INSERT INTO {} VALUES "
                "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NULL)"
                .format(subreddit), tuple(df.loc[row_num].tolist()))
            conn.commit()
        print("Saved to {} SQL Database successfully.".format(subreddit))

    # Write to csv
    if writecsv:
        completeName = os.path.join(sys.path[0], filename)
        completeName += '.csv'
        # mode='a': rows accumulate across runs rather than overwriting.
        df.to_csv(completeName,
                  index=False,
                  float_format='%.2f',
                  mode='a',
                  encoding=locale.getpreferredencoding())
        print("Wrote to file successfully {}".format(completeName))

    # Create past 1 month chart
    print("Saving last 1 month chart now...")
    top_35 = df[:35]
    for index, i in top_35.iterrows():
        # Column 1 holds the ticker symbol after the rank/reset shuffle above.
        trending_ticker = i[1]
        ticker = yf.Ticker(trending_ticker)
        price_df = ticker.history(interval="1d", period="1mo")["Close"]
        price_list = price_df.to_list()
        # Skip tickers with no price history (e.g. delisted symbols).
        if price_list:
            start_price = price_list[0]
            end_price = price_list[-1]
            # Red sparkline for a monthly loss, green for a gain.
            if start_price > end_price:
                color = "red"
            else:
                color = "green"
            days_list = [i for i in range(len(price_list))]
            # Tiny borderless figure: rendered as an inline sparkline.
            plt.figure(figsize=(1, 0.5))
            plt.axis("off")
            plt.xticks([])
            plt.yticks([])
            plt.plot(days_list, price_list, color=color)
            plt.savefig(r"static/graph_chart/{}.svg".format(trending_ticker),
                        transparent=True)
            plt.close()
def financial(ticker_symbol):
    """
    Get balance sheet of company and save it to json file.
    Data is from yahoo finance.

    Scrapes the last 5 quarterly EPS estimates/actuals from Yahoo's earnings
    calendar, collects the quarterly balance sheet, and writes everything
    under the ticker's key in database/financials.json.

    Parameters
    ----------
    ticker_symbol: str
        ticker symbol (e.g: AAPL)
    """
    balance_list = []
    ticker = yf.Ticker(ticker_symbol, session=session)
    information = ticker.info
    # To check if input is a valid ticker
    if "symbol" in information:
        # Missing balance-sheet values are stored as 0, not NaN, so the JSON
        # output stays numeric.
        balance_sheet = ticker.quarterly_balance_sheet.replace(np.nan, 0)
        # print(balance_sheet)
        date_list = balance_sheet.columns.astype("str").to_list()
        balance_col_list = balance_sheet.index.tolist()
        # print(date_list)
        # print(balance_col_list)
        # One list of values per balance-sheet row (row order matches
        # balance_col_list).
        for i in range(len(balance_sheet)):
            values = balance_sheet.iloc[i].tolist()
            balance_list.append(values)

        url_ratings = "https://finance.yahoo.com/calendar/earnings?symbol={}".format(
            ticker_symbol)
        text_soup_ratings = BeautifulSoup(get_earnings_html(url_ratings),
                                          "lxml")
        earnings_list, financial_quarter_list = [], []
        # Example result:
        # [[1, 0.56, 0.64], [2, 0.51, 0.65], [3, 0.7, 0.73], [4, 1.41, 1.68], [5, 0.98]]
        # count numbers entries 5 (most recent) down to 1.
        count = 5
        for earning in text_soup_ratings.findAll("tr"):
            # Stop once the 5 most recent reports have been collected.
            if len(earnings_list) != 5:
                tds = earning.findAll("td")
                if len(tds) > 0:
                    # Cell layout: [.., .., date, EPS estimate, EPS actual, ..]
                    earning_date = tds[2].text.rsplit(",", 1)[0]
                    eps_est = tds[3].text
                    eps_act = tds[4].text
                    # "-" marks a not-yet-reported quarter: store estimate only.
                    if eps_act != "-":
                        earnings_list.append([count, eps_est, eps_act])
                    else:
                        earnings_list.append([count, eps_est])

                    # Deduce financial quarter based on date of report
                    year_num = earning_date.split()[-1]
                    month_num = earning_date.split()[0]
                    if month_num in ["Jan", "Feb", "Mar"]:
                        # A Jan-Mar report covers Q4 of the PREVIOUS year.
                        year_num = int(year_num) - 1
                        quarter = "Q4"
                    elif month_num in ["Apr", "May", "Jun"]:
                        quarter = "Q1"
                    elif month_num in ["Jul", "Aug", "Sep"]:
                        quarter = "Q2"
                    else:
                        quarter = "Q3"
                    financial_quarter_list.append("{} {}".format(
                        year_num, quarter))
                    count -= 1
            else:
                break

        # Read-modify-write of the shared JSON store: load all tickers,
        # replace this ticker's entry, rewrite the file in place.
        with open(r"database/financials.json", "r+") as r:
            data = json.load(r)
            data[ticker_symbol] = {
                "date_list": date_list,
                "balance_list": balance_list,
                "balance_col_list": balance_col_list,
                "earnings_list": earnings_list,
                "financial_quarter_list": financial_quarter_list,
            }
            r.seek(0)  # reset file position to the beginning.
            r.truncate()
            json.dump(data, r, indent=4)
        print("Financial Data for {} completed".format(ticker_symbol))