def LoadData():
    connectionCalv2 = SQL_CURSOR('earningsCalendarForTraining.db')
    earningsInfoSaved = pd.read_sql('SELECT * FROM earningsInfo', connectionCalv2)
    return earningsInfoSaved
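# Minimal usage sketch (not part of the original script), assuming the
# earningsInfo table already exists in earningsCalendarForTraining.db:
# load the cached rows and inspect their types and first few entries.
earningsInfoSaved = LoadData()
print(earningsInfoSaved.dtypes)
print(earningsInfoSaved.head())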
import os, sys
import datetime
import pandas as pd
import base as b
import math
from ReadData import is_date, SQL_CURSOR, UpdateTable, GetTimeSlot

sqlcursor = SQL_CURSOR(db_name='stocksShort.db')
sqlcursorExtra = SQL_CURSOR(db_name='stocksShortExtra.db')

expectedKeys = [
    'Index', 'Market Cap', 'Income', 'Sales', 'Book/sh', 'Cash/sh', 'Dividend',
    'Dividend %', 'Employees', 'Optionable', 'Shortable', 'Recom', 'P/E',
    'Forward P/E', 'PEG', 'P/S', 'P/B', 'P/C', 'P/FCF', 'Quick Ratio',
    'Current Ratio', 'Debt/Eq', 'LT Debt/Eq', 'SMA20', 'EPS (ttm)', 'EPS next Y',
    'EPS next Q', 'EPS this Y', 'EPS next 5Y', 'EPS past 5Y', 'Sales past 5Y',
    'Sales Q/Q', 'EPS Q/Q', 'Earnings', 'SMA50', 'Insider Own', 'Insider Trans',
    'Inst Own', 'Inst Trans', 'ROA', 'ROE', 'ROI', 'Gross Margin', 'Oper. Margin',
    'Profit Margin', 'Payout', 'SMA200', 'Shs Outstand', 'Shs Float',
    'Short Float', 'Short Ratio', 'Target Price', '52W Range', '52W High',
    '52W Low', 'RSI (14)', 'Rel Volume', 'Avg Volume', 'Volume', 'Perf Week',
    'Perf Month', 'Perf Quarter', 'Perf Half Y', 'Perf Year', 'Perf YTD', 'Beta',
    'ATR', 'Volatility', 'Prev Close', 'Price', 'Change'
]

stock_list = []
today = datetime.date.today()
todayDateTime = datetime.datetime.now()
all_stocks = b.stock_list + b.etfs
#all_stocks=[['UN',0,0,'NASDAQ',],['X',0,0,'NYSE']]
for iin in all_stocks:
api = ALPACA_REST()
ts = ALPHA_TIMESERIES()
ticker = 'SPY'
#ticker='TLT'
#ticker='QQQ'
#ticker='GLD'
#ticker='HAL'
#ticker='GUSH'
#ticker='AVCT'
#ticker='RZV'
spy = runTicker(api, ticker)
stock_info = None
spy = None
sqlcursor = SQL_CURSOR()
readType = 'full'
spy, j = ConfigTable(ticker, sqlcursor, ts, readType)
print('spy')
print(spy)

# add info
if len(spy) == 0:
    print('ERROR - empty info %s' % ticker)
spy['daily_return'] = spy['adj_close'].pct_change(periods=1) + 1
spy['openclosepct'] = (spy.close - spy.open) / spy.open + 1
spy['closeopenpct'] = (spy.open - spy.shift(1).close) / spy.shift(1).close + 1
spy['afterhourspct'] = (spy.shift(-1).open - spy.close) / spy.close + 1
spy['year'] = spy.index.year
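# Hedged sketch (toy data, not from the original script) illustrating the shift()
# conventions used above: shift(1).close is the previous session's close, so
# closeopenpct captures the overnight gap, while shift(-1).open is the next
# session's open, so afterhourspct captures the after-hours move.
import pandas as pd

demo = pd.DataFrame(
    {'open': [100.0, 102.0, 101.0], 'close': [101.0, 103.0, 99.0]},
    index=pd.date_range('2021-01-04', periods=3, freq='B'))
demo['closeopenpct'] = (demo.open - demo.shift(1).close) / demo.shift(1).close + 1
demo['afterhourspct'] = (demo.shift(-1).open - demo.close) / demo.close + 1
print(demo)  # first closeopenpct and last afterhourspct are NaN (no neighbor row)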
             yname=j, saveName='earningDiff', hlines=[], title='earningDiff')
    MakePlot(merged_stock_earn['e_over_p_diff'], merged_stock_earn[j],
             xname='e_over_p_diff', yname=j, saveName='e_over_p_diff',
             hlines=[], title='e_over_p_diff')
    return merged_stock_earn

# collecting spy
sqlcursor = SQL_CURSOR()
ts = ALPHA_TIMESERIES()
j = 0

# reading in the spy data
spy, j = ConfigTable('SPY', sqlcursor, ts, readType, hoursdelay=2)
AddInfo(spy, spy, debug=debug)

# processing new earnings
connectionCal = SQL_CURSOR('earningsCalendar.db')
connectionCalv2 = SQL_CURSOR('earningsCalendarForTraining.db')
fd = ALPHA_FundamentalData()
my_3month_calendar = GetUpcomingEarnings(fd, ReDownload)
print(my_3month_calendar)
it = 0
preLoaded = []
    except (pd.io.sql.DatabaseError, KeyError):
        print('ERROR collecting earnings history for %s' % ticker)
        pass
    if debug:
        print(stockInfoQuarter)
        print(stockInfoQuarter.dtypes)
        print(stockInfoAnnual)
        print(stockInfoAnnual.dtypes)
        print(company_overview)
        print(company_overview.dtypes)
    return stockInfoQuarter, stockInfoAnnual, company_overview

if __name__ == "__main__":
    # execute only if run as a script
    connectionCal = SQL_CURSOR('earningsCalendar.db')
    fd = ALPHA_FundamentalData()
    sqlcursor = SQL_CURSOR()
    ts = ALPHA_TIMESERIES()
    api = ALPACA_REST()
    ticker = 'MPC'
    ind_map = {}
    sec_map = {}
    doEarnings = True
    #stockInfoQuarter,stockInfoAnnual,company_overview=CollectEarnings(ticker,connectionCal)
    readType = 'full'
    j = 0
    data_points = [
        'day_return', 'day2_return', 'day3_return', 'day4_return', 'day5_return',
        'day15_return', '30d_return', '60d_return', '180d_return',
        'volatitilty', '5d_vol', '30d_vol', '180d_vol'
total_table = []
for i in range(0, 406):
#for i in range(0,2):
    print(i)
    URL = 'https://finviz.com/screener.ashx?v=111\&o=industry\&r=%s' % (1 + i * 20)
    filename_rec = '/tmp/cc%i.html' % i
    if not os.path.exists(filename_rec):
        os.system('wget -T 30 -q -O %s %s' % (filename_rec, URL))
    table_MN = pd.read_html(filename_rec)
    for t in table_MN:
        #print(t.columns)
        if len(t) > 0 and 4 in t.columns and t.loc[0, 3] == 'Sector':
            t.columns = t.loc[0].tolist()
            t = t.drop(0)
            t = t.drop('No.', axis=1)
            print(t.dtypes)
            print(t)
            t.Change = pd.to_numeric(t.Change.str.rstrip('%'), errors='coerce') / 100.0
            for a in ['Market Cap', 'P/E', 'Price', 'Volume']:
                t[a] = pd.to_numeric(
                    t[a].str.strip('%').replace(
                        {'K': 'e3', 'M': 'e6', 'B': 'e9', 'T': 'e12'}, regex=True),
                    errors='coerce', downcast='integer')
            print(t)
            if len(total_table) == 0:
                total_table = t
            else:
                total_table = pd.concat([total_table, t])

print(total_table.dtypes)
sqlcursorFull = SQL_CURSOR(db_name='sectorInfo.db')
total_table.to_sql('sectors', sqlcursorFull, index=False, if_exists='append')
#https://finviz.com/screener.ashx?v=111&f=sec_basicmaterials&r=221
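# Hedged sketch of the K/M/B/T suffix conversion used in the loop above, pulled
# out as a helper so it can be tested in isolation; 'to_number' is an
# illustrative name and the downcast='integer' option is omitted for simplicity.
import pandas as pd

def to_number(series):
    # '10.5M' -> '10.5e6' -> 10500000.0; unparseable entries become NaN
    return pd.to_numeric(
        series.str.strip('%').replace(
            {'K': 'e3', 'M': 'e6', 'B': 'e9', 'T': 'e12'}, regex=True),
        errors='coerce')

print(to_number(pd.Series(['10.5M', '1.2B', '-'])))  # 10500000.0, 1200000000.0, NaN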
    outlist[(g['labelSupportResistance'] + g['labelTurningPoints']).replace(
        ' ', '_').replace('%', 'perc')] = None
    #print(outlist)
    df = pd.DataFrame([outlist])
    df['Date'] = datetime.datetime.strftime(yesterday, '%Y-%m-%d')
    df['Date'] = pd.to_datetime(df['Date'])
    df = df.set_index('Date')
    print(df)
    LoadData(df, sqlcursorExtra, tableName=tableName)
    return outlist

if __name__ == "__main__":
    # execute only if run as a script
    sqlcursor = SQL_CURSOR(db_name='stocksPerfHistory.db')
    #MMOH -> NYSE

    # 200 dma S&P 500
    total_table_top_gain = collect(
        sqlcursor,
        URLin='https://www.barchart.com/stocks/quotes/\$S5TH/cheat-sheet',
        tableName='SPY200MA')

    # 50 dma S&P 500, S5FI
    total_table_top_gain = collect(
        sqlcursor,
        URLin='https://www.barchart.com/stocks/quotes/\$S5FI/cheat-sheet',
        tableName='SPY50MA')

    # 100 dma S&P 500, S5OH
    total_table_top_gain = collect(
        sqlcursor,
from ReadData import ALPACA_REST, runTicker, ConfigTable, ALPHA_TIMESERIES, GetTimeSlot, SQL_CURSOR
import sys
import sqlite3

ts = ALPHA_TIMESERIES()
sqlcursor = SQL_CURSOR()
sc = sqlcursor.cursor()
doClean = False
doReload = False
ticker = 'DUG'
daily_prices, j = ConfigTable(ticker, sqlcursor, ts, 'full', hoursdelay=18)
daily_prices_365d = GetTimeSlot(daily_prices, days=365)
split_dates = daily_prices_365d[daily_prices_365d.splitcoef != 1.0]
if len(split_dates) > 0:
    print(split_dates)
#print(daily_prices.to_string())
#sc.execute('DROP TABLE DUG')

#list_of_tables =
table_names = sc.execute(
    "SELECT name from sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';"
).fetchall()
print("tables: %s" % len(table_names))
#splitID = sc.execute('SELECT COUNT(splitcoef) from DUG WHERE splitcoef!=1.0 AND Date>2021-01-23').fetchall()[0][0]
splitID = sc.execute(
    "SELECT COUNT(splitcoef) from SPY WHERE splitcoef!=1.0 AND Date>'2021-01-23'"
).fetchall()[0][0]
#splitID = sc.execute("SELECT * from DUG WHERE splitcoef!=1.0 AND Date>'2021-01-23'").fetchall()[0][0]
splitIDl = sc.execute(
    "SELECT * from DUG WHERE splitcoef!=1.0 AND Date>'2021-01-23'"
).fetchall()[0]
print(splitID)
print(splitIDl)

# create a list to reload
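# Hedged sketch of one way that reload list could be built: scan every table for
# a split recorded after the same cutoff date used above and collect the names.
# The actual drop/re-download step is left commented out, mirroring the
# commented-out DROP TABLE above; the column/table layout is assumed to match.
reload_tickers = []
for tname in table_names:
    name = tname[0]
    if name.count('-'):
        continue  # skip names that would break the unquoted SQL
    try:
        n_splits = sc.execute(
            "SELECT COUNT(splitcoef) from %s WHERE splitcoef!=1.0 AND Date>'2021-01-23'" % name
        ).fetchall()[0][0]
        if n_splits > 0:
            reload_tickers.append(name)
    except sqlite3.OperationalError:
        pass  # table without splitcoef/Date columns
print('tables needing a reload: %s' % reload_tickers)
# for name in reload_tickers:
#     sc.execute('DROP TABLE %s' % name)
#     daily_prices, j = ConfigTable(name, sqlcursor, ts, 'full', hoursdelay=18)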
def GenerateSignal(ticker, out_file_name='out_bull_instructions.csv', price_targets=[]):
    connectionCal = SQL_CURSOR('earningsCalendarv2.db')
    fd = ALPHA_FundamentalData()
    sqlcursor = SQL_CURSOR()
    ts = ALPHA_TIMESERIES()
    api = ALPACA_REST()
    stockInfoQuarter, stockInfoAnnual, company_overview = CollectEarnings(
        ticker, connectionCal)

    # annual balance sheet
    balance_sheet_annual = fd.get_balance_sheet_annual(ticker)[0]
    balance_sheet_annual['fiscalDateEnding'] = pd.to_datetime(
        balance_sheet_annual['fiscalDateEnding'], errors='coerce')
    for d in balance_sheet_annual.columns:
        if d not in ['fiscalDateEnding', 'totalAssets']:
            balance_sheet_annual[d] = pd.to_numeric(balance_sheet_annual[d],
                                                    errors='coerce')

    # quarterly income statement
    income_statement_quarterly = fd.get_income_statement_quarterly(ticker)[0]
    for d in income_statement_quarterly.columns:
        if d not in ['fiscalDateEnding', 'reportedCurrency']:
            income_statement_quarterly[d] = pd.to_numeric(
                income_statement_quarterly[d], errors='coerce')
    for d in ['fiscalDateEnding']:
        income_statement_quarterly[d] = pd.to_datetime(
            income_statement_quarterly[d], errors='coerce')
    if debug:
        print(income_statement_quarterly)
        print(income_statement_quarterly.dtypes)

    tstock_info, j = ConfigTable(ticker, sqlcursor, ts, 'full')
    spy, j = ConfigTable('SPY', sqlcursor, ts, 'compact')
    #print(spy.columns)

    est = pytz.timezone('US/Eastern')
    today = datetime.now(tz=est) + maindatetime.timedelta(minutes=-40)
    #today = datetime.utcnow() + maindatetime.timedelta(minutes=-30)
    d1 = today.strftime("%Y-%m-%dT%H:%M:%S-04:00")
    five_days = (
        today + maindatetime.timedelta(days=-7)).strftime("%Y-%m-%dT%H:%M:%S-04:00")

    minute_prices = []
    ntry = 0
    while ntry < 3:
        try:
            minute_prices = runTicker(api, ticker, timeframe=TimeFrame.Minute,
                                      start=five_days, end=d1)
            break
        except (requests.exceptions.ConnectionError):
            ntry += 1

    # may want to restrict to NYSE open times
    try:
        spy = AddInfo(spy, spy)
        tstock_info = AddInfo(tstock_info, spy, AddSupport=True)
    except (ValueError, KeyError):
        print('Error processing adding info %s' % ticker)

    recent_quotes = getQuotes(api, ticker)
    if debug:
        print(tstock_info[[
            'adj_close', 'sma20', 'sma20cen', 'vwap10cen', 'vwap10'
        ]][50:-10])

    earn = Earnings(ticker, income_statement_quarterly, company_overview,
                    balance_sheet_annual, stockInfoQuarter, stockInfoAnnual,
                    tstock_info, minute_prices, recent_quotes)
    #earn.BuildPDF()
    earn.WriteCSV(out_file_name, price_targets)
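# Hedged sketch: the retry loop above, factored into a small reusable helper;
# fetch_with_retries is an illustrative name, not part of the original code.
import requests

def fetch_with_retries(fetch, attempts=3, default=None):
    # Call fetch() up to `attempts` times, swallowing transient connection errors.
    for _ in range(attempts):
        try:
            return fetch()
        except requests.exceptions.ConnectionError:
            continue
    return default

# e.g., inside GenerateSignal:
# minute_prices = fetch_with_retries(
#     lambda: runTicker(api, ticker, timeframe=TimeFrame.Minute, start=five_days, end=d1),
#     default=[])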
if __name__ == "__main__":
    # execute only if run as a script
    connectionCal = SQL_CURSOR('earningsCalendarv2.db')
    fd = ALPHA_FundamentalData()
    sqlcursor = SQL_CURSOR()
    ts = ALPHA_TIMESERIES()
    api = ALPACA_REST()
    ticker = 'MPC'
    ticker = 'RIOT'
    ticker = 'X'
    ticker = 'HZO'
    ticker = 'WOOF'
    #ticker='GOOGL'
    #ticker='F'
    stockInfoQuarter, stockInfoAnnual, company_overview = CollectEarnings(
        ticker, connectionCal)

    # annual balance sheet
from ReadData import SQL_CURSOR
import sqlite3

s = SQL_CURSOR()
sc = s.cursor()
table_names = sc.execute(
    "SELECT name from sqlite_master WHERE type ='table' AND name NOT LIKE 'sqlite_%';"
).fetchall()
for tname in table_names:
    #print(tname[0])
    if tname[0].count('-'):
        continue
    try:
        #print(sc.execute('SELECT MIN(rowid) from SPY GROUP BY Date').fetchall())
        distin = sc.execute('SELECT COUNT(DISTINCT Date) from %s' % tname[0]).fetchall()[0][0]
        allD = sc.execute('SELECT COUNT(Date) from %s' % tname[0]).fetchall()[0][0]
        if abs(allD - distin) > 0:
            print(distin, allD, tname[0])
            #print(sc.execute('SELECT Date from %s' %tname[0]).fetchall())
            print(
                'DELETE FROM %s WHERE rowid NOT IN ( SELECT MIN(rowid) from %s GROUP BY Date)'
                % (tname[0], tname[0]))
            #sc.execute('DELETE FROM %s WHERE rowid NOT IN ( SELECT MIN(rowid) from %s GROUP BY Date)' %(tname[0],tname[0]))
            sc.execute('DROP TABLE %s' % (tname[0]))
            #print(sc.execute('SELECT COUNT( Date) from %s' %tname[0]).fetchall()[0][0])
    except sqlite3.OperationalError:
        print('Could not load!')
sc.close()
#DELETE FROM lipo
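# Hedged sketch of the gentler cleanup that the loop above only prints: remove
# the duplicate-Date rows (keeping the lowest rowid) and commit, rather than
# dropping the whole table. Assumes SQL_CURSOR() returns an sqlite3 connection,
# as its use with .cursor() elsewhere suggests; drop_duplicate_dates is an
# illustrative name.
def drop_duplicate_dates(conn, table):
    cur = conn.cursor()
    cur.execute(
        'DELETE FROM %s WHERE rowid NOT IN ( SELECT MIN(rowid) from %s GROUP BY Date)'
        % (table, table))
    conn.commit()
    cur.close()

# usage inside the loop above (before sc.close()):
# drop_duplicate_dates(s, tname[0])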
import pickle
import base as b
import time
from scipy.stats.stats import pearsonr
import matplotlib.pyplot as plt

debug = False
draw = False
outdir = b.outdir
doStocks = True
loadFromPickle = False
loadSQL = True
readType = 'full'

import zigzag
from zigzag import *

sqlcursorShort = SQL_CURSOR(db_name='stocksShort.db')
sqlcursorExtra = SQL_CURSOR(db_name='stocksShortExtra.db')


def readShortInfo(ticker):
    stock = None
    try:
        stock = pd.read_sql('SELECT * FROM %s' % ticker, sqlcursorExtra)  #,index_col='Date')
        stock['LogDate'] = pd.to_datetime(stock.LogDate.astype(str), format='%Y-%m-%d')
        stock['LogDate'] = pd.to_datetime(stock['LogDate'])
        stock = stock.set_index('LogDate')
        stock = stock.sort_index()
        entryShort = 0
        if len(stock) > 0:
from Earnings import GetIncomeStatement, GetPastEarnings, GetStockOverview, GetBalanceSheetQuarterly, GetBalanceSheetAnnual
import base
import plotly.graph_objects as go
import plotly.figure_factory as ff
from plotly.subplots import make_subplots
import matplotlib.dates as mdates
import pytz
import datetime

est = pytz.timezone('US/Eastern')
api = ALPACA_REST()
ts = ALPHA_TIMESERIES()
STOCK_DB_PATH = os.getenv('STOCK_DB_PATH')
sqlcursor = SQL_CURSOR('%s/stocksAV.db' % STOCK_DB_PATH)
matplotlib.use("agg")
sns.set_style('darkgrid')
_lock = RendererAgg.lock


def clear_form():
    st.session_state["tickerKey"] = "Select"


def FitWithBand(my_index, arr_prices, doMarker=True, ticker='X',