def getData(startDate, endDate, period):
    """Download historical prices for every AMEX-listed symbol and cache
    them to Data/Amex_data.json.

    Bug fixed: a leftover debug ``print tickerSymbols`` / ``return`` pair
    made everything after the symbol collection dead code, so the function
    always returned None without fetching anything. The unused sp500 /
    nasdaq / nyse fetches (their results were never read) are gone too.

    :param startDate: date of the first trading day wanted (a datetime.date)
    :param endDate:   date of the last trading day wanted
    :param period:    number of warm-up days fetched before startDate
    :return: dict mapping symbol -> list of historical rows (oldest first)
    """
    # Only AMEX symbols feed the output file (Amex_data.json).
    amex = finsymbols.get_amex_symbols()
    tickerSymbols = [company.get("symbol") for company in amex]

    # Start a period before the start date so indicator warm-up data exists.
    startDate = startDate - timedelta(days=period)

    data = {}
    for company in tickerSymbols:
        c = Share(company)
        histData = c.get_historical(str(startDate), str(endDate))
        # Reverse so rows run oldest-first — easier to use downstream
        # (Yahoo presumably returns newest-first; confirm with the API).
        histData = histData[::-1]
        data[company] = histData

    addTools(data, period)
    with open("Data/Amex_data.json", "w") as fp:
        json.dump(data, fp, indent=2)
    return data
def get_all_symbols():
    """Collect symbols from every supported exchange into one DataFrame
    (tagged with an 'exchange' column) and persist it as df_symbol_new."""
    sources = [
        (finsymbols.get_sp500_symbols, 'SPY500'),
        (finsymbols.get_amex_symbols, 'AMEX'),
        (finsymbols.get_nyse_symbols, 'NYSE'),
        (finsymbols.get_nasdaq_symbols, 'NASDAQ'),
    ]
    frames = []
    for fetch, exchange in sources:
        frame = pd.DataFrame(fetch())
        frame['exchange'] = exchange
        frames.append(frame)
    result = pd.concat(frames)
    dbdao.save_dataframe(result, "df_symbol_new")
def make_spy():
    """Compute daily technical indicators for every S&P 500 symbol and
    append one row per symbol to the SPY_HISTORY sqlite table.

    Bug fixed: the SLOWD column was inserted from ``Stochastic[0]``
    (slow %K) instead of ``Stochastic[1]``, duplicating the %K value.
    Also: the bare ``except`` on the first connect now catches Exception
    only, and the connection is closed even if an insert fails.
    """
    # Trading date, shifted back 6 hours so early-UTC runs still map to
    # the previous trading day.
    date = (datetime.datetime.today() - datetime.timedelta(hours=6)).strftime('%Y-%m-%d')

    dic = {}
    for entry in finsymbols.get_sp500_symbols():
        symbol = entry['symbol']
        try:
            tech = Tech(symbol, time="daily")
            dic[symbol] = {
                'signal': tech.signals(),
                "RSI": tech.rsi(),
                "Stochastic": tech.slow_stoch(),
            }
            print("%s successful" % symbol)
        except Exception as e:
            # Best-effort per symbol: log and continue with the rest.
            print(e)
            print("%s unsuccessful" % symbol)

    # Prefer the deployed DB path; fall back to a local file (development).
    try:
        con = lite.connect('/home/yaschaffel/mysite/ALERT_DATA_HISTORY.db')
    except Exception:
        con = lite.connect('ALERT_DATA_HISTORY.db')
    try:
        cur = con.cursor()
        cur.execute("CREATE TABLE IF NOT EXISTS SPY_HISTORY(DAY DATE,NAME TEXT,SIGNAL INT,RSI INT,SLOWK INT,SLOWD INT)")
        for symbol in dic:
            row = dic[symbol]
            cur.execute(
                "INSERT INTO SPY_HISTORY(DAY,NAME,SIGNAL,RSI,SLOWK,SLOWD) VALUES (?,?,?,?,?,?)",
                (date, symbol, row['signal'], row['RSI'],
                 row['Stochastic'][0], row['Stochastic'][1]))  # [1] = slow %D (was [0])
        con.commit()
    finally:
        con.close()
def generate_eod_tasks():
    '''
    Task responsible for generating work items used to obtain end of day
    data for stocks using the get_eod_data() task.
    '''
    db = MongoDBUtil()

    # Union of symbols across every exchange listing; the set drops
    # tickers that appear on more than one exchange.
    symbol_sets = set()
    for fetch in (finsymbols.get_sp500_symbols,
                  finsymbols.get_amex_symbols,
                  finsymbols.get_nyse_symbols,
                  finsymbols.get_nasdaq_symbols):
        symbol_sets.update(_get_symbol_set(fetch()))

    today = datetime.datetime.now()
    end_date = '-'.join([str(today.year), str(today.month), str(today.day)])

    his_symbols = db.has_historical_data(symbol_sets)
    if len(his_symbols) >= 1:
        # Backfill full history for every symbol from a fixed epoch.
        start_date = '1980-01-01'
        hist_job = group(get_eod_data.s(symbol, start_date, end_date)
                         for symbol in symbol_sets)
        hist_job.apply_async()

    # Obtain data for the current date.
    job = group(get_eod_data.s(symbol, end_date, end_date)
                for symbol in symbol_sets)
    job.apply_async()
def get_stock_symbols():
    """Return (sp500, amex, nyse, nasdaq, tsx) ticker lists, dropping any
    symbol that contains a '$' character."""
    def usable(companies):
        return [c['symbol'] for c in companies if '$' not in c['symbol']]

    nyse = usable(ss.get_nyse_symbols())
    amex = usable(ss.get_amex_symbols())
    ndaq = usable(ss.get_nasdaq_symbols())
    sp500 = usable(ss.get_sp500_symbols())
    tsx = get_tsx_companies()
    return sp500, amex, nyse, ndaq, tsx
def generate_eod_tasks():
    '''
    Task responsible for generating work items used to obtain end of day
    data for stocks using the get_eod_data() task.
    '''
    db = MongoDBUtil()
    symbol_sets = set()
    # Gets all symbols from every supported exchange listing.
    sp500 = finsymbols.get_sp500_symbols()
    amex = finsymbols.get_amex_symbols()
    nyse = finsymbols.get_nyse_symbols()
    nasdaq = finsymbols.get_nasdaq_symbols()
    # Adds all symbols to set which removes duplicates.
    symbol_sets.update(_get_symbol_set(sp500))
    symbol_sets.update(_get_symbol_set(amex))
    symbol_sets.update(_get_symbol_set(nyse))
    symbol_sets.update(_get_symbol_set(nasdaq))
    # Today's date formatted YYYY-M-D (no zero padding).
    now = datetime.datetime.now()
    end_date = '-'.join([str(now.year),str(now.month),str(now.day)])
    # NOTE(review): if has_historical_data() returns symbols that ALREADY
    # have history, backfilling ALL symbols whenever it is non-empty looks
    # inverted — confirm the intended contract before changing anything.
    his_symbols = db.has_historical_data(symbol_sets)
    if(len(his_symbols) >= 1):
        # Backfill full history from a fixed epoch.
        start_date = '1980-01-01'
        hist_job = group(get_eod_data.s(symbol,start_date,end_date) for symbol in symbol_sets)
        hist_job.apply_async()
    # Obtain data for current date
    job = group(get_eod_data.s(symbol,end_date,end_date) for symbol in symbol_sets)
    job.apply_async()
def get_sp500_str():
    """Return every S&P 500 ticker as a single space-separated string."""
    sp500 = finsymbols.get_sp500_symbols()
    return " ".join(co['symbol'] for co in sp500)
def seed():
    """
    This will seed the dbs with everything we need
    """
    sp500 = get_sp500_symbols()
    symbols = [s['symbol'] for s in sp500]

    # Map each ticker to its TempoDB series key so every Company row can
    # reference its time-series store.
    tempodb = TempoDB()
    tempodb_mapping = tempodb.get_mapping(symbols)

    for co in sp500:
        logger.info('Seeding: ' + co['symbol'])
        # clean_str() is applied to every text field — presumably it
        # normalizes whitespace/encoding; confirm in its definition.
        company = Company.objects.create(
            name=clean_str(co['company']),
            symbol=clean_str(co['symbol']),
            hq=clean_str(co['headquarters']),
            industry=clean_str(co['industry']),
            sector=clean_str(co['sector']),
            tempodb=tempodb_mapping[co['symbol']]
        )
        # Kick off asynchronous price backfill for the new company.
        # NOTE(review): objects.create() already persists the row, so the
        # extra save() below looks redundant — confirm intent.
        company.update_prices.delay()
        company.save()
    return
def compileSPY():
    """Fetch price and market cap for every S&P 500 company via MarketWatch,
    printing per-company rows plus total/average scrape time.

    Fixes: the elapsed-time accumulator is initialised with an explicit
    zero timedelta (the original used ``datetime.now() - datetime.now()``),
    and the average is guarded so an empty symbol list no longer raises
    ZeroDivisionError.
    """
    from datetime import timedelta  # local: only needed for the zero init

    totaltime = timedelta(0)
    count = 0
    driver = MarketWatch()
    SPY = finsymbols.get_sp500_symbols()

    for ticker in SPY:
        start = datetime.now()
        # NOTE(review): .decode("utf8") only works on py2 byte strings; on
        # py3 these values are already str and this raises AttributeError.
        company_name = str(ticker['company'].decode("utf8"))
        symbol = str(ticker['symbol'].decode("utf8"))

        current_price = driver.get_price(symbol)
        current_marketcap = driver.get_marketcap(symbol)
        firm = Company(company_name, symbol, current_price, current_marketcap)

        endtime = datetime.now() - start
        totaltime += endtime
        count += 1
        print(firm.name, ",", firm.ticker, ",", firm.price, ",", firm.marketcap, ",", endtime)

    print("=== runtime ===")
    print("Total:", totaltime)
    if count:  # avoid division by zero on an empty listing
        print("Avg:", totaltime / count)
def getSP500Symbols(self):
    """Return the cached S&P 500 symbol list, fetching it on first use."""
    # Lazy load: hit finsymbols only while the cache is empty.
    if not self.sp500symbols:
        fetched = finsymbols.get_sp500_symbols()
        self.sp500symbols = [entry["symbol"] for entry in fetched]
    return self.sp500symbols
def download_sp500_data():
    """Download data for each S&P 500 entry that carries a 'symbol' key.

    Returns a list of (data, symbol) tuples.
    """
    return [
        (get_data(stock['symbol']), stock['symbol'])
        for stock in finsymbols.get_sp500_symbols()
        if 'symbol' in stock
    ]
def finsymbol():
    """
    Pull S&P500 symbol list using finsymbol package.
    This method is not recommended because of stability issue.
    :return: S&P500 list
    """
    import finsymbols
    return [entry['symbol'] for entry in finsymbols.get_sp500_symbols()]
def initialize(last_x, date):
    """Collect per-company key data for every S&P 500 symbol.

    Bug fixed: ``last_x`` was accepted but ignored — the per-company
    lookup hard-coded 100; it now passes the parameter through (callers
    that passed 100 are unaffected).

    :param last_x: number of trailing records to request per company
    :param date: as-of date forwarded to the per-company lookup
    :return: list of non-empty rows
    :raises Exception: if QUANDL_API_KEY is not set in the environment
    """
    key = os.getenv("QUANDL_API_KEY")
    if not key:
        raise Exception("Must input a Quandl API key as QUANDL_API_KEY")
    quandl.ApiConfig.api_key = key

    # Benchmark index data shared by every per-company computation.
    sp500_data = get_sym_data("INDEX_GSPC")
    sp500_syms = [entry['symbol'] for entry in get_sp500_symbols()]

    rows = []
    for sym in sp500_syms:
        row = get_company_key_data_by_last_x_and_date(sym, last_x, sp500_data, date)
        if row:  # skip symbols with no usable data
            rows.append(row)
    return rows
def load_sp500_data():
    """Load locally cached data per S&P 500 symbol, falling back to a
    fresh download on a cache miss.

    Bug fixed: the original built ``symbolData = (load_data(symbol), symbol)``
    and then tested ``symbolData is None`` — a tuple is never None, so the
    ``get_data()`` fallback branch was unreachable. The None check now
    applies to the loaded data itself.

    :return: list of (data, symbol) tuples
    """
    spData = []
    for stock in finsymbols.get_sp500_symbols():
        if 'symbol' not in stock:
            continue
        symbol = stock['symbol']
        cached = load_data(symbol)
        if cached is None:
            # Cache miss: fetch fresh data instead.
            spData.append((get_data(symbol), symbol))
        else:
            spData.append((cached, symbol))
    return spData
def obtain_parse_snp500():
    """Obtain and parse all symbols from the S&P 500.

    Returns a list of tuples ready to insert into MySQL."""
    # Timestamp used for both created_at and updated_at.
    now = datetime.datetime.utcnow()

    # finsymbols supplies one dict per constituent.
    sp500 = finsymbols.get_sp500_symbols()

    symbols = []
    for entry in sp500:
        # The 'industry' field feeds the sector column of the DB row.
        symbols.append(
            (entry['symbol'], 'stock', entry['company'],
             entry['industry'], 'USD', now, now)
        )
    return symbols
def obtain_parse_snp500():
    """Obtain and parse all symbols from the S&P 500.

    Returns a list of tuples ready to insert into MySQL."""
    # Single timestamp shared by created_at and updated_at.
    now = datetime.datetime.utcnow()
    # One tuple per constituent; the 'industry' field populates the
    # sector column of the DB row.
    return [
        (row['symbol'], 'stock', row['company'], row['industry'],
         'USD', now, now)
        for row in finsymbols.get_sp500_symbols()
    ]
def main():
    """Run num_rounds rounds of run_round() over the combined S&P 500,
    NASDAQ and NYSE symbol universe.

    Bug fixed: the NASDAQ and NYSE symbol LISTS were added with
    ``list.append()``, which nests each whole list as one element;
    ``list.extend()`` concatenates their entries as intended. The large
    dead commented-out analysis block was removed, and the py2 print
    statements were converted to print() calls.
    """
    start_time = datetime.datetime(1990, 10, 1)
    end_time = datetime.datetime(2016, 10, 8)

    all_stocks = finsymbols.get_sp500_symbols()
    all_stocks.extend(finsymbols.get_nasdaq_symbols())  # was .append() — nested the list
    all_stocks.extend(finsymbols.get_nyse_symbols())    # was .append() — nested the list

    logger.info("starting %s ", datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

    num_rounds = 5
    for j in range(num_rounds):
        print('start_round\t', j)
        logger.info('start round %s', j)
        run_round(all_stocks, start_time, end_time, 1)
        print('end_round\t', j)
    print("the one end")
def generate_eod_tasks():
    '''
    Task responsible for generating work items used to obtain end of day
    data for stocks using the get_eod_data() task.
    '''
    # NOTE(review): Set() is presumably sets.Set from Python 2's
    # deprecated `sets` module — the builtin set() is the modern
    # equivalent; confirm against the file's imports.
    symbol_sets = Set()
    # Gets all symbols (only S&P 500 is active; other exchanges disabled).
    sp500 = finsymbols.get_sp500_symbols()
    #amex = finsymbols.get_amex_symbols()
    #nyse = finsymbols.get_nyse_symbols()
    #nasdaq = finsymbols.get_nasdaq_symbols()
    # Adds all symbols to set which removes duplicates.
    symbol_sets.update(get_symbol_set(sp500))
    #symbol_sets.update(get_symbol_set(amex))
    #symbol_sets.update(get_symbol_set(nyse))
    #symbol_sets.update(get_symbol_set(nasdaq))
    # Fan out one task per symbol (group/.s/apply_async — presumably
    # Celery; confirm against the file's imports).
    job = group(get_eod_data.s(symbol) for symbol in symbol_sets)
    job.apply_async()
    # Signal completion to the mongo loader.
    load_mongo.done()
def generate_eod_tasks():
    """Queue one get_eod_data() work item per S&P 500 symbol."""
    symbol_sets = Set()

    # Only the S&P 500 listing is active; the duplicate-removal via the
    # set matters once the other exchanges below are re-enabled.
    sp500 = finsymbols.get_sp500_symbols()
    symbol_sets.update(get_symbol_set(sp500))
    # Currently disabled exchanges:
    # symbol_sets.update(get_symbol_set(finsymbols.get_amex_symbols()))
    # symbol_sets.update(get_symbol_set(finsymbols.get_nyse_symbols()))
    # symbol_sets.update(get_symbol_set(finsymbols.get_nasdaq_symbols()))

    # Fan out one task per symbol, then signal the mongo loader.
    eod_job = group(get_eod_data.s(symbol) for symbol in symbol_sets)
    eod_job.apply_async()
    load_mongo.done()
def get_sp500_stocks():
    """Thin wrapper: return the finsymbols S&P 500 constituent listing."""
    return finsymbols.get_sp500_symbols()
import finsymbols

# Module-level fetch: downloads the S&P 500 constituent list once, at
# import time, and exposes it as `sp500`.
sp500 = finsymbols.get_sp500_symbols()
def populate_sp500_q():
    """Push every S&P 500 ticker symbol onto the sp500_q work queue."""
    for company in finsymbols.get_sp500_symbols():
        sp500_q.put(company['symbol'])
# NOTE(review): this fragment is truncated — it opens with an `except`
# whose matching `try:` (presumably an os.makedirs call like the one
# below) lies outside the visible chunk. Python 2 syntax throughout
# (`except OSError, e`, raw_input).
except OSError, e:
    # Ignore "directory already exists"; re-raise anything else.
    if e.errno != errno.EEXIST:
        raise
try:
    os.makedirs("saved/")
except OSError, e:
    if e.errno != errno.EEXIST:
        raise

# Interactive configuration: which exchange to screen and whether to
# re-download the ticker list or reuse the cached file.
stock_ex = raw_input("Which stock market would you like to screen: ")
repull_data = raw_input("Pull stock tickers names(y/n): ")
if "y" in repull_data:
    # Substring match, so e.g. "nyse please" still selects NYSE.
    if "nyse" in stock_ex:
        stock_list = fin.get_nyse_symbols()
    if "sp500" in stock_ex:
        stock_list = fin.get_sp500_symbols()
    if "nasdaq" in stock_ex:
        stock_list = fin.get_nasdaq_symbols()
    if "amex" in stock_ex:
        stock_list = fin.get_amex_symbols()
    ticker_list = []
    for stock_n in stock_list:
        ticker_list.append(str(stock_n["symbol"]))
    # Cache the ticker list (e.g. "nyse_list.txt") for later runs.
    target = open(stock_ex + "_list.txt", "w")
    for ticker in ticker_list:
        target.write(ticker + "\n")
    target.close()
if "n" in repull_data:
    # Reuse the previously cached ticker list file.
    ticker_list = []
    target = open(stock_ex + "_list.txt", "r")
    pull_list = target.readlines()
# Give me a list of symbols in the S&P500 that are down more than 10% in one day. # # Joey <*****@*****.**> # pip install yahoo-finance # pip install finsymbols from finsymbols import get_sp500_symbols from yahoo_finance import Share sp500 = get_sp500_symbols() for d in sp500: symbol = d['symbol'] #print "Checking: %s" % symbol stockblob = Share(symbol) close = stockblob.get_prev_close() close = float(close) change = stockblob.get_change() change = float(change) if change < 0: # Negative number (stock is down) change = abs(change) percent = (change / close) * 100 if percent > 10: # Down more than 10%, looks interesting. print "%s is down %s" % (symbol, percent)
# NOTE(review): Python 2 script fragment; the final loop body continues
# beyond this chunk (it is truncated after `progress += 1`).

# Date window: fixed start through today.
str_start_date = '2015-12-31'
str_end_date = time.strftime('%Y-%m-%d')

print 'Retrieving valid trading days from quotes up until ' + str_start_date + ' to ' + str_end_date
# Retrieve list of valid trading days between start and end dates
date_list = gtc.get_trading_days(str_start_date, str_end_date)
print 'Expecting to find quotes for ' + str(len(date_list)) + ' trading days from ' + str_start_date + ' to ' + str_end_date

print 'Retrieving list of S&P500 Constituents'
# Retrieve list of S&P500 constituents
sp500 = finsymbols.get_sp500_symbols()
print 'Retrieved ' + str(len(sp500)) + ' constituents'

# Login to alternate data source in case it is needed
login_url = 'http://api.kibot.com/?action=login&user=guest&password=guest'
request = Request(login_url)
response = urlopen(request)

print 'Retrieving YTD Returns for Constituents'
progress = 0.0
# for j in range(0, 50):
#     entry = sp500[j]
for entry in sp500:
    progress += 1
    # ... loop body truncated here in the visible chunk ...
import Quandl
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import finsymbols as fin


def smooth(x, N):
    """Trailing moving average over windows of N samples, computed from
    cumulative sums; returns len(x) - N values (the first N points have
    no complete window)."""
    cumsum = np.cumsum(x)
    return (cumsum[N:] - cumsum[:-N]) / N


sp500 = fin.get_sp500_symbols()
symbols = [info['symbol'] for info in sp500]

# NOTE(review): hard-coded API token — prefer an environment variable.
authtoken = 'axGKor2CVwTbMk45mzzA'
stock_data = Quandl.get('WIKI/' + symbols[0], authtoken=authtoken)
# Days since the first recorded observation, as the x-axis.
days = np.array([(date - stock_data.index[0]).days for date in stock_data.index])
adj_close = stock_data['Adj. Close'].values

N = 365
adj_close_avg = smooth(adj_close, N)
#plot(days[N:], adj_close_avg, days[N:], adj_close[N:])

# Bug fixed: `std` was called unqualified (NameError without a star
# import) — qualify it as np.std.
sigma = np.std(adj_close[N:] - adj_close_avg)
# Classify each point as >2-sigma above, below, or within the band.
greater = (adj_close[N:] - adj_close_avg) > 2 * sigma
less = (adj_close[N:] - adj_close_avg) < -2 * sigma
other = np.logical_not(greater + less)

plt.style.use('ggplot')
fig, ax = plt.subplots()
ax.set_title('Adjusted closing prices of ' + symbols[0])
ax.set_xlabel('Days since first recorded price')
ax.set_ylabel('Dollars')
def fetch_company_info(self):
    """Fetch the S&P 500 listing and cache it on self.company_objects."""
    self.company_objects = finsymbols.get_sp500_symbols()