def find_options():
    """Build a mapping of ticker symbol -> number of option contracts.

    Fetches tickers filtered by market cap (module-level ``mkmin``/``mkmax``),
    queries ``options_in_limit`` for each, sleeping 2 seconds between calls
    and capping total API calls at 1800 to stay under Yahoo Finance's
    rate limit.

    Returns:
        dict: {ticker (str): option-chain length (int)} for each stock
        that imported successfully.
    """
    tickers = gt.get_tickers_filtered(mktcap_min=mkmin, mktcap_max=mkmax)
    api_calls = 0            # total API calls executed (budget: 1800)
    failures_for_stock = 0   # consecutive failures on the CURRENT ticker
    stocks_not_imported = 0  # tickers abandoned after repeated failures
    collection = {}
    i = 0  # index into tickers
    while (i < len(tickers)) and (api_calls < 1800):
        try:
            stock = tickers[i]
            collection[str(stock)] = len(options_in_limit(str(stock)))
            time.sleep(2)  # throttle so Yahoo Finance doesn't block our IP
            api_calls += 1
            i += 1
            # BUG FIX: reset the failure counter on success. The original
            # accumulated failures across ALL tickers, so after 6 total
            # errors every subsequent error immediately skipped a stock.
            failures_for_stock = 0
        except ValueError:
            print("Yahoo Finance Backend Error, Fixing...")
            if failures_for_stock > 5:
                # Give up on this ticker after too many retries.
                i += 1
                stocks_not_imported += 1
                # BUG FIX: start with a clean slate for the next ticker.
                failures_for_stock = 0
            else:
                failures_for_stock += 1
            api_calls += 1
    print("Amount of stocks successfully imported: " + str(i - stocks_not_imported))
    return collection
def get_filtered_tickers_industry(sect='Technology', mc_min=400):
    """Return tickers in the given sector with market cap >= ``mc_min`` (millions).

    Sectors include e.g. Technology, Finance, Miscellaneous, Consumer Goods,
    Utilities, Telecommunications.

    Returns:
        list | None: the filtered tickers, or None when the underlying
        library fails (the stock-exchange site applies anti-scraping
        measures that break scraping-based fetches).
    """
    try:
        # BUG FIX: the original passed the hard-coded literal 400 here,
        # silently ignoring the ``mc_min`` parameter.
        return gt.get_tickers_filtered(mktcap_min=mc_min, sectors=sect)
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        return None
def get_filtered_tickers_mktcap(mc_min=500, mc_max=2000):
    """Return tickers filtered by market cap in millions (mid-cap = 500-2000).

    Returns:
        list | None: the filtered tickers, or None when the underlying
        library fails (the stock-exchange site applies anti-scraping
        measures that break scraping-based fetches).
    """
    try:
        return gt.get_tickers_filtered(mktcap_min=mc_min, mktcap_max=mc_max)
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        return None
def get_all_stocks_by_sector(sector):
    """Fetch and print every ticker belonging to *sector* (output in Portuguese)."""
    from get_all_tickers import get_tickers

    sector_stocks = get_tickers.get_tickers_filtered(sectors=sector)
    print(f"LISTA DO SETOR: {sector}")
    for ticker in sector_stocks:
        print(f"No setor {sector} temos a stock {ticker}")
def get_sectors():
    """Write one CSV report per sector of mid-cap ($0.5-2B) stock details.

    For each member of the module-level ``Sectors`` enum: fetch the filtered
    ticker list, build a details DataFrame via ``get_stock_details``, save it
    to ``reports/<SECTOR>.csv``, and echo it to stdout.
    """
    for sector in Sectors:
        # FIX (minor): removed the dead ``tickers = []`` assignment that the
        # original immediately overwrote on the next line.
        tickers = gt.get_tickers_filtered(mktcap_min=500, mktcap_max=2000,
                                          sectors=sector.value)
        df = get_stock_details(tickers)
        df.to_csv("reports/" + sector.name + ".csv", index=False)
        print(sector.name)
        print(df.to_string(index=False))
# Necessary Libraries import yfinance as yf, pandas as pd, shutil, os, time, glob, smtplib, ssl from get_all_tickers import get_tickers as gt # List of the stocks we are interested in analyzing. At the time of writing this, it narrows the list of stocks down to 44. # If you have a list of your own you would like to use just create a new list instead of using this, for example: tickers = ["FB", "AMZN", ...] tickers = gt.get_tickers_filtered(mktcap_min=150000, mktcap_max=10000000) # Check that the amount of tickers isn't more than 2000 print("The amount of stocks chosen to observe: " + str(len(tickers))) # These two lines remove the Stocks folder and then recreate it in order to remove old stocks. shutil.rmtree("<Your Path>\\Daily_Stock_Report\\Stocks\\") os.mkdir("<Your Path>\\Daily_Stock_Report\\Stocks\\") # Holds the amount of API calls we executed Amount_of_API_Calls = 0 # This while loop is reponsible for storing the historical data for each ticker in our list. Note that yahoo finance sometimes incurs json.decode errors and because of this we are sleeping for 2 # seconds after each iteration, also if a call fails we are going to try to execute it again. # Also, do not make more than 2,000 calls per hour or 48,000 calls per day or Yahoo Finance may block your IP. The clause "(Amount_of_API_Calls < 1800)" below will stop the loop from making # too many calls to the yfinance API. # Prepare for this loop to take some time. It is pausing for 2 seconds after importing each stock. # Used to make sure we don't waste too many API calls on one Stock ticker that could be having issues Stock_Failure = 0 Stocks_Not_Imported = 0 # Used to iterate through our list of tickers i=0 while (i < len(tickers)) and (Amount_of_API_Calls < 1800):
# Module-level setup for a Reddit stock-mention sentiment scanner.
from main.models import Ticker
import praw
from psaw import PushshiftAPI
import datetime as dt
from datetime import datetime, timedelta
import pytz
import re
import sys
# Vendored copies of vaderSentiment and get_all_tickers are loaded from
# local directories rather than installed packages.
sys.path.insert(0, 'vaderSentiment-master/vaderSentiment')
from vaderSentiment import SentimentIntensityAnalyzer
sys.path.insert(1, 'get_all_tickers-master/')
from get_all_tickers import get_tickers as gt
from alpha_vantage.timeseries import TimeSeries
from json import dumps
# Universe: all tickers with market cap >= $1B (mktcap_min is in millions).
list_of_tickers = gt.get_tickers_filtered(mktcap_min=1000)
# Set gives O(1) membership tests when checking words against known tickers.
lookup = set(list_of_tickers)
ticker_dict = {}
reddit = praw.Reddit(
    # YOUR INFO
)
api = PushshiftAPI(reddit)
# Common all-caps words/acronyms that look like tickers but should be
# ignored when scanning posts. NOTE(review): "TYS" appears twice.
# NOTE(review): this list is truncated here — it continues beyond this chunk.
blacklist_words = [
    "YOLO", "TOS", "CEO", "CFO", "CTO", "DD", "BRO", "ARE", "BTFD", "WSB",
    "OK", "RH", "KYS", "FD", "TYS", "US", "USA", "IT", "ATH", "RIP", "BMW",
    "GDP", "OTM", "ATM", "ITM", "IMO", "LOL", "DOJ", "BE", "HAS", "PR",
    "PC", "ICE", "TYS", "ISIS", "PRAY", "PT", "FBI", "SEC", "GOD", "NOT",
    "POS", "COD", "AYYMD", "FOMO", "TL;DR", "EDIT", "STILL", "LGMA", "WTF",
    "RAW",
def get_min_mkt_cap(mkt_cap=1000):
    """Return an alphabetically sorted list of tickers whose market cap
    is at least ``mkt_cap`` (in millions)."""
    matching = get_tickers.get_tickers_filtered(mktcap_min=mkt_cap)
    return sorted(matching)