def scrape(self):
    """Scrape the balance-sheet tables for ``self.symbol`` from Yahoo Finance.

    Returns the parsed table list concatenated into a single DataFrame with
    values cast to float where possible; falls back to the generic page
    parser when concatenation fails (e.g. the table list is empty).
    """
    url = ('https://finance.yahoo.com/quote/' + self.symbol
           + '/balance-sheet?p=' + self.symbol)
    table = scraper(self.symbol).__table__(url)
    try:
        table = pd.concat(table, sort=True).astype(float, errors='ignore')
    except (ValueError, TypeError):
        # Narrowed from a bare ``except:`` (which also hid KeyboardInterrupt
        # and real bugs). pd.concat raises ValueError/TypeError on an empty
        # or malformed table list; fall back to the generic scrape then.
        table = scraper(self.symbol).__general__(url)
    return table
def dividends(self):
    """Fetch the raw dividend-history table for ``self.symbol``.

    Builds a Yahoo Finance history URL bounded by ``self.start`` and
    ``self.end`` (converted via ``format_date``) and returns the first
    table scraped from that page.
    """
    start_ts = format_date(self.start)
    end_ts = format_date(self.end)
    url = (
        "https://finance.yahoo.com/quote/" + self.symbol
        + "/history?period1=" + str(start_ts)
        + "&period2=" + str(end_ts)
        + "&interval=div%7Csplit&filter=div&frequency=1d"
    )
    tables = scraper(url).__table__()
    return tables[0]
def dividends(self):
    """Return the dividend history for ``self.symbol`` as a float Series.

    Scrapes the Yahoo Finance dividend table, indexes it by date, strips
    the trailing "Dividend" label from each amount, and names the
    resulting Series after the symbol.
    """
    url = ("https://finance.yahoo.com/quote/" + self.symbol
           + "/history?interval=div%7Csplit&filter=div&frequency=1d")
    dividends = scraper(self.symbol).__table__(url)
    if len(dividends) > 1:
        # Drops the row labeled 4 — presumably a page artifact rather than
        # a dividend record; confirm against a live scrape.
        dividends = dividends.drop(4)
    dividends = dividends.set_index('Date')
    dividends = dividends['Dividends']
    # BUG FIX: the original pattern r'\Dividend' unintentionally used the
    # regex class \D ("any non-digit") for the leading character, and relied
    # on pandas' version-dependent default for ``regex``. Replace the literal
    # word with regex disabled so behavior is explicit and stable.
    dividends = dividends.str.replace('Dividend', '', regex=False).astype(float)
    dividends.name = self.symbol
    return dividends
def history(self):
    """Scrape the daily price history for ``self.symbol``.

    Returns a DataFrame of daily rows between ``self.start`` and
    ``self.end`` indexed by 'Date', or a ``(symbol, message)`` tuple when
    the scrape yields no tables (legacy error convention preserved).
    """
    def _midnight_ts(d):
        # Midnight-of-day UNIX timestamp. Replaces the original
        # strftime("%Y-%m-%d") -> strptime round-trip, which produced the
        # same truncation with two needless string conversions.
        return int(time.mktime(datetime(d.year, d.month, d.day).timetuple()))

    start = _midnight_ts(self.start)
    end = _midnight_ts(self.end)
    url = ('https://finance.yahoo.com/quote/' + self.symbol + "/history?"
           + "period1=" + str(start) + "&period2=" + str(end)
           + "&interval=1d&filter=history&frequency=1d")
    history = scraper(self.symbol).__table__(url)
    if len(history) == 0:
        # Return the legacy error tuple directly. The original fell through
        # to the DataFrame-cleanup block, which then raised on the tuple and
        # printed a second, misleading error message.
        return (self.symbol,
                'Error occurred in history method. Double check you entered the symbol correctly.')
    history = pd.concat(history, sort=True).astype(float, errors='ignore')
    try:
        # The last row is a page footer, not a price row; drop it, then
        # index by date. Narrowed from a bare ``except:``.
        history = history.drop(len(history) - 1)
        history = history.set_index('Date')
    except (KeyError, AttributeError, IndexError):
        print(self.symbol, ': Error cleaning history dataframe. Is it the right symbol?')
    return history
def search():
    """Render the search-results page for the ``s`` query parameter.

    Redirects to /index when no search string is supplied. Splitting on
    "%20" restores spaces when the query arrives still URL-encoded.
    """
    s = request.args.get('s')
    if s is None:  # fixed: identity check instead of ``!= None``
        return redirect('/index')
    s = " ".join(s.split("%20"))
    scrap = scraper(s)
    # Build the template context once; the original duplicated the whole
    # render_template call just to add the ``user`` kwarg.
    context = dict(
        title=s,
        output=scrap.products,
        sstring=s,
        msgs=request.args.get('msgs'),
        categories=cats,
    )
    if 'email' in session:
        context['user'] = session['email']
    return render_template('search.html', **context)
def scrape(self):
    """Scrape and flatten the cash-flow statement for ``self.symbol``."""
    url = ('https://finance.yahoo.com/quote/' + self.symbol
           + '/cash-flow?p=' + self.symbol)
    raw_tables = scraper(self.symbol).__table__(url)
    combined = pd.concat(raw_tables, sort=True)
    return combined.astype(float, errors='ignore')
def scrape(self):
    """Return the analyst-estimates tables scraped for ``self.symbol``."""
    page_url = ('https://finance.yahoo.com/quote/' + self.symbol
                + '/analysis?p=' + self.symbol)
    return scraper(self.symbol).__table__(page_url)
def __init__(self, date=None):
    """Scrape the Yahoo Finance earnings calendar for a given day.

    Args:
        date: calendar day to fetch; defaults to *today at call time*.

    BUG FIX: the original default ``date=date.today()`` was evaluated once
    at class-definition time, so every later instantiation in a
    long-running process reused a stale date. ``None`` defers the default
    to each call.
    """
    if date is None:
        # Local aliased import: the parameter name shadows the ``date``
        # class, so ``date.today()`` here would hit the (None) parameter.
        from datetime import date as _date
        date = _date.today()
    self.date = date
    self.url = ("https://ca.finance.yahoo.com/calendar/earnings?&day="
                + str(self.date))
    self.calendar = scraper(self.url).__table__()
def scrape(self):
    """Fetch the company profile page and populate sector and description."""
    profile_url = ("https://finance.yahoo.com/quote/" + self.symbol
                   + "/profile?p=" + self.symbol)
    page = scraper(self.symbol).__general__(profile_url)
    self.sector(page)
    self.description(page)