def test_BuildsIncomeStatementAddress(self):
    scraper = WSJdownloader(Storage(""))
    for ticker in tickers:
        scraper_address = scraper.get_address(ticker, "income")
        expected_address = annual_income_statement.replace("[ticker]", ticker)
        self.assertEqual(scraper_address, expected_address)
def storeValuationSummaryBrief(tickers = None):
    store = Storage()
    if tickers is None:
        xls = XLSio(store)
        xls.loadWorkbook("StockSummary")
        xls.table = xls.table[xls.table["P/E Ratio (TTM)"].notnull()]
        tickers = xls.getTickers()
    summary = {}
    count = 1
    errors = {}
    print("Assessing " + str(len(tickers)) + " companies")
    for ticker in tickers:
        try:
            reporter = Reporter(ticker)
            summary[ticker] = reporter.oneLineValuation()
        except MissingStatementEntryError as E:
            errors[ticker] = E.message
        except InsufficientDataError as E:
            errors[ticker] = E.message
        except Exception as E:
            # Arbitrary exceptions may not define .message, so fall back to str(E).
            errors[ticker] = str(E)
        # Report progress roughly every quarter of the ticker list.
        if count % max(len(tickers) // 4, 1) == 0:
            pct_complete = round(100.0 * count / len(tickers))
            print(str(pct_complete) + "% complete")
        count += 1
    # Use any one valuation series to define the output columns.
    index = next(iter(summary.values())).index
    summary = pandas.DataFrame(summary, index = index).T
    summary.to_excel(store.excel("ValuationSummary"))
    print(str(len(errors)) + " Failed")
    print(str(len(summary)) + " Succeeded")
    return errors
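# A minimal usage sketch (an assumption, not part of the original module): the
# default Storage location is expected to already hold a "StockSummary" workbook.
#
#     errors = storeValuationSummaryBrief()                # every ticker with a P/E on file
#     errors = storeValuationSummaryBrief(["GNG", "SRV"])  # or an explicit ticker list
#     # 'errors' maps each ticker that failed to its error message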
def setUp(self):
    self.existing_tickers = ["1PG", "ONT", "1ST", "TGP", "TIX"]
    self.existing_headings = ["Company name", "ASX code", "GICS industry group"]
    self.xls = XLSio(Storage("..\\testData\\"))
    self.xls.loadWorkbook("ASXListedCompanies")
def setUp(self):
    self.filename = "test_file"
    store = Storage("..\\testData\\")
    self.filepath = store.excel(self.filename)
    self.xls = XLSio(store)
    self.xls.table = Mock()
    self.xls.table.to_excel = Mock()
def setUp(self): self.ticker = "GNG" self.type = "balance" data_dir = "..\\testData\\" store = Storage(data_dir) self.assets = pandas.read_pickle(data_dir + "GNG\\GNGassets.pkl") self.liabilities = pandas.read_pickle(data_dir + "GNG\\GNGliabilities.pkl") self.scraper = WSJscraper(store)
def retrieveOverviewData(storage_dir, headings = None):
    # NOTE: the 'headings' parameter is currently unused.
    store = Storage(storage_dir)
    xls = XLSio(store)
    scraper = WSJscraper(store)
    xls.loadWorkbook("ASXListedCompanies")
    tickers = xls.getTickers()
    new_data = {}
    for ticker in tickers:
        scraper.load_overview(ticker)
        try:
            new_data[ticker] = scraper.keyStockData()
        except Exception:
            print("Problem with: " + ticker)
    xls.updateTable(new_data)
    xls.saveAs("StockSummary")
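# A minimal usage sketch (the storage directory is an assumption, mirroring the
# default used by the builder functions further down):
#
#     retrieveOverviewData("D:\\Investing\\Data")
#     # loads "ASXListedCompanies", scrapes key stock data for each listed ticker
#     # and saves the merged table as "StockSummary"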
def saveAnalysisToExcel(ticker):
    results = Reporter(ticker)
    store = Storage()
    writer = pandas.ExcelWriter(store.excel(ticker + "analysis"))
    results.summaryTable().to_excel(writer, "Summary")
    results.financials.income.income_sheet.to_excel(writer, "Income")
    assets = results.financials.balance.asset_sheet
    liabilities = results.financials.balance.liabilities
    assets.to_excel(writer, "Balance")
    liabilities.to_excel(writer, "Balance", startrow = len(assets) + 1)
    operating = results.financials.cashflow.operating
    investing = results.financials.cashflow.investing
    financing = results.financials.cashflow.financing
    operating.to_excel(writer, "Cashflow")
    investing.to_excel(writer, "Cashflow", startrow = len(operating) + 1)
    financing.to_excel(writer, "Cashflow", startrow = len(operating) + len(investing) + 2)
    writer.save()
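# A minimal usage sketch (assumes the default Storage location already holds the
# downloaded financial statements for the ticker):
#
#     saveAnalysisToExcel("GNG")
#     # writes a workbook named from the ticker plus "analysis", containing
#     # Summary, Income, Balance and Cashflow sheets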
def setUp(self): self.ticker = "SRV" self.type = "overview" self.data_dir = "..\\testData\\" self.store = Storage(self.data_dir) self.expected = { "P/E Ratio (TTM)": 21.68, "EPS (TTM)": 0.34, "Market Cap": 698.87, "Shares Outstanding": 98.43, "Public Float": 43.03, "Yield": 3.01, "Latest Dividend": 0.11, "Ex-Dividend Date": "09/07/15" } self.scraper = WSJscraper(self.store) self.scraper.overview = BeautifulSoup( open(self.store.html(self.ticker, self.type)), "lxml")
def setUp(self):
    self.root_location = "..\\testData\\"
    self.store = Storage(self.root_location)
def buildFinancialAnalyst(ticker, storage_dir = "D:\\Investing\\Data"):
    resource = StockFinancialsResource(Storage(storage_dir))
    income = IncomeStatement(resource.getFinancials(ticker, "income"))
    balance = BalanceSheet(resource.getFinancials(ticker, "assets"),
                           resource.getFinancials(ticker, "liabilities"))
    cashflow = CashflowStatement(resource.getFinancials(ticker, "operating"),
                                 resource.getFinancials(ticker, "investing"),
                                 resource.getFinancials(ticker, "financing"))
    return FinanceAnalyst(income, balance, cashflow)
def buildPriceAnalyser(ticker, storage_dir = "D:\\Investing\\Data"):
    store = Storage(storage_dir)
    prices = pandas.read_pickle(store.yahoo(ticker))
    return PriceAnalyser(prices)
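# A minimal usage sketch for the two builders above (the ticker is one of the
# test tickers used elsewhere in this repository):
#
#     finance_analyst = buildFinancialAnalyst("GNG")
#     price_analyser = buildPriceAnalyser("GNG")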
def setUp(self): self.ticker = "SRV" self.type = "overview" self.data_dir = "..\\testData\\" self.store = Storage(self.data_dir) self.scraper = WSJscraper(self.store)
def test_BuildsCashFlowAddress(self):
    scraper = WSJdownloader(Storage(""))
    for ticker in tickers:
        scraper_address = scraper.get_address(ticker, "cashflow")
        expected_address = annual_cash_flow.replace("[ticker]", ticker)
        self.assertEqual(scraper_address, expected_address)
def test_BuildsBalanceSheetAddress(self):
    scraper = WSJdownloader(Storage(""))
    for ticker in tickers:
        scraper_address = scraper.get_address(ticker, "balance")
        expected_address = annual_balance_sheet.replace("[ticker]", ticker)
        self.assertEqual(scraper_address, expected_address)
def test_BuildsFinancialsOverviewAddress(self):
    scraper = WSJdownloader(Storage(""))
    for ticker in tickers:
        scraper_address = scraper.get_address(ticker, "financials")
        expected_address = financials_overview.replace("[ticker]", ticker)
        self.assertEqual(scraper_address, expected_address)