class Test_ScrapingOverviewData(unittest.TestCase):
    """Tests for WSJscraper extraction of the key-stock-data table from a saved overview page."""

    def setUp(self):
        self.ticker = "SRV"
        self.type = "overview"
        self.data_dir = "..\\testData\\"
        self.store = Storage(self.data_dir)
        # Expected values for the SRV overview fixture in ..\testData\.
        self.expected = {
            "P/E Ratio (TTM)": 21.68,
            "EPS (TTM)": 0.34,
            "Market Cap": 698.87,
            "Shares Outstanding": 98.43,
            "Public Float": 43.03,
            "Yield": 3.01,
            "Latest Dividend": 0.11,
            "Ex-Dividend Date": "09/07/15",
        }
        self.scraper = WSJscraper(self.store)
        # FIX: close the fixture file instead of leaking the handle opened
        # inline via BeautifulSoup(open(...)).
        with open(self.store.html(self.ticker, self.type)) as html_file:
            self.scraper.overview = BeautifulSoup(html_file, "lxml")

    def test_RetrievesKeyStockDataTable(self):
        data = self.scraper.keyStockData()
        # scraper should return a dictionary
        self.assertIsInstance(data, dict)
        # FIX: the original `data.keys().sort()` was vacuous — list.sort()
        # returns None, so the assertion compared None == None and always
        # passed (and dict views have no .sort() on Python 3). sorted()
        # returns the sorted list, making the comparison meaningful.
        self.assertEqual(sorted(data.keys()), sorted(self.expected.keys()))
        self.assertEqual(data, self.expected)
class Test_ScrapingOverviewData(unittest.TestCase):
    """Tests for WSJscraper extraction of the key-stock-data table from a saved overview page."""

    def setUp(self):
        self.ticker = "SRV"
        self.type = "overview"
        self.data_dir = "..\\testData\\"
        self.store = Storage(self.data_dir)
        # Expected values for the SRV overview fixture in ..\testData\.
        self.expected = {
            "P/E Ratio (TTM)": 21.68,
            "EPS (TTM)": 0.34,
            "Market Cap": 698.87,
            "Shares Outstanding": 98.43,
            "Public Float": 43.03,
            "Yield": 3.01,
            "Latest Dividend": 0.11,
            "Ex-Dividend Date": "09/07/15",
        }
        self.scraper = WSJscraper(self.store)
        # FIX: close the fixture file instead of leaking the handle opened
        # inline via BeautifulSoup(open(...)).
        with open(self.store.html(self.ticker, self.type)) as html_file:
            self.scraper.overview = BeautifulSoup(html_file, "lxml")

    def test_RetrievesKeyStockDataTable(self):
        data = self.scraper.keyStockData()
        # scraper should return a dictionary
        self.assertIsInstance(data, dict)
        # FIX: the original `data.keys().sort()` was vacuous — list.sort()
        # returns None, so the assertion compared None == None and always
        # passed (and dict views have no .sort() on Python 3). sorted()
        # returns the sorted list, making the comparison meaningful.
        self.assertEqual(sorted(data.keys()), sorted(self.expected.keys()))
        self.assertEqual(data, self.expected)
class Test_LoadingPages(unittest.TestCase):
    """Tests for WSJscraper page loading and year-list validation."""

    def setUp(self):
        self.ticker = "SRV"
        self.type = "overview"
        self.data_dir = "..\\testData\\"
        self.store = Storage(self.data_dir)
        self.scraper = WSJscraper(self.store)

    def test_StoresSoupObjectOnPageLoad(self):
        self.scraper.load_overview(self.ticker)
        # FIX: the fixture file opened inline via BeautifulSoup(open(...))
        # was never closed; a context manager releases the handle promptly.
        with open(self.store.html(self.ticker, "overview")) as html_file:
            expected = BeautifulSoup(html_file, "lxml")
        self.assertIsInstance(self.scraper.overview, BeautifulSoup)
        self.assertEqual(self.scraper.overview.title, expected.title)

    def test_ChecksForValidYears(self):
        # Invalid entries: blank and non-4-digit strings should trigger
        # InsufficientDataError; a clean list of years should pass (None).
        years = [u'2015', u' ', u'012', u'.3']
        self.assertRaises(InsufficientDataError, self.scraper.check_years, years=years)
        self.assertIsNone(self.scraper.check_years([u'2015', u'2014', u'2013']))
class Test_Storage(unittest.TestCase):
    """Tests that Storage builds the expected Windows-style file paths."""

    def setUp(self):
        self.root_location = "..\\testData\\"
        self.store = Storage(self.root_location)

    def test_StorageLocationForExcel(self):
        filename = "test"
        expected_filepath = self.root_location + filename + ".xlsx"
        self.assertEqual(self.store.excel(filename), expected_filepath)

    def test_StorageLocationForHTML(self):
        ticker = "AAA"
        # FIX: renamed local from `type`, which shadowed the builtin.
        page_type = "overview"
        expected_filepath = self.root_location + ticker + "\\" + ticker + page_type + ".html"
        self.assertEqual(self.store.html(ticker, page_type), expected_filepath)

    def test_StorageLocationForStockPickle(self):
        ticker = "AAA"
        # FIX: renamed local from `type`, which shadowed the builtin.
        statement_type = "income"
        expected_filepath = self.root_location + ticker + "\\pickles\\" + statement_type + ".pkl"
        self.assertEqual(self.store.stock_pickle(ticker, statement_type), expected_filepath)
class Test_LoadingPages(unittest.TestCase):
    """Tests for WSJscraper page loading and year-list validation."""

    def setUp(self):
        self.ticker = "SRV"
        self.type = "overview"
        self.data_dir = "..\\testData\\"
        self.store = Storage(self.data_dir)
        self.scraper = WSJscraper(self.store)

    def test_StoresSoupObjectOnPageLoad(self):
        self.scraper.load_overview(self.ticker)
        # FIX: the fixture file opened inline via BeautifulSoup(open(...))
        # was never closed; a context manager releases the handle promptly.
        with open(self.store.html(self.ticker, "overview")) as html_file:
            expected = BeautifulSoup(html_file, "lxml")
        self.assertIsInstance(self.scraper.overview, BeautifulSoup)
        self.assertEqual(self.scraper.overview.title, expected.title)

    def test_ChecksForValidYears(self):
        # Invalid entries: blank and non-4-digit strings should trigger
        # InsufficientDataError; a clean list of years should pass (None).
        years = [u'2015', u' ', u'012', u'.3']
        self.assertRaises(InsufficientDataError, self.scraper.check_years, years=years)
        self.assertIsNone(self.scraper.check_years([u'2015', u'2014', u'2013']))