def test_html_is_valid(self):
    """Verify that valid mocked HTML yields table, thead and tbody tags."""
    test_date = date(2019, 3, 4)
    mocked_html = '''
        <table class="table table-BCRA table-bordered table-hover table-responsive">
            <thead>
            </thead>
            <tbody>
            </tbody>
        </table>
    '''
    # Patch fetch_content so no real network/browser access happens.
    with patch.object(BCRASMLScraper, 'fetch_content', return_value=mocked_html):
        scraper = BCRASMLScraper("", {}, False)
        html = scraper.fetch_content(test_date)
        parsed = BeautifulSoup(html, "html.parser")
        table_tag = parsed.find('table')
        thead_tag = table_tag.find('thead') if table_tag else None
        tbody_tag = table_tag.find('tbody') if table_tag else None
        assert table_tag is not None
        assert thead_tag is not None
        assert tbody_tag is not None
def test_fetch_content_invalid_url_patching_driver(self):
    """Fetch content with an invalid URL: the driver's page_source is returned unchanged."""
    coins = {}
    fake_driver = MagicMock()
    # Non-string page_source simulates an error response from the driver.
    fake_driver.page_source = 400
    # Patch both the browser driver and the coin-configuration check so the
    # scraper runs fully offline.
    with patch.object(
        BCRASMLScraper, 'get_browser_driver', return_value=fake_driver
    ), patch.object(
        BCRASMLScraper, 'validate_coin_in_configuration_file', return_value=True
    ):
        scraper = BCRASMLScraper('', coins, False)
        result = scraper.fetch_content(coins)
        assert result == 400
def test_html_is_not_valid(self):
    """Verify that blank mocked HTML yields no table, thead or tbody tags."""
    test_date = date(2019, 3, 4)
    # Patch fetch_content to return content with no markup at all.
    with patch.object(BCRASMLScraper, 'fetch_content', return_value=' '):
        scraper = BCRASMLScraper("", {}, False)
        html = scraper.fetch_content(test_date)
        parsed = BeautifulSoup(html, "html.parser")
        table_tag = parsed.find('table')
        thead_tag = table_tag.find('thead') if table_tag else None
        tbody_tag = table_tag.find('tbody') if table_tag else None
        assert table_tag is None
        assert thead_tag is None
        assert tbody_tag is None
def test_fetch_content_patching_driver(self):
    """Fetch content happy path: the driver's page_source is returned as content."""
    coins = {}
    fake_driver = MagicMock()
    fake_driver.page_source = "foo"
    fake_driver.status_code = 200
    # Patch both the browser driver and the coin-configuration check so the
    # scraper runs fully offline.
    with patch.object(
        BCRASMLScraper, 'get_browser_driver', return_value=fake_driver
    ), patch.object(
        BCRASMLScraper, 'validate_coin_in_configuration_file', return_value=True
    ):
        scraper = BCRASMLScraper(
            '',
            coins,
            intermediate_panel_path=None,
            use_intermediate_panel=False,
        )
        result = scraper.fetch_content(coins)
        assert result == "foo"