def test_makeFromScrapee_shouldRaiseOnScraperNotFound(self):
    """makeFromScrapee must raise LookupError when no registered scraper
    class matches the scrapee's URL."""
    # Given
    factory = ScraperFactory()
    scrapeeWithUnknownUrl = Mock(spec_set=Scrapable)
    scrapeeWithUnknownUrl.url = "https://non-findable-url.com"

    # When / Then
    with self.assertRaises(LookupError):
        factory.makeFromScrapee(
            scrapee=scrapeeWithUnknownUrl,
            scrapeeRepo=Mock(),
            requestClass=Mock,
            session=Mock(),
            messenger=Mock(),
        )
def test_makeFromScrapee(self):
    """Happy path: the factory constructs the registered scraper class for a
    matching scrapee URL and wires in the per-URL configuration values."""
    # Given
    factory = ScraperFactory()

    class ScraperStub(Scraper):
        # Stub scraper bound to the config fixture URL below.
        URL = "https://www.test_scraper_config_02.com"

        def __init__(self, scrapee, scrapeeRepo, request: Request, messenger):
            super().__init__(scrapee=scrapee, scrapeeRepo=scrapeeRepo,
                             request=request, messenger=messenger)

        def run(self) -> None:
            raise NotImplementedError

    self.scraperStubClass = ScraperStub

    scrapee = Mock(spec_set=Scrapable)
    scrapee.url = "https://www.test_scraper_config_02.com"
    scrapee.name = "The huge factory shop"
    scrapeeRepo = Mock(spec_set=ShopRepo)
    session = Mock(spec=Session)
    messenger = MessengerMock(request=RequestMock())

    # Expected values come from the ScraperConfigRepoMonkeyPatch repository
    # entry for scraper URL https://www.test_scraper_config_02.com
    expectedIterSleep = (7, 16, 1.0)
    expectedRequestTimeout = 5
    expectedRequestMaxRetries = 5
    expectedRequestUseRandomProxy = False

    factory.register(class_=self.scraperStubClass)

    # When
    createdScraper = factory.makeFromScrapee(
        scrapee=scrapee,
        scrapeeRepo=scrapeeRepo,
        requestClass=RequestMock,
        session=session,
        messenger=messenger,
    )

    # Then
    self.assertIsInstance(createdScraper, self.scraperStubClass)
    self.assertIsInstance(createdScraper._scrapeeRepo, ShopRepo)
    self.assertIsInstance(createdScraper._request, Request)
    self.assertEqual(self.scraperStubClass.URL, createdScraper.URL)
    self.assertEqual(scrapee.name, createdScraper._scrapee.name)
    # Internal attributes that are not initializable from outside
    self.assertEqual(False, createdScraper._isCancelLoop)
    self.assertEqual(0, createdScraper._failCount)
    self.assertEqual(expectedIterSleep, createdScraper._iterSleep)
    self.assertEqual(expectedRequestTimeout, createdScraper._request._timeout)
    self.assertEqual(expectedRequestMaxRetries,
                     createdScraper._request._maxRetries)
    self.assertEqual(expectedRequestUseRandomProxy,
                     createdScraper._request._useRandomProxy)
def test_makeFromScrapee_shouldRaiseOnMultipleScrapersFound(self):
    """makeFromScrapee must raise LookupError when more than one registered
    scraper class matches the scrapee's URL."""
    # Given
    sut = ScraperFactory()
    scrapee = Mock(spec_set=Scrapable)
    # FIX: the factory reads ``scrapee.url`` as a plain attribute (as the
    # sibling tests show). The previous ``scrapee.url.return_value = ...``
    # configured a *call* on the child mock instead, leaving ``scrapee.url``
    # itself a Mock that matched no scraper — so the test passed via the
    # "not found" path rather than the "multiple found" path under test.
    scrapee.url = "does not matter here"

    class ScraperStub(Scraper):
        URL: str = "does not matter here"

        def run(self) -> None:
            pass

    self.scraperStubClass = ScraperStub

    # Append the same class twice directly, bypassing register(), so that a
    # genuine duplicate lands in the factory's lookup list.
    sut._scraperClasses.append(self.scraperStubClass)
    sut._scraperClasses.append(self.scraperStubClass)

    # When / Then
    with self.assertRaises(LookupError):
        sut.makeFromScrapee(
            scrapee=scrapee,
            scrapeeRepo=Mock(),
            session=Mock(),
            requestClass=Mock,
            messenger=Mock(),
        )