        async def runner():
            session = AioHttpSession(proxyRepo=self.proxyRepo,
                                     userAgentRepo=self.userAgentRepo)
            request = AioHttpRequest(session)

            try:
                # 1. Get shop response
                fetchParams = request.Params(url=shop.url)
                response = await request.fetch(params=fetchParams)

                if response.text:
                    soup = BeautifulSoup(response.text, "html.parser")
                    with open(str(shopResponsePath), "w+",
                              encoding="utf-8") as file:
                        file.write(soup.prettify())

                # 2. Get product response
                fetchParams.url = product.url
                response = await request.fetch(params=fetchParams)
                if response.text:
                    soup = BeautifulSoup(response.text, "html.parser")
                    with open(str(productResponsePath), "w+",
                              encoding="utf-8") as file:
                        file.write(soup.prettify())

            finally:
                await session.close()
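The nested runner() coroutine above still has to be driven by an event loop from the enclosing synchronous test method, which the snippet omits. A minimal sketch, assuming a plain unittest-style test; the class name, method name and fixture setup are placeholders:

import asyncio
import unittest

class ShopScraperFetchTest(unittest.TestCase):  # hypothetical test class
    def test_fetchShopAndProductResponses(self):
        # ... set up shop, product, shopResponsePath and productResponsePath ...

        async def runner():
            ...  # body as in the example above

        # Drive the nested coroutine to completion on a fresh event loop.
        asyncio.run(runner())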
Example 2
        async def runner():
            # Given
            session = AioHttpSession(
                proxyRepo=self.netHelper.proxyRepo,
                userAgentRepo=self.netHelper.userAgentRepo)
            request = AioHttpRequest(session)
            messenger = self.netHelper.createDiscordMessenger(request=request)

            # When
            try:
                sut = FootdistrictShopScraper(
                    scrapee=self.shopHelper.shop,
                    scrapeeRepo=self.shopHelper.shopRepo,
                    request=request,
                    messenger=messenger)

                await sut.run()

            # Then
            except Exception as e:
                self.fail(
                    f"Expected test to run without exception, but got {e}")

            finally:
                await session.close()
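On Python 3.8+ the same Given/When/Then flow can live directly in an async test method via unittest.IsolatedAsyncioTestCase, which removes the need for a nested runner(). A sketch under that assumption, reusing the helpers from the example above; the test class name is hypothetical, and self.netHelper and self.shopHelper are assumed to be prepared during setup:

import unittest

class FootdistrictShopScraperTest(unittest.IsolatedAsyncioTestCase):  # hypothetical
    async def test_run_completes_without_exception(self):
        # Given
        session = AioHttpSession(
            proxyRepo=self.netHelper.proxyRepo,
            userAgentRepo=self.netHelper.userAgentRepo)
        request = AioHttpRequest(session)
        messenger = self.netHelper.createDiscordMessenger(request=request)

        # When
        try:
            sut = FootdistrictShopScraper(
                scrapee=self.shopHelper.shop,
                scrapeeRepo=self.shopHelper.shopRepo,
                request=request,
                messenger=messenger)
            await sut.run()

        # Then
        except Exception as e:
            self.fail(f"Expected test to run without exception, but got {e}")

        finally:
            await session.close()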
Example 3
    async def _fetchRunner(self, urls: list, timeout: int) -> List[Response]:
        # Init a persistent session for all requests
        session = AioHttpSession(
            proxyRepo=self.netHelper.proxyRepo,
            userAgentRepo=self.netHelper.userAgentRepo)

        sut = AioHttpRequest(session=session)
        sut.configure(maxRetries=4, timeout=timeout, useRandomProxy=True)

        tasks = [self._fetchData(sut, url, timeout) for url in urls]
        responses: List[Response] = await asyncio.gather(*tasks)

        # Do not forget to close session...
        await session.close()

        return responses
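A synchronous test in the same class can then exercise _fetchRunner end to end; a minimal sketch, where the URLs, timeout and test method name are placeholders:

    def test_fetchMultipleUrls(self):
        urls = ["https://example.com", "https://example.org"]  # placeholder URLs
        # Run the async helper to completion and expect one Response per URL.
        responses = asyncio.run(self._fetchRunner(urls=urls, timeout=10))
        self.assertEqual(len(responses), len(urls))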
Example 4
    async def initSUT(self):
        messengerDao = messenger.DiscordTinyDao(
            path=TEST_MESSENGERS_INTEGRATION_REPO_PATH)
        self._messengerRepo = messenger.Repo(dao=messengerDao)

        self._session = AioHttpSession()
        self._request = AioHttpRequest(session=self._session)
        self.sut = messenger.Discord(request=self._request,
                                     repo=self._messengerRepo)
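initSUT opens a session but never releases it; the matching teardown presumably has to close it, as the other examples do with await session.close(). A minimal sketch, assuming an async-aware test case such as unittest.IsolatedAsyncioTestCase:

    async def asyncTearDown(self):
        # Release the network resources opened in initSUT.
        await self._session.close()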
Example 5
    @staticmethod
    async def _fetchData(sut: AioHttpRequest, url: str, timeout: int):
        """Test-helper method: fetch a single URL and print a short report."""
        result = ""
        fetchParams = sut.Params(url=url)
        response: Response = await sut.fetch(params=fetchParams)
        if response.error:
            result = "Response is None."
            result += f"\nError object: {repr(response.error)}"
        elif response.data:
            result = str(response.data.status)
            result += f"\n✅ Content text exists: {response.text is not None}"
            result += f"\n✅ Error object is None"

        print("-" * 60, f"{url}", result, "-" * 60, sep="\n")
        print()

        return response
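_fetchData can also be exercised on its own against a single URL; a sketch reusing the configure() call from the _fetchRunner example, where the URL, timeout and method name are placeholders:

    async def _fetchSingle(self, url: str = "https://example.com", timeout: int = 10):
        session = AioHttpSession(
            proxyRepo=self.netHelper.proxyRepo,
            userAgentRepo=self.netHelper.userAgentRepo)
        request = AioHttpRequest(session=session)
        request.configure(maxRetries=4, timeout=timeout, useRandomProxy=True)
        try:
            # Delegate to the test helper above and return its Response.
            return await self._fetchData(request, url, timeout)
        finally:
            await session.close()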
Example 6
    async def _setScrapers(self):
        if not self.shops:
            raise AttributeError("Unable to create scrapers: Shops are not set.")
        if not self.session:
            raise AttributeError("Unable to create scrapers: Session not set.")

        messengerRequest: Request = AioHttpRequest(session=self.session)
        discordMessenger = msn.Discord(request=messengerRequest, repo=self.discordMessengerRepo)

        scraperFactory = ScraperFactory()
        self.scrapers = scraperFactory.makeFromScrapees(
            scrapees=self.shops,
            scrapeeRepo=self.shopRepo,
            session=self.session,
            requestClass=AioHttpRequest,
            messenger=discordMessenger)

        if not self.scrapers:
            raise LookupError("No scrapers were generated.")