Example 1
    def test_find_shouldRaiseOnInvalidArgumentKey(self):
        # Given
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)

        # When / Then
        with self.assertRaises(KeyError):
            with dao as sut:
                sut.find(someInvalidParamKey="")  # noqa
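
These examples never show how find() dispatches on its keyword argument, but one plausible sketch is a lookup table over the supported keys, which makes an unknown keyword raise the KeyError this test expects. The helper names below are hypothetical; only the three keyword names actually appear in these examples.

    def find(self, **kwargs):
        # Sketch only: the real TinyConfigDao internals are not shown here.
        handlers = {
            "scraperConfigByUrl": self._findScraperConfigByUrl,    # hypothetical
            "scraperCommonConfig": self._findScraperCommonConfig,  # hypothetical
            "loggerConfig": self._findLoggerConfig,                # hypothetical
        }
        (key, value), = kwargs.items()  # exactly one keyword argument expected
        return handlers[key](value)    # unknown keyword -> KeyError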
Example 2
    def test_find_scraperConfigByUrl(self):
        # Given
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)

        # ---------- Test 1 ----------

        scraperUrl_01 = "https://www.test_scraper_config_01.com"
        # Expected data from the fixture file
        expectedConfig_01 = ScraperConfig(
            iterSleepFromScnds=8,
            iterSleepToScnds=15,
            iterSleepSteps=0.5,
            fetchTimeoutScnds=8,
            fetchMaxRetries=4,
            fetchUseRandomProxy=True,
            postTimeoutScnds=7,
            postMaxRetries=3,
            postUseRandomProxies=True)

        # When
        with dao as sut:
            foundConfig_01 = sut.find(scraperConfigByUrl=scraperUrl_01)
        # Then
        self.assertEqual(expectedConfig_01, foundConfig_01)

        # ---------- Test 2 ----------

        # Given
        scraperUrl_02 = "https://www.test_scraper_config_02.com"
        # Expected data from the fixture file
        expectedConfig_02 = ScraperConfig(
            iterSleepFromScnds=7,
            iterSleepToScnds=16,
            iterSleepSteps=1.0,
            fetchTimeoutScnds=5,
            fetchMaxRetries=5,
            fetchUseRandomProxy=False,
            postTimeoutScnds=9,
            postMaxRetries=2,
            postUseRandomProxies=False)

        # When
        with dao as sut:
            foundConfig_02 = sut.find(scraperConfigByUrl=scraperUrl_02)
        # Then
        self.assertEqual(expectedConfig_02, foundConfig_02)
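
The expected values above mirror entries in the fixture file behind TEST_VALID_CONFIGURATION_PATH. As a minimal sketch, and assuming TinyConfigDao is backed by TinyDB, the first entry could be seeded like this; the table name and field names are assumptions, only the values come from expectedConfig_01.

    from tinydb import TinyDB

    # Hypothetical fixture builder; the schema is assumed, the values are
    # taken from expectedConfig_01 in the test above.
    db = TinyDB("test_valid_configuration.json")
    db.table("scraperConfigs").insert({
        "url": "https://www.test_scraper_config_01.com",
        "iterSleepFromScnds": 8, "iterSleepToScnds": 15, "iterSleepSteps": 0.5,
        "fetchTimeoutScnds": 8, "fetchMaxRetries": 4, "fetchUseRandomProxy": True,
        "postTimeoutScnds": 7, "postMaxRetries": 3, "postUseRandomProxies": True,
    })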
Example 3
    def test_find_loggerConfig_shouldFallbackToRescueDefaultsIfNotFound(self):
        # Given
        import debug.logger as clog
        dao = TinyConfigDao(path=TEST_EMPTY_DATABASE_CONFIGURATION_PATH)
        # Expected hard-coded rescue defaults
        expectedRescueConfig = LoggerConfig(
            isConsoleLogging=True,
            isFileLogging=False,
            consoleLogLevel=clog.INFO,
            fileLogLevel=clog.NOTSET)

        # When
        with dao as sut:
            foundConfig = sut.find(loggerConfig=True)

        # Then
        self.assertEqual(expectedRescueConfig, foundConfig)
Example 4
    def test_find_loggerConfig(self):
        # Given
        import debug.logger as clog
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
        # Expected data from the fixture file
        expectedConfig = LoggerConfig(
            isConsoleLogging=True,
            isFileLogging=False,
            consoleLogLevel=clog.DEBUG,
            fileLogLevel=clog.NOTSET)

        # When
        with dao as sut:
            foundConfig = sut.find(loggerConfig=True)

        # Then
        self.assertEqual(expectedConfig, foundConfig)
Example 5
    def test_find_scraperCommonConfig_shouldFallbackToRescueDefaultsIfNotFound(self):
        # Given
        dao = TinyConfigDao(path=TEST_EMPTY_DATABASE_CONFIGURATION_PATH)
        # Expected hard-coded rescue defaults
        expectedRescueConfig = ScraperConfig(
            iterSleepFromScnds=20,
            iterSleepToScnds=30,
            iterSleepSteps=0.5,
            fetchTimeoutScnds=8,
            fetchMaxRetries=4,
            fetchUseRandomProxy=True,
            postTimeoutScnds=8,
            postMaxRetries=4,
            postUseRandomProxies=True)

        # When
        with dao as sut:
            foundDefaultConfig = sut.find(scraperCommonConfig=True)

        # Then
        self.assertEqual(expectedRescueConfig, foundDefaultConfig)
Example 6
    def test_find_scraperCommonConfig(self):
        # Given
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
        # Expected data from the fixture file
        expectedDefaultConfig = ScraperConfig(
            iterSleepFromScnds=25,
            iterSleepToScnds=35,
            iterSleepSteps=1.0,
            fetchTimeoutScnds=8,
            fetchMaxRetries=5,
            fetchUseRandomProxy=True,
            postTimeoutScnds=7,
            postMaxRetries=4,
            postUseRandomProxies=True)

        # When
        with dao as sut:
            foundDefaultConfig = sut.find(scraperCommonConfig=True)

        # Then
        self.assertEqual(expectedDefaultConfig, foundDefaultConfig)
Example 7
    def test_find_scraperConfigByUrl_ShouldFallbackToPersistentDefault(self):
        # Given
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
        nonExistingScraperConfigUrl = "https://www.this-scraper-default-does-not-exist.org"
        # Expected data from the fixture file (its persisted default entry)
        expectedDefaultConfig = ScraperConfig(
            iterSleepFromScnds=25,
            iterSleepToScnds=35,
            iterSleepSteps=1.0,
            fetchTimeoutScnds=8,
            fetchMaxRetries=5,
            fetchUseRandomProxy=True,
            postTimeoutScnds=7,
            postMaxRetries=4,
            postUseRandomProxies=True)

        # When
        with dao as sut:
            foundDefaultConfig = sut.find(scraperConfigByUrl=nonExistingScraperConfigUrl)
        # Then
        self.assertEqual(expectedDefaultConfig, foundDefaultConfig)
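
Note that expectedDefaultConfig here matches the scraperCommonConfig values from Example 6, not the hard-coded rescue defaults from Example 5: for an unknown URL the lookup falls back to the default entry persisted in the fixture file.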
Example 8
    @classmethod
    def setUpClass(cls) -> None:
        super().setUpClass()
        cls.testConfigDao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
Example 9
def start():
    import scraper.base
    dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
    configRepoFixture = ConfigRepo(dao=dao)
    # Monkey patch the module-level repo so code under test reads the fixture
    scraper.base.APP_CONFIG_REPO = configRepoFixture
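
start() patches scraper.base.APP_CONFIG_REPO in place, so the fixture-backed repo stays installed until something restores the original. As an alternative sketch, unittest.mock.patch.object undoes the patch automatically when the context exits; the test body here is illustrative only.

    from unittest import mock

    def test_with_fixture_backed_repo():
        import scraper.base
        dao = TinyConfigDao(path=TEST_VALID_CONFIGURATION_PATH)
        repo = ConfigRepo(dao=dao)
        # patch.object restores the original APP_CONFIG_REPO on exit
        with mock.patch.object(scraper.base, "APP_CONFIG_REPO", repo):
            ...  # exercise code that reads scraper.base.APP_CONFIG_REPO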