Example 1
    def test_getLinks(self):
        @stringioify
        def mocked_urlopener(url):
            html_wrap = "<html><body>\n%s\n</body></html>"
            if 'ONE' in url:
                return html_wrap % """
                <a href='One.html'>One.html</a>
                """
            raise NotImplementedError(url)

        self.urllib2.side_effect = mocked_urlopener
        self.assertEqual(
            ftpscraper.getLinks('ONE'),
            []
        )
        self.assertEqual(
            ftpscraper.getLinks('ONE', startswith='One'),
            ['One.html']
        )
        self.assertEqual(
            ftpscraper.getLinks('ONE', endswith='.html'),
            ['One.html']
        )
        self.assertEqual(
            ftpscraper.getLinks('ONE', startswith='Two'),
            []
        )
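
All of these examples rely on two fixtures that the excerpts do not show: a stringioify decorator and a self.urllib2 mock whose side_effect is pointed at the fake opener. Below is a minimal sketch of what those fixtures might look like, based purely on how they are used above; the class name TestFTPScraper and the patch target 'ftpscraper.urllib2.urlopen' are assumptions, not the project's actual names.

import unittest
from functools import wraps
from StringIO import StringIO  # Python 2, matching the urllib2 usage in the tests

import mock


def stringioify(func):
    # Assumed helper: wrap the fake opener so it returns a file-like object,
    # the same shape a real urllib2.urlopen() call would return.
    @wraps(func)
    def wrapper(*args, **kwargs):
        return StringIO(func(*args, **kwargs))
    return wrapper


class TestFTPScraper(unittest.TestCase):  # hypothetical class name
    def setUp(self):
        # Assumed wiring: patch the scraper module's urlopen so each test can
        # assign self.urllib2.side_effect. The patch target is hypothetical.
        self.urlopen_patcher = mock.patch('ftpscraper.urllib2.urlopen')
        self.urllib2 = self.urlopen_patcher.start()
        self.addCleanup(self.urlopen_patcher.stop)
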
Example 2
    def test_getLinks(self):
        @stringioify
        def mocked_urlopener(url):
            html_wrap = "<html><body>\n%s\n</body></html>"
            if 'ONE' in url:
                return html_wrap % """
                <a href='One.html'>One.html</a>
                """
            raise NotImplementedError(url)

        self.urllib2.side_effect = mocked_urlopener
        eq_(
            ftpscraper.getLinks('ONE'),
            []
        )
        eq_(
            ftpscraper.getLinks('ONE', startswith='One'),
            ['One.html']
        )
        eq_(
            ftpscraper.getLinks('ONE', endswith='.html'),
            ['One.html']
        )
        eq_(
            ftpscraper.getLinks('ONE', startswith='Two'),
            []
        )
Example 3
    def test_getLinks_with_page_not_found(self):
        @stringioify
        def mocked_urlopener(url):
            raise urllib2.HTTPError(url, 404, "Not Found", {}, None)

        self.urllib2.side_effect = mocked_urlopener
        self.assertEqual(ftpscraper.getLinks('ONE'), [])
Example 4
    def test_getLinks_with_timeout_retries(self, mocked_time):
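        # Note: mocked_time is presumably injected by a mock.patch decorator
        # on this test method; the decorator is not shown in this excerpt.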

        sleeps = []

        def mocked_sleeper(seconds):
            sleeps.append(seconds)

        mocked_time.sleep = mocked_sleeper

        mock_calls = []

        @stringioify
        def mocked_urlopener(url):
            mock_calls.append(url)
            if len(mock_calls) == 1:
                raise urllib2.HTTPError(url, 500, "Server Error", {}, None)
            if len(mock_calls) == 2:
                raise urllib2.HTTPError(url, 504, "Timeout", {}, None)
            if len(mock_calls) == 3:
                raise urllib2.URLError("BadStatusLine")

            html_wrap = "<html><body>\n%s\n</body></html>"
            if 'ONE' in url:
                return html_wrap % """
                <a href='One.html'>One.html</a>
                """
            raise NotImplementedError(url)

        self.urllib2.side_effect = mocked_urlopener
        self.assertEqual(
            ftpscraper.getLinks('ONE', startswith='One'),
            ['One.html']
        )
        # the first three attempts raised (500, 504, then a URLError), so it
        # had to go to sleep 3 times before the fourth attempt succeeded
        self.assertEqual(len(sleeps), 3)
Example 5
    def test_getLinks(self):
        @stringioify
        def mocked_urlopener(url):
            html_wrap = "<html><body>\n%s\n</body></html>"
            if "ONE" in url:
                return (
                    html_wrap
                    % """
                <a href='One.html'>One.html</a>
                """
                )
            raise NotImplementedError(url)

        self.urllib2.side_effect = mocked_urlopener
        self.assertEqual(ftpscraper.getLinks("ONE"), [])
        self.assertEqual(ftpscraper.getLinks("ONE", startswith="One"), ["One.html"])
        self.assertEqual(ftpscraper.getLinks("ONE", endswith=".html"), ["One.html"])
        self.assertEqual(ftpscraper.getLinks("ONE", startswith="Two"), [])
Example 6
    def test_getLinks_with_page_not_found(self):
        @stringioify
        def mocked_urlopener(url):
            raise urllib2.HTTPError(url, 404, "Not Found", {}, None)

        self.urllib2.side_effect = mocked_urlopener
        eq_(
            ftpscraper.getLinks('ONE'),
            []
        )