Example No. 1
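All of the examples on this page are test methods excerpted from a Scrapy duplicate-filter test suite: each builds a crawler with get_crawler, feeds two Request objects to the dupe filter's log() method, and asserts on the dupefilter/filtered stat and the captured log records. The excerpts omit their module-level scaffolding, so here is a minimal sketch of what they assume. The imports match the names the snippets use, but SimpleSpider, FromCrawlerRFPDupeFilter, and _get_dupefilter are reconstructions, not the original definitions (the from_crawler hook shown requires Scrapy 2.7 or later).

from testfixtures import LogCapture

from scrapy import Request, Spider
from scrapy.core.scheduler import Scheduler
from scrapy.dupefilters import RFPDupeFilter
from scrapy.utils.test import get_crawler


class SimpleSpider(Spider):
    name = 'simple'


class FromCrawlerRFPDupeFilter(RFPDupeFilter):
    # Assumed body: a subclass that records it was built via from_crawler().
    # The tests only require the class to exist and be instantiable this way.
    @classmethod
    def from_crawler(cls, crawler):
        df = super().from_crawler(crawler)
        df.method = 'from_crawler'
        return df


def _get_dupefilter(*, crawler):
    # Assumed helper: build the scheduler, take its dupe filter, and open it,
    # mirroring what the scheduler-based examples below do inline.
    dupefilter = Scheduler.from_crawler(crawler).df
    dupefilter.open()
    return dupefilter

Example No. 1 itself checks that with DUPEFILTER_DEBUG enabled, every filtered duplicate is logged at DEBUG level together with its referer.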
    def test_log_debug(self):
        with LogCapture() as log:
            settings = {'DUPEFILTER_DEBUG': True,
                        'DUPEFILTER_CLASS': __name__ + '.FromCrawlerRFPDupeFilter'}
            crawler = get_crawler(SimpleSpider, settings_dict=settings)
            scheduler = Scheduler.from_crawler(crawler)
            spider = SimpleSpider.from_crawler(crawler)

            dupefilter = scheduler.df
            dupefilter.open()

            r1 = Request('http://scrapytest.org/index.html')
            r2 = Request(
                'http://scrapytest.org/index.html',
                headers={'Referer': 'http://scrapytest.org/INDEX.html'})

            dupefilter.log(r1, spider)
            dupefilter.log(r2, spider)

            assert crawler.stats.get_value('dupefilter/filtered') == 2
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html>'
                ' (referer: None)'))
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html>'
                ' (referer: http://scrapytest.org/INDEX.html)'))

            dupefilter.close('finished')
Example No. 2
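With DUPEFILTER_DEBUG left at its default of False, only the first filtered duplicate produces a DEBUG record, followed by a hint pointing at the setting; the test pins that exact message.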
    def test_log(self):
        with LogCapture() as log:
            settings = {
                'DUPEFILTER_DEBUG': False,
                'DUPEFILTER_CLASS': __name__ + '.FromCrawlerRFPDupeFilter'
            }
            crawler = get_crawler(SimpleSpider, settings_dict=settings)
            scheduler = Scheduler.from_crawler(crawler)
            spider = SimpleSpider.from_crawler(crawler)

            dupefilter = scheduler.df
            dupefilter.open()

            r1 = Request('http://scrapytest.org/index.html')
            r2 = Request('http://scrapytest.org/index.html')

            dupefilter.log(r1, spider)
            dupefilter.log(r2, spider)

            assert crawler.stats.get_value('dupefilter/filtered') == 2
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html>'
                ' - no more duplicates will be shown'
                ' (see DUPEFILTER_DEBUG to show all duplicates)'))

            dupefilter.close('finished')
Example No. 3
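This variant drops the DUPEFILTER_CLASS override and obtains the default dupe filter through the _get_dupefilter helper sketched above. Note that 'VERSION' for REQUEST_FINGERPRINTER_IMPLEMENTATION looks like a placeholder left over from extraction; the values Scrapy actually accepts there are '2.6' and '2.7'.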
    def test_log_debug_default_dupefilter(self):
        with LogCapture() as log:
            settings = {
                'DUPEFILTER_DEBUG': True,
                'REQUEST_FINGERPRINTER_IMPLEMENTATION': 'VERSION'
            }
            crawler = get_crawler(SimpleSpider, settings_dict=settings)
            spider = SimpleSpider.from_crawler(crawler)
            dupefilter = _get_dupefilter(crawler=crawler)

            r1 = Request('http://scrapytest.org/index.html')
            r2 = Request(
                'http://scrapytest.org/index.html',
                headers={'Referer': 'http://scrapytest.org/INDEX.html'})

            dupefilter.log(r1, spider)
            dupefilter.log(r2, spider)

            assert crawler.stats.get_value('dupefilter/filtered') == 2
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html> (referer: None)'
            ))
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html>'
                ' (referer: http://scrapytest.org/INDEX.html)'))

            dupefilter.close('finished')
Example No. 4
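The same non-debug logging check as Example No. 2, except that DUPEFILTER_CLASS is given as the class object itself rather than as a dotted-path string (Scrapy's settings machinery accepts either form), and the fingerprinter implementation is pinned as in Example No. 3.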
    def test_log(self):
        with LogCapture() as log:
            settings = {
                'DUPEFILTER_DEBUG': False,
                'DUPEFILTER_CLASS': FromCrawlerRFPDupeFilter,
                'REQUEST_FINGERPRINTER_IMPLEMENTATION': 'VERSION'
            }
            crawler = get_crawler(SimpleSpider, settings_dict=settings)
            spider = SimpleSpider.from_crawler(crawler)
            dupefilter = _get_dupefilter(crawler=crawler)

            r1 = Request('http://scrapytest.org/index.html')
            r2 = Request('http://scrapytest.org/index.html')

            dupefilter.log(r1, spider)
            dupefilter.log(r2, spider)

            assert crawler.stats.get_value('dupefilter/filtered') == 2
            log.check_present((
                'scrapy.dupefilters', 'DEBUG',
                'Filtered duplicate request: <GET http://scrapytest.org/index.html> - no more'
                ' duplicates will be shown (see DUPEFILTER_DEBUG to show all duplicates)'
            ))

            dupefilter.close('finished')