Example #1
File: check.py  Project: xacprod/ve1
    def run(self, args, opts):
        # load contracts
        contracts = build_component_list(
            self.settings['SPIDER_CONTRACTS_BASE'],
            self.settings['SPIDER_CONTRACTS'],
        )
        self.conman = ContractsManager([load_object(c) for c in contracts])
        self.results = TextTestRunner(verbosity=opts.verbose)._makeResult()

        # contract requests
        contract_reqs = defaultdict(list)

        spman_cls = load_object(self.settings['SPIDER_MANAGER_CLASS'])
        spiders = spman_cls.from_settings(self.settings)

        for spider in args or spiders.list():
            spider = spiders.create(spider)
            requests = self.get_requests(spider)

            if opts.list:
                for req in requests:
                    contract_reqs[spider.name].append(req.callback.__name__)
            elif requests:
                crawler = self.crawler_process.create_crawler(spider.name)
                crawler.crawl(spider, requests)

        # start checks
        if opts.list:
            for spider, methods in sorted(contract_reqs.iteritems()):
                print spider
                for method in sorted(methods):
                    print '  * %s' % method
        else:
            self.crawler_process.start()
            self.results.printErrors()
Example #2
    def run(self, args, opts):
        # load contracts
        contracts = build_component_list(
            self.settings['SPIDER_CONTRACTS_BASE'],
            self.settings['SPIDER_CONTRACTS'],
        )
        self.conman = ContractsManager([load_object(c) for c in contracts])

        # contract requests
        contract_reqs = defaultdict(list)
        self.crawler.engine.has_capacity = lambda: True

        for spider in args or self.crawler.spiders.list():
            spider = self.crawler.spiders.create(spider)
            requests = self.get_requests(spider)

            if opts.list:
                for req in requests:
                    contract_reqs[spider.name].append(req.callback.__name__)
            else:
                self.crawler.crawl(spider, requests)

        # start checks
        if opts.list:
            for spider, methods in sorted(contract_reqs.iteritems()):
                print spider
                for method in sorted(methods):
                    print '  * %s' % method
        else:
            self.crawler.start()
Example #3
    def test_scrapes(self):
        conman = ContractsManager(self.contracts)

        spider = TestSpider()
        response = ResponseMock()

        # scrapes_item_ok
        request = conman.from_method(spider.scrapes_item_ok, fail=True)
        output = request.callback(response)
        self.assertEqual(map(type, output), [TestItem])

        # scrapes_item_fail
        request = conman.from_method(spider.scrapes_item_fail, fail=True)
        self.assertRaises(ContractFail, request.callback, response)
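The TestSpider callbacks exercised above are not part of this excerpt. As a rough sketch only, assuming the standard Scrapy contract docstring directives and a hypothetical TestItem with name/url fields, scrapes_item_ok and scrapes_item_fail could look like the following; the @scrapes directive is what makes the scrapes check raise ContractFail when a returned item lacks one of the listed fields:

    from scrapy import Field, Item, Spider

    class TestItem(Item):
        name = Field()
        url = Field()

    class TestSpider(Spider):
        name = 'demo'

        def scrapes_item_ok(self, response):
            """ returns an item carrying every field named in @scrapes
            @url http://scrapy.org
            @returns items 1 1
            @scrapes name url
            """
            return TestItem(name='test', url=response.url)

        def scrapes_item_fail(self, response):
            """ leaves out 'url', so the scrapes check fails
            @url http://scrapy.org
            @returns items 1 1
            @scrapes name url
            """
            return TestItem(name='test')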
Example #4
    def test_contracts(self):
        conman = ContractsManager(self.contracts)

        # extract contracts correctly
        contracts = conman.extract_contracts(TestSpider.returns_request)
        self.assertEqual(len(contracts), 2)
        self.assertEqual(frozenset(map(type, contracts)),
                         frozenset([UrlContract, ReturnsContract]))

        # returns request for valid method
        request = conman.from_method(TestSpider.returns_request)
        self.assertNotEqual(request, None)

        # no request for missing url
        request = conman.from_method(TestSpider.parse_no_url)
        self.assertEqual(request, None)
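Continuing the same sketch (again hypothetical, not the original test spider), returns_request and parse_no_url would satisfy the assertions above: the first carries exactly two directives, mapping to UrlContract and ReturnsContract, while the second omits @url, so from_method cannot build a request and returns None:

        # Request is imported from scrapy (from scrapy import Request)
        def returns_request(self, response):
            """ yields a single follow-up request
            @url http://scrapy.org
            @returns requests 1 1
            """
            return Request('http://scrapy.org/about/', callback=self.scrapes_item_ok)

        def parse_no_url(self, response):
            """ no @url directive, so no contract request can be built
            @returns items 0 0
            """
            pass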
Example #5
    def run(self, args, opts):
        # load contracts
        # merge the base and user-defined contract class paths from settings
        contracts = build_component_list(
            self.settings.getwithbase('SPIDER_CONTRACTS'))
        # load the contract classes through ContractsManager
        conman = ContractsManager(load_object(c) for c in contracts)
        # instantiate a TextTestRunner
        runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
        # build the TextTestResult that will collect the check outcomes
        result = TextTestResult(runner.stream, runner.descriptions,
                                runner.verbosity)

        # contract requests
        contract_reqs = defaultdict(list)
        # get the spider loader from the crawler process
        spider_loader = self.crawler_process.spider_loader

        for spidername in args or spider_loader.list():
            spidercls = spider_loader.load(spidername)
            spidercls.start_requests = lambda s: conman.from_spider(s, result)
            # collect the callback methods that have contracts to test
            tested_methods = conman.tested_methods_from_spidercls(spidercls)
            if opts.list:
                for method in tested_methods:
                    contract_reqs[spidercls.name].append(method)
            elif tested_methods:  # crawl only spiders that have tested methods
                self.crawler_process.crawl(spidercls)

        # start checks
        if opts.list:
            for spider, methods in sorted(contract_reqs.items()):
                if not methods and not opts.verbose:
                    continue
                print(spider)
                for method in sorted(methods):
                    print('  * %s' % method)
        else:
            start = time.time()
            self.crawler_process.start()
            stop = time.time()

            result.printErrors()
            result.printSummary(start, stop)
            self.exitcode = int(not result.wasSuccessful())
Example #6
    def run(self, args, opts):
        # load contracts
        contracts = build_component_list(
            self.settings['SPIDER_CONTRACTS_BASE'],
            self.settings['SPIDER_CONTRACTS'],
        )
        conman = ContractsManager([load_object(c) for c in contracts])
        runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
        result = TextTestResult(runner.stream, runner.descriptions,
                                runner.verbosity)

        # contract requests
        contract_reqs = defaultdict(list)

        spman_cls = load_object(self.settings['SPIDER_MANAGER_CLASS'])
        spiders = spman_cls.from_settings(self.settings)

        for spider in args or spiders.list():
            spider = spiders.create(spider)
            requests = self.get_requests(spider, conman, result)
            contract_reqs[spider.name] = []

            if opts.list:
                for req in requests:
                    contract_reqs[spider.name].append(req.callback.__name__)
            elif requests:
                crawler = self.crawler_process.create_crawler(spider.name)
                crawler.crawl(spider, requests)

        # start checks
        if opts.list:
            for spider, methods in sorted(contract_reqs.items()):
                if not methods and not opts.verbose:
                    continue
                print(spider)
                for method in sorted(methods):
                    print('  * %s' % method)
        else:
            start = time.time()
            self.crawler_process.start()
            stop = time.time()

            result.printErrors()
            result.printSummary(start, stop)
            self.exitcode = int(not result.wasSuccessful())
Example #7
    def run(self, args, opts):
        # load contracts
        contracts = build_component_list(
            self.settings.getwithbase('SPIDER_CONTRACTS'))
        conman = ContractsManager(load_object(c) for c in contracts)
        runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
        result = TextTestResult(runner.stream, runner.descriptions,
                                runner.verbosity)

        # contract requests
        contract_reqs = defaultdict(list)

        spider_loader = self.crawler_process.spider_loader

        with set_environ(SCRAPY_CHECK='true'):
            for spidername in args or spider_loader.list():
                spidercls = spider_loader.load(spidername)
                spidercls.start_requests = lambda s: conman.from_spider(
                    s, result)

                tested_methods = conman.tested_methods_from_spidercls(
                    spidercls)
                if opts.list:
                    for method in tested_methods:
                        contract_reqs[spidercls.name].append(method)
                elif tested_methods:
                    self.crawler_process.crawl(spidercls)

        # start checks
        if opts.list:
            for spider, methods in sorted(contract_reqs.items()):
                if not methods and not opts.verbose:
                    continue
                print(spider)
                for method in sorted(methods):
                    print('  * %s' % method)
        else:
            start = time.time()
            self.crawler_process.start()
            stop = time.time()

            result.printErrors()
            result.printSummary(start, stop)
            self.exitcode = int(not result.wasSuccessful())
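Every run() variant above resolves the contract classes from the project settings before building the ContractsManager, then replaces each spider's start_requests so that the crawl only issues requests generated from contract docstrings; this is the machinery behind the scrapy check command (the opts.list branch merely prints the tested methods without crawling). Scrapy's SPIDER_CONTRACTS_BASE already names the three built-in contracts, and a project can register extra ones through SPIDER_CONTRACTS; an illustrative settings fragment (the custom contract path is hypothetical):

    # settings.py (sketch) -- merged with SPIDER_CONTRACTS_BASE by
    # build_component_list() / settings.getwithbase('SPIDER_CONTRACTS')
    # in the run() methods above; the numbers are ordering keys.
    SPIDER_CONTRACTS = {
        'myproject.contracts.HasHeaderContract': 500,  # hypothetical custom contract
    }

    # The defaults already include:
    #   'scrapy.contracts.default.UrlContract'
    #   'scrapy.contracts.default.ReturnsContract'
    #   'scrapy.contracts.default.ScrapesContract'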
Example #8
    def test_returns(self):
        conman = ContractsManager(self.contracts)

        spider = TestSpider()
        response = ResponseMock()

        # returns_item
        request = conman.from_method(spider.returns_item, fail=True)
        output = request.callback(response)
        self.assertEqual(map(type, output), [TestItem])

        # returns_request
        request = conman.from_method(spider.returns_request, fail=True)
        output = request.callback(response)
        self.assertEqual(map(type, output), [Request])

        # returns_fail
        request = conman.from_method(spider.returns_fail, fail=True)
        self.assertRaises(ContractFail, request.callback, response)
Example #9
    def setUp(self):
        self.spider = TestBeibeiSpider()
        self.conman = ContractsManager(self.contracts)
        self.results = TextTestResult(stream=None,
                                      descriptions=False,
                                      verbosity=0)
Example #10
    def setUp(self):
        self.conman = ContractsManager(self.contracts)
        self.results = TextTestRunner()._makeResult()
        self.results.stream = None
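The setUp fixtures in the last two examples refer to self.contracts, which is not shown in this excerpt. Given that the tests cover UrlContract, ReturnsContract and the @scrapes behaviour, a plausible class-level definition (an assumption, not the original fixture) is simply the list of default contract classes:

    import unittest

    from scrapy.contracts import ContractsManager
    from scrapy.contracts.default import (
        ReturnsContract,
        ScrapesContract,
        UrlContract,
    )

    class ContractsManagerTest(unittest.TestCase):  # class name is illustrative
        contracts = [UrlContract, ReturnsContract, ScrapesContract]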