def parse(self, response):
    """Entry callback: log the target's status, then re-request the start
    URL once per proxy returned by get_ip_proxy() so parse_item can verify
    each proxy actually works.
    """
    logging.info(utils.get_time_now() + " target status==> " + str(response.status))
    ipl = get_ip_proxy()
    if response.status == 200 and ipl:
        for item in ipl:
            # Carry ip/port alongside the assembled proxy URL so the
            # failure branch in parse_item can disable the exact proxy
            # without having to re-parse the URL string.
            yield Request(
                url=self.start_urls[0],
                callback=self.parse_item,
                dont_filter=True,
                meta={
                    'proxy': "http://%s:%s" % (item['ip'], item['port']),
                    'ip': item['ip'],
                    'port': item['port'],
                },
            )
def parse_item(self, response):
    """Verify one proxied request: on HTTP 200 log the proxy together with
    the IP the target page reports; otherwise disable the failing proxy.
    """
    status = response.status
    if status == 200:
        proxy = response.request.meta['proxy']
        check_ip = response.xpath("//h1[@id='ipd']/span/text()").extract()
        # Guard against an empty xpath result — indexing [0] on an empty
        # list would raise IndexError and kill the callback.
        if check_ip:
            logging.info(utils.get_time_now() + " " + proxy + " " + check_ip[0])
        else:
            logging.info(utils.get_time_now() + " " + proxy + " (no ip element found)")
    else:
        # The meta dict may only contain 'proxy' (not 'ip'/'port'), so fall
        # back to splitting the proxy URL ("http://ip:port") apart instead
        # of raising KeyError on the missing keys.
        meta = response.request.meta
        ip = meta.get('ip')
        port = meta.get('port')
        if ip is None or port is None:
            host = meta['proxy'].split('://', 1)[-1]
            ip, _, port = host.rpartition(':')
        utils.disable_ip(ip, port)
def parse(self, response):
    """Entry callback: log the target's status, then re-request the start
    URL once per proxy returned by get_ip_proxy() so parse_item can verify
    each proxy actually works.

    NOTE(review): this appears to duplicate an earlier identical `parse`
    definition; if both sit in the same class, this one silently shadows
    the other — confirm and remove one.
    """
    logging.info(utils.get_time_now() + " target status==> " + str(response.status))
    ipl = get_ip_proxy()
    if response.status == 200 and ipl:
        for item in ipl:
            # Include ip/port in meta so parse_item's failure branch can
            # call utils.disable_ip without re-parsing the proxy URL.
            yield Request(
                url=self.start_urls[0],
                callback=self.parse_item,
                dont_filter=True,
                meta={
                    'proxy': "http://%s:%s" % (item['ip'], item['port']),
                    'ip': item['ip'],
                    'port': item['port'],
                },
            )