def __init__(self, name, spec, item_schemas, all_extractors, settings=None,
             **kw):
    self.start_url_generators = {
        'start_urls': IdentityGenerator(),
        'generated_urls': UrlGenerator(settings, kw),
        'url': IdentityGenerator(),
        'feed': FeedGenerator(self.parse),
        'generated': FragmentGenerator(),
    }
    self.generic_form = GenericForm(**kw)
    super(IblSpider, self).__init__(name, **kw)
    spec = deepcopy(spec)
    self._add_spider_args_to_spec(spec, kw)
    self._configure_js(spec, settings)
    self.plugins = self._configure_plugins(
        settings, spec, item_schemas, all_extractors)
    self.login_requests, self.form_requests = [], []
    self._start_urls = self._create_start_urls(spec)
    self._start_requests = self._create_start_requests(spec)
    self._create_init_requests(spec)
    self._add_allowed_domains(spec)
    self.page_actions = spec.get('page_actions', [])
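# The generator objects above map each start-URL spec type to concrete URLs.
# A minimal sketch of that dispatch idea, assuming each generator is a
# callable yielding URL strings (these toy classes are assumptions, not
# slybot's actual IdentityGenerator/FragmentGenerator):
class IdentityGeneratorSketch(object):
    def __call__(self, entry):
        # A plain URL passes through unchanged.
        yield entry['url']

class FragmentGeneratorSketch(object):
    def __call__(self, entry):
        # Expand a numeric-range template into one URL per value.
        for n in range(entry['lo'], entry['hi'] + 1):
            yield entry['template'].format(n)

_generators = {'url': IdentityGeneratorSketch(),
               'generated': FragmentGeneratorSketch()}

def _expand(entry):
    # Dispatch on the entry's declared type, as __init__ does by key.
    return list(_generators[entry['type']](entry))

assert _expand({'type': 'url', 'url': 'http://example.com/'}) == \
    ['http://example.com/']
assert _expand({'type': 'generated', 'lo': 1, 'hi': 3,
                'template': 'http://example.com/page/{}'}) == \
    ['http://example.com/page/1', 'http://example.com/page/2',
     'http://example.com/page/3']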
def test_advanced_search_form_regex(self):
    url = 'http://www.ebay.com/sch/ebayadvsearch/?rt=nc'
    body = open(join(_PATH, "data", "ebay_advanced_search.html")).read()
    form_descriptor = json.loads("""{
        "type": "form",
        "form_url": "http://www.ebay.com/sch/ebayadvsearch/?rt=nc",
        "xpath": "//form[@name='adv_search_from']",
        "fields": [
            {
                "xpath": ".//*[@name='_nkw']",
                "type": "constants",
                "value": ["Cars"]
            },
            {
                "xpath": ".//*[@name='_in_kw']",
                "type": "iterate",
                "value": "[1-2]"
            }
        ]
    }""")
    generic_form = GenericForm()
    start_requests = list(
        generic_form.fill_generic_form(url, body, form_descriptor))
    expected_requests = [
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'), ('_sasl', ''),
          ('_udlo', ''), ('_okw', u''),
          ('_fsradio', '&LH_SpecificSeller=1'), ('_udhi', ''),
          ('_in_kw', '1'), ('_nkw', u'Cars'), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'), ('_sasl', ''),
          ('_udlo', ''), ('_okw', u''),
          ('_fsradio', '&LH_SpecificSeller=1'), ('_udhi', ''),
          ('_in_kw', '2'), ('_nkw', u'Cars'), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
    ]
    self.assertEqual(start_requests, expected_requests)
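# The "iterate" field above expands the pattern "[1-2]" into one request per
# value. A rough, hypothetical illustration of that expansion (slybot's real
# regex handling is richer; expand_numeric_range is not part of its API):
import re

def expand_numeric_range(pattern):
    # Expand a simple "[lo-hi]" pattern into the concrete string values the
    # form field gets filled with; anything else passes through unchanged.
    m = re.match(r'\[(\d+)-(\d+)\]$', pattern)
    if not m:
        return [pattern]
    lo, hi = int(m.group(1)), int(m.group(2))
    return [str(n) for n in range(lo, hi + 1)]

assert expand_numeric_range('[1-2]') == ['1', '2']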
def __init__(self, name, spec, item_schemas, all_extractors, **kw):
    super(IblSpider, self).__init__(name, **kw)
    spec = deepcopy(spec)
    for key, val in kw.items():
        if isinstance(val, basestring) and key in [
                'start_urls', 'exclude_patterns', 'follow_patterns',
                'allowed_domains']:
            val = val.splitlines()
        spec[key] = val

    self._item_template_pages = sorted((
        [t['scrapes'], dict_to_page(t, 'annotated_body'),
         t.get('extractors', [])]
        for t in spec['templates'] if t.get('page_type', 'item') == 'item'
    ), key=lambda pair: pair[0])

    # generate ibl extractor for links pages
    _links_pages = [dict_to_page(t, 'annotated_body')
                    for t in spec['templates']
                    if t.get('page_type') == 'links']
    _links_item_descriptor = create_slybot_item_descriptor({'fields': {}})
    self._links_ibl_extractor = InstanceBasedLearningExtractor(
        [(t, _links_item_descriptor) for t in _links_pages]
    ) if _links_pages else None

    self._ipages = [page for _, page, _ in self._item_template_pages]

    self.html_link_extractor = HtmlLinkExtractor()
    self.rss_link_extractor = RssLinkExtractor()
    self.build_url_filter(spec)

    self.itemcls_info = {}
    for itemclass_name, triplets in itertools.groupby(
            self._item_template_pages, operator.itemgetter(0)):
        page_extractors_pairs = map(operator.itemgetter(1, 2), triplets)
        schema = item_schemas[itemclass_name]
        item_cls = SlybotItem.create_iblitem_class(schema)

        page_descriptor_pairs = []
        for page, template_extractors in page_extractors_pairs:
            item_descriptor = create_slybot_item_descriptor(schema)
            apply_extractors(item_descriptor, template_extractors,
                             all_extractors)
            page_descriptor_pairs.append((page, item_descriptor))

        extractor = InstanceBasedLearningExtractor(page_descriptor_pairs)

        self.itemcls_info[itemclass_name] = {
            'class': item_cls,
            'descriptor': item_descriptor,
            'extractor': extractor,
        }

    self.login_requests = []
    self.form_requests = []
    self._start_requests = []
    self.generic_form = GenericForm(**kw)
    self._create_init_requests(spec.get("init_requests", []))
    self._process_start_urls(spec)

    self.allowed_domains = spec.get(
        'allowed_domains', self._get_allowed_domains(self._ipages))
    if not self.allowed_domains:
        self.allowed_domains = None
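# itertools.groupby above only merges adjacent items, which is why
# _item_template_pages is sorted by its 'scrapes' key before grouping.
# A minimal demonstration of that requirement:
import itertools
import operator

pairs = [('person', 'tmpl3'), ('product', 'tmpl1'), ('product', 'tmpl2')]
pairs.sort(key=operator.itemgetter(0))  # equal keys must be adjacent
grouped = {name: [t for _, t in group]
           for name, group in itertools.groupby(pairs,
                                                operator.itemgetter(0))}
assert grouped == {'person': ['tmpl3'], 'product': ['tmpl1', 'tmpl2']}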
def test_simple_search_form_with_file_type(self):
    url = 'http://www.ebay.com/sch/ebayadvsearch/?rt=nc'
    body = open(join(_PATH, "data", "ebay_advanced_search.html")).read()
    form_descriptor = json.loads("""{
        "type": "form",
        "form_url": "http://www.ebay.com/sch/ebayadvsearch/?rt=nc",
        "xpath": "//form[@name='adv_search_from']",
        "fields": [
            {
                "name": "my_param",
                "type": "inurl",
                "value": "file://%s/test_params.txt",
                "file_values": ["Cars", "Boats", "Houses", "Electronics"]
            }
        ]
    }""" % join(_PATH, "data"))
    generic_form = GenericForm()
    start_requests = list(
        generic_form.fill_generic_form(url, body, form_descriptor))
    expected_requests = [
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'),
          (u'my_param', u'Cars'), ('_sasl', ''), ('_udlo', ''),
          ('_okw', u''), ('_fsradio', '&LH_SpecificSeller=1'),
          ('_udhi', ''), ('_in_kw', '1'), ('_nkw', ''), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'),
          (u'my_param', u'Boats'), ('_sasl', ''), ('_udlo', ''),
          ('_okw', u''), ('_fsradio', '&LH_SpecificSeller=1'),
          ('_udhi', ''), ('_in_kw', '1'), ('_nkw', ''), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'),
          (u'my_param', u'Houses'), ('_sasl', ''), ('_udlo', ''),
          ('_okw', u''), ('_fsradio', '&LH_SpecificSeller=1'),
          ('_udhi', ''), ('_in_kw', '1'), ('_nkw', ''), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
        ([('_adv', '1'), ('_ex_kw', ''), ('_ftrv', '1'), ('_ftrt', '901'),
          ('_sabdlo', u''), ('_sabdhi', u''), ('_sop', '12'),
          ('_samihi', u''), ('_ipg', '50'), ('_salic', '1'),
          (u'my_param', u'Electronics'), ('_sasl', ''), ('_udlo', ''),
          ('_okw', u''), ('_fsradio', '&LH_SpecificSeller=1'),
          ('_udhi', ''), ('_in_kw', '1'), ('_nkw', ''), ('_sacat', '0'),
          ('_oexkw', u''), ('_dmd', '1'), ('_saslop', '1'),
          ('_samilow', u'')],
         'http://www.ebay.com/sch/i.html', 'GET'),
    ]
    self.assertEqual(request_to_set(start_requests),
                     request_to_set(expected_requests))
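# The "inurl" field type reads its values from the resource named by "value"
# (a file:// URL here). A hedged sketch of that lookup, assuming one value
# per line (values_from_url is illustrative, not slybot's helper):
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

def values_from_url(url):
    # Fetch the resource and treat each non-empty line as one field value,
    # mirroring the "file_values" the test above expects.
    body = urlopen(url).read().decode('utf-8')
    return [line.strip() for line in body.splitlines() if line.strip()]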
def __init__(self, name, spec, item_schemas, all_extractors, settings=None,
             **kw):
    super(IblSpider, self).__init__(name, **kw)
    self._job_id = settings.get('JOB', '')
    spec = deepcopy(spec)
    for key, val in kw.items():
        if isinstance(val, six.string_types) and key in STRING_KEYS:
            val = val.splitlines()
        spec[key] = val

    self._item_template_pages = sorted(
        ((t['scrapes'], t) for t in spec['templates']
         if t.get('page_type', 'item') == 'item'),
        key=itemgetter(0))
    self._templates = [templ for _, templ in self._item_template_pages]

    self.plugins = IndexedDict()
    for plugin_class, plugin_name in zip(load_plugins(settings),
                                         load_plugin_names(settings)):
        instance = plugin_class()
        instance.setup_bot(settings, spec, item_schemas, all_extractors)
        self.plugins[plugin_name] = instance

    self.js_enabled = False
    self.SPLASH_HOST = None
    if settings.get('SPLASH_URL'):
        self.SPLASH_HOST = urlparse(settings.get('SPLASH_URL')).hostname
        self.js_enabled = spec.get('js_enabled', False)
    if self.js_enabled and (settings.get('SPLASH_PASS') is not None or
                            settings.get('SPLASH_USER') is not None):
        self.splash_auth = basic_auth_header(
            settings.get('SPLASH_USER', ''),
            settings.get('SPLASH_PASS', ''))
    self._filter_js_urls = self._build_js_url_filter(spec)

    self.login_requests = []
    self.form_requests = []
    self._start_requests = []
    self.generic_form = GenericForm(**kw)
    self._create_init_requests(spec.get("init_requests", []))
    self._process_start_urls(spec)

    self.allowed_domains = spec.get(
        'allowed_domains', self._get_allowed_domains(self._templates))
    self.page_actions = spec.get('page_actions', [])
    if not self.allowed_domains:
        self.allowed_domains = None
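# basic_auth_header above builds the Authorization header value sent to
# Splash; w3lib provides it:
from w3lib.http import basic_auth_header

# HTTP Basic credentials, base64-encoded; recent w3lib versions return bytes.
assert basic_auth_header('user', 'pass') == b'Basic dXNlcjpwYXNz'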
def __init__(self, name, spec, item_schemas, all_extractors, settings=None,
             **kw):
    super(IblSpider, self).__init__(name, **kw)
    spec = deepcopy(spec)
    for key, val in kw.items():
        if isinstance(val, basestring) and key in STRING_KEYS:
            val = val.splitlines()
        spec[key] = val

    self._item_template_pages = sorted(
        ((t['scrapes'], t) for t in spec['templates']
         if t.get('page_type', 'item') == 'item'),
        key=itemgetter(0))
    self._templates = [templ for _, templ in self._item_template_pages]

    self.plugins = IndexedDict()
    for plugin_class, plugin_name in zip(load_plugins(settings),
                                         load_plugin_names(settings)):
        instance = plugin_class()
        instance.setup_bot(settings, spec, item_schemas, all_extractors)
        self.plugins[plugin_name] = instance

    self.login_requests = []
    self.form_requests = []
    self._start_requests = []
    self.generic_form = GenericForm(**kw)
    self._create_init_requests(spec.get("init_requests", []))
    self._process_start_urls(spec)

    self.allowed_domains = spec.get(
        'allowed_domains', self._get_allowed_domains(self._templates))
    if not self.allowed_domains:
        self.allowed_domains = None
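# load_plugins/load_plugin_names above resolve plugin classes from settings.
# A minimal sketch of dotted-path loading with importlib (load_object is an
# illustrative helper, not slybot's exact loader):
import importlib

def load_object(path):
    # Resolve a dotted path such as 'package.module.ClassName' to the
    # object it names.
    module_path, _, name = path.rpartition('.')
    return getattr(importlib.import_module(module_path), name)

assert load_object('os.path.join') is importlib.import_module('os.path').join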
def __init__(self, name, spec, item_schemas, all_extractors, settings=None,
             **kw):
    self.start_url_generators = {
        'start_urls': IdentityGenerator(),
        'generated_urls': UrlGenerator(settings, kw),
        'url': IdentityGenerator(),
        'feed': FeedGenerator(self.parse),
        'generated': FragmentGenerator(),
    }
    self.generic_form = GenericForm(**kw)
    super(IblSpider, self).__init__(name, **kw)
    spec = deepcopy(spec)
    self._add_spider_args_to_spec(spec, kw)
    self.actions = spec.get('actions', [])
    # If actions are configured, enable JS and add each action's URL to
    # js_enable_patterns. Currently disabled:
    # if len(self.actions):
    #     spec['js_enabled'] = True
    #     enable_patterns = spec.get('js_enable_patterns', [])
    #     for action in self.actions:
    #         enable_patterns.append(action.get('url'))
    #     spec['js_enable_patterns'] = enable_patterns
    self._configure_js(spec, settings)
    self.plugins = self._configure_plugins(
        settings, spec, item_schemas, all_extractors)
    self.login_requests, self.form_requests = [], []
    self._start_urls = self._create_start_urls(spec)
    self._start_requests = self._create_start_requests(spec)
    self._create_init_requests(spec)
    self._add_allowed_domains(spec)
    self.page_actions = spec.get('page_actions', [])
def __init__(self, name, spec, item_schemas, all_extractors, **kw):
    super(IblSpider, self).__init__(name, **kw)

    self._item_template_pages = sorted((
        [t['scrapes'], dict_to_page(t, 'annotated_body'),
         t.get('extractors', [])]
        for t in spec['templates'] if t.get('page_type', 'item') == 'item'
    ), key=lambda pair: pair[0])

    # generate ibl extractor for links pages
    _links_pages = [dict_to_page(t, 'annotated_body')
                    for t in spec['templates']
                    if t.get('page_type') == 'links']
    _links_item_descriptor = create_slybot_item_descriptor({'fields': {}})
    self._links_ibl_extractor = InstanceBasedLearningExtractor(
        [(t, _links_item_descriptor) for t in _links_pages]
    ) if _links_pages else None

    self._ipages = [page for _, page, _ in self._item_template_pages]

    self.start_urls = self.start_urls or spec.get('start_urls')
    if isinstance(self.start_urls, basestring):
        self.start_urls = self.start_urls.splitlines()

    self.html_link_extractor = HtmlLinkExtractor()
    self.rss_link_extractor = RssLinkExtractor()
    self.allowed_domains = spec.get(
        'allowed_domains', self._get_allowed_domains(self._ipages))
    if not self.allowed_domains:
        self.allowed_domains = None
    self.build_url_filter(spec)

    self.itemcls_info = {}
    for itemclass_name, triplets in itertools.groupby(
            self._item_template_pages, operator.itemgetter(0)):
        page_extractors_pairs = map(operator.itemgetter(1, 2), triplets)
        schema = item_schemas[itemclass_name]
        item_cls = get_iblitem_class(schema)

        page_descriptor_pairs = []
        for page, template_extractors in page_extractors_pairs:
            item_descriptor = create_slybot_item_descriptor(schema)
            apply_extractors(item_descriptor, template_extractors,
                             all_extractors)
            page_descriptor_pairs.append((page, item_descriptor))

        extractor = InstanceBasedLearningExtractor(page_descriptor_pairs)

        self.itemcls_info[itemclass_name] = {
            'class': item_cls,
            'descriptor': item_descriptor,
            'extractor': extractor,
        }

    self.login_requests = []
    self.form_requests = []
    for rdata in spec.get("init_requests", []):
        if rdata["type"] == "login":
            request = Request(url=rdata.pop("loginurl"), meta=rdata,
                              callback=self.parse_login_page,
                              dont_filter=True)
            self.login_requests.append(request)
        elif rdata["type"] == "form":
            self.generic_form = GenericForm(**kw)
            self.form_requests.append(
                self.get_generic_form_start_request(rdata))
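# parse_login_page above receives the fetched login page and typically
# submits its form. A hedged sketch using Scrapy's FormRequest.from_response
# (the 'username'/'password' field names and the after_login callback are
# illustrative assumptions, not slybot's actual implementation):
from scrapy.http import FormRequest

def parse_login_page(self, response):
    # Credentials travel in request meta, as set up by the login Request
    # built in __init__ above.
    creds = response.meta
    return FormRequest.from_response(
        response,
        formdata={'username': creds.get('username', ''),
                  'password': creds.get('password', '')},
        callback=self.after_login,
        dont_filter=True)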