def _request_from_dict(cls, d, spider=None, method=None, properties=None):
    """Rebuild a Request from its dict representation.

    If *spider* is given, callback/errback *names* stored in *d* are
    resolved to methods on the spider via ``_get_method``.  When the dict
    carries a ``_class`` entry, that dotted path is loaded and used as the
    request class; otherwise plain ``Request`` is used.

    NOTE(review): ``cls``, ``method`` and ``properties`` are accepted but
    never read in this body — presumably kept for caller compatibility.
    """
    callback = d.get("callback")
    if spider and callback:
        callback = _get_method(spider, callback)

    errback = d.get("errback")
    if spider and errback:
        errback = _get_method(spider, errback)

    if "_class" in d:
        request_cls = load_object(d["_class"])
    else:
        request_cls = Request

    return request_cls(
        url=to_unicode(d.get("url")),
        callback=callback,
        errback=errback,
        method=d.get("method"),
        headers=d.get("headers"),
        body=d.get("body"),
        cookies=d.get("cookies"),
        meta=d.get("meta"),
        encoding=d.get("_encoding"),
        priority=d.get("priority", 0),
        # NOTE(review): defaults to True when absent, i.e. duplicates are
        # not filtered for deserialized requests — confirm this is intended.
        dont_filter=d.get("dont_filter", True),
        flags=d.get("flags"),
    )
def simple_request_from_dict(d, spider=None):
    """Rebuild a minimal Request from a dict.

    Only ``url``, ``callback``, ``errback`` and ``meta`` are restored;
    other request attributes (method, headers, body, cookies, ...) are
    deliberately left to the request class defaults.  If *spider* is
    given, callback/errback names are resolved to spider methods.

    Raises ``KeyError`` when any of the required keys (``url``,
    ``callback``, ``errback``, ``meta``) is missing from *d*.
    """
    callback = d['callback']
    errback = d['errback']
    if spider:
        if callback:
            callback = _get_method(spider, callback)
        if errback:
            errback = _get_method(spider, errback)

    request_cls = Request if '_class' not in d else load_object(d['_class'])

    return request_cls(
        url=to_native_str(d['url']),
        callback=callback,
        errback=errback,
        meta=d['meta'],
    )
def _request_from_dict(d, spider=None):
    """Rewrite of ``scrapy.utils.reqser.request_from_dict`` that can also
    rebuild POST requests carrying ``formdata``.

    If *spider* is given, the callback name (default ``'parse'``) and the
    errback name are resolved to spider methods.  When ``'formdata'`` is
    present, a ``scrapy.FormRequest`` is built with ``method='POST'`` and
    every formdata value coerced through ``_to_str``; otherwise an optional
    ``'_class'`` dotted path (or plain ``Request``) is used.

    Bug fix: the previous version used ``d.setdefault(...)`` and assigned
    ``d['_class']``, silently mutating the caller's dict as a side effect
    of deserialization.  This version reads *d* without modifying it.

    Raises ``KeyError`` if ``'url'`` is missing from *d*.
    """
    cb = d.get('callback', 'parse')
    if cb and spider:
        cb = _get_method(spider, cb)

    eb = d.get('errback')
    if eb and spider:
        eb = _get_method(spider, eb)

    kwargs = dict(
        url=to_unicode(d['url']),
        callback=cb,
        errback=eb,
        method=d.get('method', 'GET'),
        headers=d.get('headers'),
        body=d.get('body'),
        cookies=d.get('cookies'),
        meta=d.get('meta'),
        encoding=d.get('_encoding', 'utf-8'),
        priority=d.get('priority', 0),
        dont_filter=d.get('dont_filter', False),
        flags=d.get('flags'),
    )

    if 'formdata' in d:
        # A formdata payload always forces a FormRequest POST, overriding
        # any '_class' entry — same precedence as the original code.
        request_cls = load_object('scrapy.FormRequest')
        kwargs['method'] = 'POST'
        kwargs['formdata'] = {
            key: _to_str(value) for key, value in d['formdata'].items()
        }
    elif '_class' in d:
        request_cls = load_object(d['_class'])
    else:
        request_cls = Request

    return request_cls(**kwargs)
def get_config_requests(test_dir, spider, max_fixtures):
    """Build Request objects for the extra requests configured for a test.

    Reads ``REQUESTS_TO_ADD`` from the test directory's callback settings,
    fills each request dict with defaults, points its callback at the
    spider method named after *test_dir*'s last path component, and tags
    its meta with ``_update`` and an incrementing ``_fixture`` number.
    Only as many requests are produced as fit under *max_fixtures* given
    the fixtures already present.

    Bug fix: ``defaults`` sets ``'meta': None``, so any configured request
    without a ``meta`` dict previously crashed with ``TypeError`` on
    ``req['meta']['_update'] = 1``.  A missing/None meta is now coerced
    to an empty dict first.

    Returns a (possibly empty) list of Request objects.
    """
    curr_fixture_count = get_num_fixtures(test_dir)
    config = get_cb_settings(test_dir)
    try:
        requests_to_add = config.REQUESTS_TO_ADD
    except AttributeError:
        # No extra requests configured for this callback.
        return []

    defaults = {
        'method': 'GET',
        'headers': None,
        'body': None,
        'cookies': None,
        'meta': None,
        '_encoding': 'utf-8',
        'priority': 0,
        'dont_filter': False,
        'errback': None,
        'flags': None,
        'cb_kwargs': None,
    }

    # Loop-invariant: every request gets the same callback, resolved once.
    callback = _get_method(spider, test_dir.split('/')[-1])

    complete_requests = []
    for req in requests_to_add:
        if curr_fixture_count >= max_fixtures:
            break
        for key, val in defaults.items():
            req.setdefault(key, val)
        req['callback'] = callback
        if req.get('meta') is None:
            req['meta'] = {}
        req['meta']['_update'] = 1
        req['meta']['_fixture'] = curr_fixture_count + 1
        complete_requests.append(req)
        curr_fixture_count += 1

    return [request_from_dict(req) for req in complete_requests]