def test_settings(self):
    def test():
        r = requests.get(httpbin(''))
        r.raise_for_status()

    with requests.settings(timeout=0.0000001):
        self.assertRaises(requests.Timeout, test)

    with requests.settings(timeout=100):
        requests.get(httpbin(''))
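# The test above assumes an `httpbin` URL helper from the surrounding test
# module. A minimal sketch of one plausible definition (the base URL is an
# assumption):
HTTPBIN_URL = 'http://httpbin.org/'

def httpbin(*suffix):
    """Return a URL for a resource under the httpbin service."""
    return HTTPBIN_URL + '/'.join(suffix)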
import inspect
import itertools
import zipfile
from StringIO import StringIO

import requests

# `apply_transforms` and `parsers` are assumed to be provided elsewhere in
# the surrounding module.


def open(url, **kwargs):
    """
    Open a remote table.
    """
    # Possibly transform the URL.
    url = apply_transforms(url)

    # Figure out which parser to use.
    parser = kwargs.pop('parser', None)
    if callable(parser):
        pass
    elif parser is None:
        if 'filename' in kwargs:
            parser = parsers.guess_parser(kwargs['filename'])
        else:
            parser = parsers.guess_parser(url)
    else:
        parser = parsers.get_parser(parser)

    # Pull all the kwargs that might apply to the request out of kwargs so
    # that we can pass them on to `requests.request()`. To avoid clashes with
    # parser kwargs these are named `request_*` in the kwargs to open(). That
    # is, `request_method` translates to the `method` argument to
    # `requests.request()`.
    #
    # We'll pop the kwargs out of the dict here so that the remaining kwargs
    # can be passed safely to the parser.
    request_kwargs = {}
    for k in inspect.getargspec(requests.request).args:
        if ('request_%s' % k) in kwargs:
            request_kwargs[k] = kwargs.pop('request_%s' % k)

    # The two required arguments to request.
    request_kwargs['url'] = url
    request_kwargs.setdefault('method', 'get')

    # One more default that more closely matches my expectations.
    request_kwargs.setdefault('allow_redirects', True)

    # Save `select` and `omit` arguments for later.
    select_func = kwargs.pop('select', None)
    omit_func = kwargs.pop('omit', None)

    # Grab the data.
    with requests.settings(accept_gzip=False):
        response = requests.request(**request_kwargs)

    # If we got a `filename` argument then assume this is a zipfile.
    # XXX: would be nice to support gzip, etc.
    if 'filename' in kwargs:
        zf = zipfile.ZipFile(StringIO(response.content))
        stream = zf.open(kwargs.pop('filename'))
    else:
        stream = response.raw

    # Now parse the stream.
    results = parser(stream, **kwargs).read()

    # Process select/omit.
    if select_func:
        results = itertools.ifilter(select_func, results)
    if omit_func:
        results = itertools.ifilter(lambda item: not omit_func(item), results)

    return list(results)
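# A hedged usage sketch for open() above. The URLs, column name, and parser
# name are placeholders, and the `request_*` kwargs assume this era of
# requests exposes `timeout` and `data` as named arguments of
# requests.request(); rows are assumed to be dicts, which depends on the
# parser in use.

# Fetch a remote CSV, forwarding a timeout to requests.request(), and keep
# only matching rows.
rows = open('http://example.com/data.csv',
            parser='csv',
            request_timeout=10,
            select=lambda row: row.get('state') == 'CA')

# POST instead of GET by forwarding `method` and `data`.
rows = open('http://example.com/report',
            request_method='post',
            request_data={'year': 2011})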
import cookielib
import re

import requests

requests.settings(timeout=5.0)


def dash(url, tenant='admin', user='******', password='******'):
    csrf_regex = re.compile("name='csrfmiddlewaretoken' value='([^']*)'")
    login_regex = re.compile("auth")
    error_regex = re.compile("Error")

    jar = cookielib.CookieJar()

    # Fetch the login page and pull out the CSRF token.
    r = requests.get(url + '/auth/login/', cookies=jar)
    assert r.status_code == 200, 'unable to access login page'
    assert not re.search(error_regex, r.content), 'error displayed on login page'
    match = re.search(csrf_regex, r.content)
    assert match, 'unable to find CSRF token'

    # Post the credentials along with the CSRF token.
    auth = {'csrfmiddlewaretoken': match.group(1),
            'method': 'Login',
            'username': user,
            'password': password}
    r = requests.post(url + '/auth/login/', data=auth, cookies=jar)
    assert r.status_code == 200, 'failed to send auth credentials'
    assert not re.search(error_regex, r.content), 'error displayed on auth'

    # A successful login should reach the dashboard without being redirected
    # back to the login page.
    r = requests.get(url + '/dash/', cookies=jar)
    assert r.status_code == 200, 'failed to access user dash'
    assert not re.search(login_regex, r.url), 'user dash failed (redirected to login)'
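# A minimal usage sketch for dash() above; the URL and credentials are
# placeholders. dash() asserts at the first failed step and simply returns
# when the whole login flow succeeds.
if __name__ == '__main__':
    dash('http://127.0.0.1:8000', user='admin', password='secret')
    print 'dashboard login flow OK'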
def test_settings(self):
    with requests.settings(timeout=0.0001):
        self.assertRaises(requests.Timeout, requests.get, 'http://google.com')

    with requests.settings(timeout=10):
        requests.get('http://google.com')