def __init__(self, key, iv=None):
    if isinstance(key, str):  # pragma: no cover
        key = str(key).encode("utf-8")
    if isinstance(iv, str):  # pragma: no cover
        iv = str(iv).encode("utf-8")
    self.key = key
    self.iv = iv or self.rand_bytes(16)

def test_invalid_timeout(self):
    with pytest.raises(ValueError) as e:
        requests.get(httpbin("get"), timeout=(3, 4, 5))
    assert "(connect, read)" in str(e)

    with pytest.raises(ValueError) as e:
        requests.get(httpbin("get"), timeout="foo")
    assert "must be an int or float" in str(e)

def line(self):
    """Return Status-Line"""
    original = self._orig.raw._original_response
    return str('HTTP/{version} {status} {reason}'.format(
        version='.'.join(str(original.version)),
        status=original.status,
        reason=original.reason
    ))

def make_response_message(response):
    """Make an `HTTPMessage` from `requests.models.Response`."""
    encoding = response.encoding or 'ISO-8859-1'
    original = response.raw._original_response
    response_headers = response.headers
    return HTTPMessage(
        line='HTTP/{version} {status} {reason}'.format(
            version='.'.join(str(original.version)),
            status=original.status,
            reason=original.reason),
        headers=str(original.msg),
        body=response.content.decode(encoding) if response.content else '',
        content_type=response_headers.get('Content-Type'))

def safe_str(obj):
    """Return the byte string representation of obj."""
    try:
        return str(obj)
    except UnicodeEncodeError:
        # obj is unicode
        return unicode(obj).encode('unicode_escape')

def test_iter_lines(self):

    lines = (0, 2, 10, 100)

    for i in lines:
        r = get(httpbin('stream', str(i)), prefetch=False)
        lines = list(r.iter_lines())
        len_lines = len(lines)

        self.assertEqual(i, len_lines)

    # Tests that trailing whitespaces within lines do not get stripped.
    # Tests that a trailing non-terminated line does not get stripped.
    quote = (
        '''Agamemnon \n'''
        '''\tWhy will he not upon our fair request\r\n'''
        '''\tUntent his person and share the air with us?'''
    )

    # Make a request and monkey-patch its contents
    r = get(httpbin('get'), prefetch=False)
    r.raw = StringIO(quote)

    lines = list(r.iter_lines())
    len_lines = len(lines)
    self.assertEqual(len_lines, 3)

    joined = lines[0] + '\n' + lines[1] + '\r\n' + lines[2]
    self.assertEqual(joined, quote)

def update_cookies(cookies, resp):
    if 'SNUID' not in cookies:
        p = re.compile(r'(?<=SNUID=)\w+')
        cookies['SNUID'] = p.findall(resp.text)[0]
    suv = ''.join([str(int(time.time() * 1000000) + random.randint(0, 1000))])
    cookies['SUV'] = suv
    return cookies

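# Illustrative usage sketch for update_cookies above (not taken from the
# original source; assumes `re`, `time` and `random` are imported for it).
# `_FakeResponse` is a hypothetical stand-in for any object with a `.text`
# attribute, such as a `requests.Response`; the SNUID value is made up.
class _FakeResponse(object):
    text = 'pid=sogou-pc; SNUID=0A1B2C3D4E5F; IPLOC=CN1100'

jar = update_cookies({}, _FakeResponse())
assert jar['SNUID'] == '0A1B2C3D4E5F'  # pulled out of the response body
assert jar['SUV'].isdigit()            # microsecond timestamp plus a random offset
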
def test_iter_lines(self):

    lines = (0, 2, 10, 100)

    for i in lines:
        r = get(httpbin('stream', str(i)), prefetch=False)
        lines = list(r.iter_lines())
        len_lines = len(lines)
        self.assertEqual(i, len_lines)

    # Test 'dangling' fragment in responses that do not terminate in
    # a newline.
    quote = (
        '''Why will he not upon our fair request\n'''
        '''Untent his person and share the air with us?'''
    )

    # Make a request and monkey-patch its contents
    r = get(httpbin('get'))
    r.raw = StringIO(quote)

    # Make sure iter_lines doesn't chop the trailing bit
    lines = '\n'.join(r.iter_lines())
    self.assertEqual(lines, quote)

def pad(text):
    if not text:  # pragma: no cover
        return text
    if isinstance(text, str):
        text = str(text).encode("utf-8")
    to_pad = 16 - (len(text) % 16)
    return text + (six.int2byte(to_pad) * to_pad)

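# Quick check of the padding invariant above (not from the original source;
# assumes `six` is imported and `pad` is the function defined directly above).
padded = pad(b"secret")                             # 6 data bytes -> 10 padding bytes
assert len(padded) == 16                            # rounded up to one 16-byte AES block
assert padded == b"secret" + six.int2byte(10) * 10  # pad byte encodes the pad length
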
def test_urllib3_pool_connection_closed(httpbin):
    s = requests.Session()
    s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))

    try:
        s.get(httpbin('status/200'))
    except ConnectionError as e:
        assert u"Pool is closed." in str(e)

def safe_unicode(obj, *args):
    """Return the unicode representation of obj."""
    try:
        return unicode(obj, *args)
    except UnicodeDecodeError:
        # obj is byte string
        ascii_text = str(obj).encode('string_escape')
        return unicode(ascii_text)

def test_set_environ_raises_exception():
    """Tests set_environ will raise exceptions in context when the
    value parameter is None."""
    with pytest.raises(Exception) as exception:
        with set_environ('test1', None):
            raise Exception('Expected exception')

    assert 'Expected exception' in str(exception.value)

def test_unicode_header_name(self):
    requests.put(
        httpbin('put'),
        headers={
            str('Content-Type'): 'application/octet-stream'  # compat.str is unicode.
        },
        data='\xff'
    )

def test_http_error(self):
    error = requests.exceptions.HTTPError()
    assert not error.response
    response = requests.Response()
    error = requests.exceptions.HTTPError(response=response)
    assert error.response == response
    error = requests.exceptions.HTTPError("message", response=response)
    assert str(error) == "message"
    assert error.response == response

def test_http_error(self):
    error = requests.exceptions.HTTPError()
    self.assertEqual(error.response, None)
    response = requests.Response()
    error = requests.exceptions.HTTPError(response=response)
    self.assertEqual(error.response, response)
    error = requests.exceptions.HTTPError('message', response=response)
    self.assertEqual(str(error), 'message')
    self.assertEqual(error.response, response)

def _get_response(args):

    auto_json = args.data and not args.form
    if args.json or auto_json:
        # JSON
        if 'Content-Type' not in args.headers:
            args.headers['Content-Type'] = TYPE_JSON

        if 'Accept' not in args.headers:
            # Default Accept to JSON as well.
            args.headers['Accept'] = 'application/json'

        if isinstance(args.data, dict):
            # If not empty, serialize the data `dict` parsed from arguments.
            # Otherwise set it to `None` to avoid sending "{}".
            args.data = json.dumps(args.data) if args.data else None

    elif args.form:
        # Form
        if not args.files and 'Content-Type' not in args.headers:
            # If sending files, `requests` will set
            # the `Content-Type` for us.
            args.headers['Content-Type'] = TYPE_FORM

    # Fire the request.
    try:
        credentials = None
        if args.auth:
            auth_type = (requests.auth.HTTPDigestAuth
                         if args.auth_type == 'digest'
                         else requests.auth.HTTPBasicAuth)
            credentials = auth_type(args.auth.key, args.auth.value)

        return requests.request(
            method=args.method.lower(),
            url=args.url if '://' in args.url else 'http://%s' % args.url,
            headers=args.headers,
            data=args.data,
            verify={'yes': True, 'no': False}.get(args.verify, args.verify),
            timeout=args.timeout,
            auth=credentials,
            proxies=dict((p.key, p.value) for p in args.proxy),
            files=args.files,
            allow_redirects=args.allow_redirects,
            params=args.queries,
        )
    except (KeyboardInterrupt, SystemExit):
        sys.stderr.write('\n')
        sys.exit(1)
    except Exception as e:
        if args.traceback:
            raise
        sys.stderr.write(str(e.message) + '\n')
        sys.exit(1)

def _get_response(parser, args, stdin, stdin_isatty):

    if not stdin_isatty:
        if args.data:
            parser.error('Request body (stdin) and request '
                         'data (key=value) cannot be mixed.')
        args.data = stdin.read()

    if args.json or (not args.form and args.data):
        # JSON
        if not args.files and ('Content-Type' not in args.headers
                               and (args.data or args.json)):
            args.headers['Content-Type'] = TYPE_JSON

        if stdin_isatty:
            # Serialize the parsed data.
            args.data = json.dumps(args.data)

        if 'Accept' not in args.headers:
            # Default Accept to JSON as well.
            args.headers['Accept'] = 'application/json'

    elif not args.files and 'Content-Type' not in args.headers:
        # Form
        args.headers['Content-Type'] = TYPE_FORM

    # Fire the request.
    try:
        credentials = None
        if args.auth:
            auth_type = (requests.auth.HTTPDigestAuth
                         if args.auth_type == 'digest'
                         else requests.auth.HTTPBasicAuth)
            credentials = auth_type(args.auth.key, args.auth.value)

        return requests.request(
            method=args.method.lower(),
            url=args.url if '://' in args.url else 'http://%s' % args.url,
            headers=args.headers,
            data=args.data,
            verify={'yes': True, 'no': False}.get(args.verify, args.verify),
            timeout=args.timeout,
            auth=credentials,
            proxies=dict((p.key, p.value) for p in args.proxy),
            files=args.files,
            allow_redirects=args.allow_redirects,
        )
    except (KeyboardInterrupt, SystemExit):
        sys.stderr.write('\n')
        sys.exit(1)
    except Exception as e:
        if args.traceback:
            raise
        sys.stderr.write(str(e.message) + '\n')
        sys.exit(1)

def _encrypt(self, aes, data):
    if isinstance(data, bytes):
        return data.decode("utf-8")
    elif isinstance(data, dict):
        return self._encrypt_dict(aes, data)
    elif not isinstance(data, str) and hasattr(data, "__iter__"):
        return list(map(lambda item: self._encrypt(aes, item), data))  # pragma: no cover
    return str(data).lower()

def converte_dicionario_unicode_em_ascii(data):
    if isinstance(data, str):
        try:
            return str(data)
        except:
            return data
    elif isinstance(data, collections.Mapping):
        return dict(list(map(converte_dicionario_unicode_em_ascii,
                             iter(list(data.items())))))
    elif isinstance(data, collections.Iterable):
        return type(data)(list(map(converte_dicionario_unicode_em_ascii, data)))
    else:
        return data

def headers(self):
    headers = dict(self._orig.headers)
    content_type = headers.get('Content-Type')

    if isinstance(content_type, bytes):
        # Happens when uploading files.
        # TODO: submit a bug report for Requests
        headers['Content-Type'] = str(content_type)

    if 'Host' not in headers:
        headers['Host'] = urlparse(self._orig.url).netloc

    return '\n'.join('%s: %s' % (name, value)
                     for name, value in headers.items())

def register_device(self):
    logging.debug('Call register_device')
    data = {
        'client_type': self.client_type,
        'device_hash': '4BE7D6F1BD040DE45A371FD831167BC108554111',
        'device_name': 'Opera-Browser-Client'
    }
    result = self.post('/v4/register_device', data)
    self.device_id = result['data']['device_id']
    logging.debug('Device id: %s' % self.device_id)
    self.device_id_hash = hashlib.sha1(
        str(self.device_id).encode('ascii')).hexdigest().upper()
    self.device_password = result['data']['device_password']
    logging.debug('Device registered')

def make_request_message(request):
    """Make an `HTTPMessage` from `requests.models.Request`."""
    url = urlparse(request.url)
    request_headers = dict(request.headers)
    if 'Host' not in request_headers:
        request_headers['Host'] = url.netloc
    return HTTPMessage(
        line='{method} {path} HTTP/1.1'.format(
            method=request.method,
            path=url.path or '/'),
        headers=NEW_LINE.join(str('%s: %s') % (name, value)
                              for name, value in request_headers.items()),
        body=request._enc_data,
        content_type=request_headers.get('Content-Type')
    )

def wrapped_function(*args, **kwargs):
    if automatic_options and request.method == 'OPTIONS':
        resp = current_app.make_default_options_response()
    else:
        resp = make_response(f(*args, **kwargs))
    if not attach_to_all and request.method != 'OPTIONS':
        return resp

    h = resp.headers
    h['Access-Control-Allow-Origin'] = origin
    h['Access-Control-Allow-Methods'] = get_methods()
    h['Access-Control-Max-Age'] = str(max_age)
    if headers is not None:
        h['Access-Control-Allow-Headers'] = headers
    return resp

def register_subscriber(self):
    logging.debug('Call register_subscriber')
    email = '%s@%s.surfeasy.vpn' % (uuid.uuid4(), self.client_type)
    password = uuid.uuid4()
    password_hash = hashlib.sha1(
        str(password).encode('ascii')).hexdigest().upper()
    logging.debug('Your SurfEasy email: %s' % email)
    logging.debug('Your SurfEasy password: %s' % password)
    logging.debug('Your SurfEasy password hash: %s' % password_hash)
    logging.debug("These are not the credentials you are looking for "
                  "(you won't probably need these, ever)")
    data = {'email': email, 'password': password_hash}
    result = self.post('/v4/register_subscriber', data)
    logging.debug('Subscriber registered')
    return result

def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status.

    """
    if env.is_windows and not env.stdout_isatty:
        env.stderr.write(
            'http: error: Output redirection is not supported on Windows.'
            ' Please use `--output FILE\' instead.\n')
        return 1

    try:
        args = parser.parse_args(args=args, env=env)

        response = get_response(args, env)

        status = 0

        if args.check_status:
            status = get_exist_status(response.status_code,
                                      args.allow_redirects)
            if status and not env.stdout_isatty:
                err = 'http error: %s %s\n' % (
                    response.raw.status, response.raw.reason)
                env.stderr.write(err)

        try:
            # We are writing bytes so we use buffer on Python 3
            buffer = env.stdout.buffer
        except AttributeError:
            buffer = env.stdout

        for chunk in output_stream(args, env, response.request, response):
            buffer.write(chunk)
            if env.stdout_isatty:
                env.stdout.flush()

    except (KeyboardInterrupt, SystemExit):
        env.stderr.write('\n')
        return 1
    except Exception as e:
        if '--debug' in args:
            raise
        env.stderr.write(str(repr(e) + '\n'))
        return 1

    return status

def headers(self):
    original = self._orig.raw._original_response

    status_line = 'HTTP/{version} {status} {reason}'.format(
        version='.'.join(str(original.version)),
        status=original.status,
        reason=original.reason)

    headers = [status_line]
    try:
        # `original.msg` is a `http.client.HTTPMessage` on Python 3
        # `_headers` is a 2-tuple
        headers.extend('%s: %s' % header
                       for header in original.msg._headers)
    except AttributeError:
        # and a `httplib.HTTPMessage` on Python 2.x
        # `headers` is a list of `name: val<CRLF>`.
        headers.extend(h.strip() for h in original.msg.headers)

    return '\r\n'.join(headers)

def cmd_line(args=None, kph=None):
    parser = argparse.ArgumentParser(
        prog="keepasshttp",
        description='Fetch credentials from keepass')
    parser.add_argument(
        '-c', '--config',
        dest='config_path',
        help="alternative path for keepasshttp's AES exchange key "
             "(default: ~/.python_keepass_http)",
    )
    parser.add_argument(
        '-u', '--url',
        default="http://localhost:19455/",
        help="alternative url for keepasshttp server "
             "(default: 'http://localhost:19455/')",
    )
    parser.add_argument(
        '-f', '--format',
        choices=formatters.keys(),
        default="text",
        help="output format for credentials",
    )
    parser.add_argument(
        'credentials',
        metavar='credential',
        nargs='+',
        help='Url or name to match credentials from keepass database',
    )
    args = parser.parse_args(args)

    # kph is only set for unittest
    if not kph:  # pragma: no cover
        kph = KeePassHTTP(storage=args.config_path, url=args.url)

    credentials = OrderedDict()
    for credential in args.credentials:
        credentials[credential] = kph.get(credential)

    return str(formatters[args.format](credentials))

def _parse_items(self, args):
    """Parse `args.items` into `args.headers`, `args.data`,
    `args.params`, and `args.files`.

    """
    args.headers = CaseInsensitiveDict()
    args.data = ParamDict() if args.form else OrderedDict()
    args.files = OrderedDict()
    args.params = ParamDict()
    try:
        parse_items(items=args.items,
                    headers=args.headers,
                    data=args.data,
                    files=args.files,
                    params=args.params)
    except ParseError as e:
        if args.traceback:
            raise
        self.error(str(e))

    if args.files and not args.form:
        # `http url @/path/to/file`
        file_fields = list(args.files.keys())
        if file_fields != ['']:
            self.error(
                'Invalid file fields (perhaps you meant --form?): %s'
                % ','.join(file_fields))

        fn, fd = args.files['']
        args.files = {}
        self._body_from_file(args, fd)
        if 'Content-Type' not in args.headers:
            mime, encoding = mimetypes.guess_type(fn, strict=False)
            if mime:
                content_type = mime
                if encoding:
                    content_type = '%s; charset=%s' % (mime, encoding)
                args.headers['Content-Type'] = content_type

def line(self):
    """Return Request-Line"""
    url = urlparse(self._orig.url)

    # Querystring
    qs = ''
    if url.query or self._orig.params:
        qs = '?'
        if url.query:
            qs += url.query
        # Requests doesn't make params part of ``request.url``.
        if self._orig.params:
            if url.query:
                qs += '&'
            #noinspection PyUnresolvedReferences
            qs += type(self._orig)._encode_params(self._orig.params)

    # Request-Line
    return str('{method} {path}{query} HTTP/1.1'.format(
        method=self._orig.method,
        path=url.path or '/',
        query=qs
    ))

def headers(self):
    return str(self._orig.raw._original_response.msg)

def test_invalid_timeout(self, httpbin, timeout, error_text):
    with pytest.raises(ValueError) as e:
        requests.get(httpbin('get'), timeout=timeout)
    assert error_text in str(e)

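# A plausible standalone parametrization of the fixture-style test above,
# reusing the timeout/error pairs from the earlier non-parametrized
# test_invalid_timeout in this collection; the decorator in the original
# source may differ, and `httpbin` is assumed to be a pytest-httpbin fixture.
@pytest.mark.parametrize('timeout, error_text', (
    ((3, 4, 5), '(connect, read)'),
    ('foo', 'must be an int or float'),
))
def test_invalid_timeout_sketch(httpbin, timeout, error_text):
    with pytest.raises(ValueError) as e:
        requests.get(httpbin('get'), timeout=timeout)
    assert error_text in str(e)
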
def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status.

    """
    def error(msg, *args):
        msg = msg % args
        env.stderr.write('\nhttp: error: %s\n' % msg)

    debug = '--debug' in args
    traceback = debug or '--traceback' in args
    status = EXIT.OK

    if debug:
        sys.stderr.write('HTTPie version: %s\n' % httpie_version)
        sys.stderr.write('Requests version: %s\n' % requests_version)
        sys.stderr.write('Pygments version: %s\n' % pygments_version)

    try:
        args = parser.parse_args(args=args, env=env)
        kwargs = get_requests_kwargs(args)

        if args.debug:
            sys.stderr.write(
                '\n>>> requests.request(%s)\n\n' % pformat(kwargs))

        response = requests.request(**kwargs)

        if args.check_status:
            status = get_exist_status(response.status_code,
                                      args.allow_redirects)
            if status and not env.stdout_isatty:
                error('%s %s', response.raw.status, response.raw.reason)

        stream = output_stream(args, env, response.request, response)

        try:
            write(stream=stream,
                  outfile=env.stdout,
                  flush=env.stdout_isatty or args.stream)
        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write('\n')
            else:
                raise

    except (KeyboardInterrupt, SystemExit):
        if traceback:
            raise
        env.stderr.write('\n')
        status = EXIT.ERROR
    except requests.Timeout:
        status = EXIT.ERROR_TIMEOUT
        error('Request timed out (%ss).', args.timeout)
    except Exception as e:
        # TODO: distinguish between expected and unexpected errors.
        #       network errors vs. bugs, etc.
        if traceback:
            raise
        error('%s: %s', type(e).__name__, str(e))
        status = EXIT.ERROR

    return status

def prepare_url(self, url, params):
    """Prepares the given HTTP URL."""
    #: Accept objects that have string representations.
    #: We're unable to blindly call unicode/str functions
    #: as this will include the bytestring indicator (b'')
    #: on python 3.x.
    #: https://github.com/requests/requests/pull/2238
    if isinstance(url, bytes):
        url = url.decode('utf8')
    else:
        url = str(url)

    # Remove leading whitespaces from url
    url = url.lstrip()

    need_quote = True
    if url.startswith(key_unquote):
        need_quote = False
        url = url.replace(key_unquote, "")

    # Don't do any URL preparation for non-HTTP schemes like `mailto`,
    # `data` etc to work around exceptions from `url_parse`, which
    # handles RFC 3986 only.
    if ':' in url and not url.lower().startswith('http'):
        self.url = url
        return

    # Support for unicode domain names and paths.
    try:
        scheme, auth, host, port, path, query, fragment = parse_url(url)
    except LocationParseError as e:
        raise InvalidURL(*e.args)

    if not scheme:
        error = ("Invalid URL {0!r}: No schema supplied. "
                 "Perhaps you meant http://{0}?")
        error = error.format(to_native_string(url, 'utf8'))
        raise MissingSchema(error)

    if not host:
        raise InvalidURL("Invalid URL %r: No host supplied" % url)

    # In general, we want to try IDNA encoding the hostname if the string
    # contains non-ASCII characters. This allows users to automatically get
    # the correct IDNA behaviour. For strings containing only ASCII
    # characters, we need to also verify it doesn't start with a wildcard
    # (*), before allowing the unencoded hostname.
    if not unicode_is_ascii(host):
        try:
            host = self._get_idna_encoded_host(host)
        except UnicodeError:
            raise InvalidURL('URL has an invalid label.')
    elif host.startswith(u'*'):
        raise InvalidURL('URL has an invalid label.')

    # Carefully reconstruct the network location
    netloc = auth or ''
    if netloc:
        netloc += '@'
    netloc += host
    if port:
        netloc += ':' + str(port)

    # Bare domains aren't valid URLs.
    if not path:
        path = '/'

    if isinstance(params, (str, bytes)):
        params = to_native_string(params)

    enc_params = self._encode_params(params)
    if enc_params:
        if query:
            query = '%s&%s' % (query, enc_params)
        else:
            query = enc_params

    if need_quote:
        url = requote_uri(
            urlunparse([scheme, netloc, path, None, query, fragment]))
    else:
        url = urlunparse([scheme, netloc, path, None, query, fragment])
    self.url = url

def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status.

    """
    if env.config.default_options:
        args = env.config.default_options + args

    def error(msg, *args):
        msg = msg % args
        env.stderr.write("\nhttp: error: %s\n" % msg)

    debug = "--debug" in args
    traceback = debug or "--traceback" in args
    status = exit.OK

    if debug:
        print_debug_info(env)
        if args == ["--debug"]:
            sys.exit(exit.OK)

    try:
        args = parser.parse_args(args=args, env=env)

        response = get_response(args, config_dir=env.config.directory)

        if args.check_status:
            status = get_exist_status(response.status_code, args.follow)
            if status and not env.stdout_isatty:
                error("%s %s", response.raw.status, response.raw.reason)

        stream = output_stream(args, env, response.request, response)

        write_kwargs = {
            "stream": stream,
            "outfile": env.stdout,
            "flush": env.stdout_isatty or args.stream
        }
        try:
            if env.is_windows and is_py3 and "colors" in args.prettify:
                write_with_colors_win_p3k(**write_kwargs)
            else:
                write(**write_kwargs)
        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write("\n")
            else:
                raise

    except (KeyboardInterrupt, SystemExit):
        if traceback:
            raise
        env.stderr.write("\n")
        status = exit.ERROR
    except requests.Timeout:
        status = exit.ERROR_TIMEOUT
        error("Request timed out (%ss).", args.timeout)
    except Exception as e:
        # TODO: distinguish between expected and unexpected errors.
        #       network errors vs. bugs, etc.
        if traceback:
            raise
        error("%s: %s", type(e).__name__, str(e))
        status = exit.ERROR

    return status

def build_digest_header(self, method, url):
    """
    :rtype: str
    """
    realm = self._thread_local.chal['realm']
    nonce = self._thread_local.chal['nonce']
    qop = self._thread_local.chal.get('qop')
    algorithm = self._thread_local.chal.get('algorithm')
    opaque = self._thread_local.chal.get('opaque')
    hash_utf8 = None

    if algorithm is None:
        _algorithm = 'MD5'
    else:
        _algorithm = algorithm.upper()

    # lambdas assume digest modules are imported at the top level
    if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
        def md5_utf8(x):
            if isinstance(x, str):
                x = x.encode('utf-8')
            return hashlib.md5(x).hexdigest()
        hash_utf8 = md5_utf8
    elif _algorithm == 'SHA':
        def sha_utf8(x):
            if isinstance(x, str):
                x = x.encode('utf-8')
            return hashlib.sha1(x).hexdigest()
        hash_utf8 = sha_utf8
    elif _algorithm == 'SHA-256':
        def sha256_utf8(x):
            if isinstance(x, str):
                x = x.encode('utf-8')
            return hashlib.sha256(x).hexdigest()
        hash_utf8 = sha256_utf8

    KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

    if hash_utf8 is None:
        return None

    # XXX not implemented yet
    entdig = None
    p_parsed = urlparse(url)
    #: path is request-uri defined in RFC 2616 which should not be empty
    path = p_parsed.path or "/"
    if p_parsed.query:
        path += '?' + p_parsed.query

    A1 = '%s:%s:%s' % (self.username, realm, self.password)
    A2 = '%s:%s' % (method, path)

    HA1 = hash_utf8(A1)
    HA2 = hash_utf8(A2)

    if nonce == self._thread_local.last_nonce:
        self._thread_local.nonce_count += 1
    else:
        self._thread_local.nonce_count = 1
    ncvalue = '%08x' % self._thread_local.nonce_count
    s = str(self._thread_local.nonce_count).encode('utf-8')
    s += nonce.encode('utf-8')
    s += time.ctime().encode('utf-8')
    s += os.urandom(8)

    cnonce = (hashlib.sha1(s).hexdigest()[:16])
    if _algorithm == 'MD5-SESS':
        HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

    if not qop:
        respdig = KD(HA1, "%s:%s" % (nonce, HA2))
    elif qop == 'auth' or 'auth' in qop.split(','):
        noncebit = "%s:%s:%s:%s:%s" % (
            nonce, ncvalue, cnonce, 'auth', HA2
        )
        respdig = KD(HA1, noncebit)
    else:
        # XXX handle auth-int.
        return None

    self._thread_local.last_nonce = nonce

    # XXX should the partial digests be encoded too?
    base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
           'response="%s"' % (self.username, realm, nonce, path, respdig)
    if opaque:
        base += ', opaque="%s"' % opaque
    if algorithm:
        base += ', algorithm="%s"' % algorithm
    if entdig:
        base += ', digest="%s"' % entdig
    if qop:
        base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

    return 'Digest %s' % (base)

def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status.

    """
    if env.config.default_options:
        args = env.config.default_options + args

    def error(msg, *args):
        msg = msg % args
        env.stderr.write('\nhttp: error: %s\n' % msg)

    debug = '--debug' in args
    traceback = debug or '--traceback' in args
    status = exit.OK

    if debug:
        print_debug_info(env)
        if args == ['--debug']:
            sys.exit(exit.OK)

    try:
        args = parser.parse_args(args=args, env=env)

        response = get_response(args, config_dir=env.config.directory)

        if args.check_status:
            status = get_exist_status(response.status_code, args.follow)
            if status and not env.stdout_isatty:
                error('%s %s', response.raw.status, response.raw.reason)

        stream = output_stream(args, env, response.request, response)

        write_kwargs = {
            'stream': stream,
            'outfile': env.stdout,
            'flush': env.stdout_isatty or args.stream
        }
        try:
            if env.is_windows and is_py3 and 'colors' in args.prettify:
                write_with_colors_win_p3k(**write_kwargs)
            else:
                write(**write_kwargs)

            if env.stdout_isatty:
                from subprocess import Popen, PIPE
                Popen(env.pager, shell=True,
                      stdin=PIPE).communicate(input=env.stdout.getvalue())

        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write('\n')
            else:
                raise

    except (KeyboardInterrupt, SystemExit):
        if traceback:
            raise
        env.stderr.write('\n')
        status = exit.ERROR
    except requests.Timeout:
        status = exit.ERROR_TIMEOUT
        error('Request timed out (%ss).', args.timeout)
    except Exception as e:
        # TODO: distinguish between expected and unexpected errors.
        #       network errors vs. bugs, etc.
        if traceback:
            raise
        error('%s: %s', type(e).__name__, str(e))
        status = exit.ERROR

    return status

def main(args=sys.argv[1:], env=Environment()):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status code.

    """
    if env.config.default_options:
        args = env.config.default_options + args

    def error(msg, *args, **kwargs):
        msg = msg % args
        level = kwargs.get('level', 'error')
        env.stderr.write('http: %s: %s\n' % (level, msg))

    debug = '--debug' in args
    traceback = debug or '--traceback' in args
    exit_status = ExitStatus.OK

    if debug:
        print_debug_info(env)
        if args == ['--debug']:
            return exit_status

    try:
        args = parser.parse_args(args=args, env=env)

        response = get_response(args, config_dir=env.config.directory)

        if args.check_status:
            exit_status = get_exit_status(response.status_code, args.follow)
            if not env.stdout_isatty and exit_status != ExitStatus.OK:
                error('HTTP %s %s',
                      response.raw.status, response.raw.reason,
                      level='warning')

        write_kwargs = {
            'stream': build_output_stream(
                args, env, response.request, response),
            'outfile': env.stdout,
            'flush': env.stdout_isatty or args.stream
        }
        try:
            if env.is_windows and is_py3 and 'colors' in args.prettify:
                write_with_colors_win_p3k(**write_kwargs)
            else:
                write(**write_kwargs)
        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write('\n')
            else:
                raise

    except (KeyboardInterrupt, SystemExit):
        if traceback:
            raise
        env.stderr.write('\n')
        exit_status = ExitStatus.ERROR
    except requests.Timeout:
        exit_status = ExitStatus.ERROR_TIMEOUT
        error('Request timed out (%ss).', args.timeout)
    except Exception as e:
        # TODO: Better distinction between expected and unexpected errors.
        #       Network errors vs. bugs, etc.
        if traceback:
            raise
        error('%s: %s', type(e).__name__, str(e))
        exit_status = ExitStatus.ERROR

    return exit_status

import sys
import json
import pydoc

try:
    from collections import OrderedDict
except ImportError:
    OrderedDict = dict

import requests
from requests.compat import urlparse, str
from requests.structures import CaseInsensitiveDict

from . import cli
from . import pretty
from . import __version__ as version


NEW_LINE = str('\n')
DEFAULT_UA = 'HTTPie/%s' % version
TYPE_FORM = 'application/x-www-form-urlencoded; charset=utf-8'
TYPE_JSON = 'application/json; charset=utf-8'


class HTTPMessage(object):

    def __init__(self, line, headers, body, content_type=None):
        # {Request,Status}-Line
        self.line = line
        self.headers = headers
        self.body = body
        self.content_type = content_type

def main(args=None,
         stdin=sys.stdin, stdin_isatty=sys.stdin.isatty(),
         stdout=sys.stdout, stdout_isatty=sys.stdout.isatty()):

    parser = cli.parser

    args = parser.parse_args(args if args is not None else sys.argv[1:])
    do_prettify = (args.prettify is True
                   or (args.prettify == cli.PRETTIFY_STDOUT_TTY_ONLY
                       and stdout_isatty))

    # Parse request headers and data from the command line.
    headers = CaseInsensitiveDict()
    headers['User-Agent'] = DEFAULT_UA
    data = OrderedDict()
    files = OrderedDict()
    try:
        cli.parse_items(items=args.items, headers=headers,
                        data=data, files=files)
    except cli.ParseError as e:
        if args.traceback:
            raise
        parser.error(e.message)

    if files and not args.form:
        # We could just switch to --form automatically here,
        # but I think it's better to make it explicit.
        parser.error(
            'You need to set the --form / -f flag to issue'
            ' a multipart request. File fields: %s'
            % ','.join(files.keys()))

    if not stdin_isatty:
        if data:
            parser.error('Request body (stdin) and request '
                         'data (key=value) cannot be mixed.')
        data = stdin.read()

    # JSON/Form content type.
    if args.json or (not args.form and data):
        if stdin_isatty:
            data = json.dumps(data)
        if not files and ('Content-Type' not in headers
                          and (data or args.json)):
            headers['Content-Type'] = TYPE_JSON
    elif not files and 'Content-Type' not in headers:
        headers['Content-Type'] = TYPE_FORM

    # Fire the request.
    try:
        response = requests.request(
            method=args.method.lower(),
            url=args.url if '://' in args.url else 'http://%s' % args.url,
            headers=headers,
            data=data,
            verify=True if args.verify == 'yes' else args.verify,
            timeout=args.timeout,
            auth=(args.auth.key, args.auth.value) if args.auth else None,
            proxies=dict((p.key, p.value) for p in args.proxy),
            files=files,
            allow_redirects=args.allow_redirects,
        )
    except (KeyboardInterrupt, SystemExit):
        sys.stderr.write(NEW_LINE)
        sys.exit(1)
    except Exception as e:
        if args.traceback:
            raise
        sys.stderr.write(str(e.message) + NEW_LINE)
        sys.exit(1)

    prettifier = pretty.PrettyHttp(args.style) if do_prettify else None
    output_request = (cli.OUT_REQUEST_HEADERS in args.output_options
                      or cli.OUT_REQUEST_BODY in args.output_options)
    output_response = (cli.OUT_RESPONSE_HEADERS in args.output_options
                       or cli.OUT_RESPONSE_BODY in args.output_options)

    buf = list()

    if output_request:
        buf.append(format_http_message(
            message=make_request_message(response.request),
            prettifier=prettifier,
            with_headers=cli.OUT_REQUEST_HEADERS in args.output_options,
            with_body=cli.OUT_REQUEST_BODY in args.output_options
        ))
        if output_response:
            buf.append(NEW_LINE)

    if output_response:
        buf.append(format_http_message(
            message=make_response_message(response),
            prettifier=prettifier,
            with_headers=cli.OUT_RESPONSE_HEADERS in args.output_options,
            with_body=cli.OUT_RESPONSE_BODY in args.output_options
        ) + NEW_LINE)

    if args.no_pager or stdout != sys.stdout:
        # don't want to use pager or output is redirected
        stdout.write(''.join(buf))
    else:
        pager(''.join(buf), stdout)

def escape(s):
    return "\"{0}\"".format(str(s).replace("\"", "\\\""))

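# Quick illustration of the quoting behaviour above (not from the original source).
assert escape('say "hi"') == '"say \\"hi\\""'  # wraps in quotes, backslash-escapes inner quotes
assert escape(42) == '"42"'                    # non-strings are passed through str() first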