def check_updates(env: Environment) -> None:
    """Warn about a newer HTTPie release, at most once per WARN_INTERVAL."""
    if env.config.get('disable_update_warnings'):
        return None

    version_file = env.config.version_info_file
    status_message = _get_update_status(env)
    if not status_message:
        return None

    # If the user quickly spawns multiple httpie processes
    # we don't want to end in a race; the lockfile serializes access.
    with open_with_lockfile(version_file) as fp:
        info = json.load(fp)

    # Rate-limit the warning so the user isn't spammed:
    # only warn again once WARN_INTERVAL has elapsed.
    now = datetime.now()
    previously_warned = info['last_warned_date']
    if previously_warned is not None:
        next_allowed = datetime.fromisoformat(previously_warned) + WARN_INTERVAL
        if now < next_allowed:
            return None

    env.log_error(status_message, level=LogLevel.INFO)
    info['last_warned_date'] = now.isoformat()
    with open_with_lockfile(version_file, 'w') as fp:
        json.dump(info, fp)
def upgrade_session(env: Environment, args: argparse.Namespace, hostname: str, session_name: str):
    """Run every pending fixer on a session file and bump its version."""
    session = get_httpie_session(
        env=env,
        config_dir=env.config.directory,
        session_name=session_name,
        host=hostname,
        url=hostname,
        refactor_mode=True,
    )
    session_name = session.path.stem

    if session.is_new():
        env.log_error(f'{session_name!r} @ {hostname!r} does not exist.')
        return ExitStatus.ERROR

    # Only fixers registered for versions newer than the session's apply.
    pending = [
        fixer
        for version, fixer in FIXERS_TO_VERSIONS.items()
        if is_version_greater(version, session.version)
    ]
    if not pending:
        env.stdout.write(
            f'{session_name!r} @ {hostname!r} is already up to date.\n')
        return ExitStatus.SUCCESS

    for fixer in pending:
        fixer(session, hostname, args)

    session.save(bump_version=True)
    env.stdout.write(
        f'Upgraded {session_name!r} @ {hostname!r} to v{session.version}\n')
    return ExitStatus.SUCCESS
def __init__(self, jwt_token, jwt_secret):
    """Store JWT credentials; an empty secret disables signature verification."""
    self.jwt_token = jwt_token
    # An empty-string secret means "don't verify the token signature".
    self.jwt_verify = jwt_secret != ''
    self.jwt_secret = jwt_secret if self.jwt_verify else None
    config = Environment().config
    # Cookie and XSRF header names are configurable, with defaults.
    self.jwt_cookie = config.get('jwt_cookie_name', 'access_token')
    self.jwt_xsrf = config.get('jwt_xsrf_header', 'X-XSRF-TOKEN')
def visit_immutation(self, node, children):
    """Handle an immutation node: 'preview' prints the command, 'action' runs it."""
    context = self._final_context()
    kind = children[0].expr_name
    if kind == 'preview':
        # Render the equivalent shell command without executing anything.
        if self.tool == 'httpie':
            argv = ['http'] + context.httpie_args(self.method, quote=True)
        else:
            assert self.tool == 'curl'
            argv = ['curl'] + context.curl_args(self.method, quote=True)
        click.echo(' '.join(argv))
    elif kind == 'action':
        buffer = BytesIO()
        try:
            env = Environment(stdout=buffer, is_windows=False)
            httpie_main(context.httpie_args(self.method), env=env)
            content = buffer.getvalue()
        finally:
            buffer.close()
        # XXX: Work around a bug of click.echo_via_pager(). When you pass
        # a bytestring to echo_via_pager(), it converts the bytestring with
        # str(b'abc'), which makes it "b'abc'".
        if six.PY2:
            content = unicode(content, 'utf-8')  # noqa
        else:
            content = str(content, 'utf-8')
        click.echo_via_pager(content)
    return node
def __init__(self, env=None, **kwargs):
    """
    Choose an output encoding based on the environment.

    :param env: an :class:`Environment` instance; a fresh one is created when
        omitted. (The original default ``env=Environment()`` was a mutable
        default evaluated once at import time and shared across instances.)
    :param kwargs: forwarded to the superclass.
    """
    super().__init__(**kwargs)
    if env is None:
        env = Environment()
    if env.stdout_isatty:
        # Use the encoding supported by the terminal.
        output_encoding = env.stdout_encoding
    else:
        # Preserve the message encoding.
        output_encoding = self.msg.encoding
    # Default to utf8 when unsure.
    self.output_encoding = output_encoding or 'utf8'
def _call_httpie_main(self):
    """Invoke httpie_main() for the current context, intercepting the response."""
    context = self._final_context()
    argv = extract_args_for_httpie_main(context, self.method)
    env = Environment(stdout=self.output, stdin=sys.stdin, is_windows=False)
    env.stdout_isatty = self.output.isatty()
    env.stdin_isatty = sys.stdin.isatty()
    # XXX: httpie_main() doesn't provide an API for us to get the
    # HTTP response object, so we use this super dirty hack -
    # sys.settrace() to intercept get_response() that is called in
    # httpie_main() internally. The HTTP response intercepted is
    # assigned to self.last_response, which self.listener may be
    # interested in.
    sys.settrace(self._trace_get_response)
    try:
        httpie_main(argv, env=env)
    finally:
        sys.settrace(None)
def __init__(self, env=None, **kwargs):
    """
    :param env: an :class:`Environment` instance; a fresh one is created when
        omitted (the original ``env=Environment()`` default was evaluated once
        at import time and shared across all calls).
    :param kwargs: additional keyword arguments that some processor
        might require.
    """
    self.enabled = True
    # Lazily create the environment to avoid the mutable-default pitfall.
    self.env = env if env is not None else Environment()
    self.kwargs = kwargs
def __init__(self, groups, env=None, **kwargs):
    """
    :param groups: names of processor groups to be applied
    :param env: Environment; a fresh instance is created when omitted (the
        original ``env=Environment()`` default was shared across calls)
    :param kwargs: additional keyword arguments for processors
    """
    if env is None:
        env = Environment()
    self.enabled = []
    for group in groups:
        for cls in self.available[group]:
            # Instantiate each processor; keep only the ones that enable
            # themselves for this environment.
            p = cls(env, **kwargs)
            if p.enabled:
                self.enabled.append(p)
def __init__(self, groups: List[str], env=None, **kwargs):
    """
    :param groups: names of processor groups to be applied
    :param env: Environment; a fresh instance is created when omitted (the
        original ``env=Environment()`` default was shared across calls)
    :param kwargs: additional keyword arguments for processors
    """
    if env is None:
        env = Environment()
    available_plugins = plugin_manager.get_formatters_grouped()
    self.enabled_plugins = []
    for group in groups:
        for cls in available_plugins[group]:
            # Instantiate each formatter plugin; keep only the enabled ones.
            p = cls(env=env, **kwargs)
            if p.enabled:
                self.enabled_plugins.append(p)
def main(
    args: Union[List[Union[str, bytes]], None] = None,
    env: Union[Environment, None] = None,
) -> ExitStatus:
    """
    Entry point for the manager program.

    Falls back to a "did you mean http ...?" hint when the manager parser
    rejects the arguments but the plain http/https parser would accept them.

    :param args: full argv including the program name; defaults to
        ``sys.argv``. (The original defaults ``args=sys.argv`` and
        ``env=Environment()`` were evaluated once at definition time; the
        shared ``Environment`` instance in particular leaked state between
        calls, so both are now created lazily.)
    :param env: runtime environment; a fresh one is created when omitted.
    """
    if args is None:
        args = sys.argv
    if env is None:
        env = Environment()

    from httpie.core import raw_main

    try:
        return raw_main(
            parser=parser,
            main_program=main_program,
            args=args,
            env=env,
        )
    except argparse.ArgumentError:
        # The manager parser rejected the args; if they look like a plain
        # http command, tell the user about the confusion.
        program_args = args[1:]
        if is_http_command(program_args, env):
            env.stderr.write(
                MSG_COMMAND_CONFUSION.format(args=' '.join(program_args)) + "\n")
        return ExitStatus.ERROR
def is_http_command(args: List[Union[str, bytes]], env: Environment) -> bool:
    """Check whether http/https parser can parse the arguments."""
    from httpie.cli.definition import parser as http_parser
    from httpie.manager.cli import COMMANDS

    # If the user already selected a top-level sub-command, never
    # show the http/https version. E.g httpie plugins pie.dev/post
    if len(args) >= 1 and args[0] in COMMANDS:
        return False

    # Attempt a silent parse; any failure (including SystemExit raised by
    # argparse) means the args are not a valid http command.
    with env.as_silent():
        try:
            http_parser.parse_args(env=env, args=args)
        except (Exception, SystemExit):
            return False
    return True
def make_app():
    """Make a WSGI app that has all the HTTPie pieces baked in."""
    env = Environment()
    args = parser.parse_args(args=['/'], env=env)
    args.output_options = 'HB'  # Output only requests.
    server = 'HTTPony/{0}'.format(__version__)

    def application(environ, start_response):
        # The WSGI server puts content length and type in the environment
        # even when not provided with the request. Drop them if they are empty.
        for key in ('CONTENT_LENGTH', 'CONTENT_TYPE'):
            if environ.get(key) == '':
                del environ[key]

        wrequest = WerkzeugRequest(environ)
        data = wrequest.get_data()
        prepared = Request(
            method=wrequest.method,
            url=wrequest.url,
            headers=wrequest.headers,
            data=data,
        ).prepare()

        stream = streams.build_output_stream(
            args, env, prepared, response=None,
            output_options=args.output_options)
        streams.write_stream(stream, env.stdout, env.stdout_isatty)

        # When there is data in the request, give the next one breathing room.
        if data:
            print("\n", file=env.stdout)

        # Make dreams come true.
        response = Response(headers={'Server': server})
        return response(environ, start_response)

    return application
def make_app():
    """Make a WSGI app that has all the HTTPie pieces baked in."""
    env = Environment()
    # STDIN is ignored because HTTPony runs a server that doesn't care.
    # Additionally, it is needed or else pytest blows up.
    args = parser.parse_args(args=["/", "--ignore-stdin"], env=env)
    args.output_options = "HB"  # Output only requests.
    server = "HTTPony/{0}".format(__version__)

    def application(environ, start_response):
        # The WSGI server puts content length and type in the environment
        # even when not provided with the request. Drop them if they are empty.
        for key in ("CONTENT_LENGTH", "CONTENT_TYPE"):
            if environ.get(key) == "":
                del environ[key]

        wrequest = WerkzeugRequest(environ)
        data = wrequest.get_data()
        prepared = Request(
            method=wrequest.method,
            url=wrequest.url,
            headers=wrequest.headers,
            data=data,
        ).prepare()

        stream = streams.build_output_stream(
            args, env, prepared, response=None,
            output_options=args.output_options
        )
        streams.write_stream(stream, env.stdout, env.stdout_isatty)

        # When there is data in the request, give the next one breathing room.
        if data:
            print("\n", file=env.stdout)

        # Make dreams come true.
        response = Response(headers={"Server": server})
        return response(environ, start_response)

    return application
def test_windows_colorized_output(self, httpbin):
    """Smoke-test that a GET request with a default Environment succeeds."""
    # Spits out the colorized output.
    http(httpbin.url + '/get', env=Environment())
def main(
    args: Union[List[Union[str, bytes]], None] = None,
    env: Union[Environment, None] = None,
) -> ExitStatus:
    """
    The main function.

    Pre-process args, handle some special types of invocations,
    and run the main program with error handling.

    Return exit status code.

    :param args: full argv including the program name; defaults to
        ``sys.argv``. (The original defaults ``args=sys.argv`` and
        ``env=Environment()`` were mutable defaults evaluated once at
        definition time; the shared ``Environment`` leaked state across
        calls, so both are created lazily now.)
    :param env: runtime environment; a fresh one is created when omitted.
    """
    if args is None:
        args = sys.argv
    if env is None:
        env = Environment()

    program_name, *args = args
    env.program_name = os.path.basename(program_name)
    args = decode_raw_args(args, env.stdin_encoding)
    plugin_manager.load_installed_plugins()

    from httpie.cli.definition import parser

    if env.config.default_options:
        args = env.config.default_options + args

    include_debug_info = '--debug' in args
    include_traceback = include_debug_info or '--traceback' in args

    if include_debug_info:
        print_debug_info(env)
        if args == ['--debug']:
            return ExitStatus.SUCCESS

    exit_status = ExitStatus.SUCCESS

    try:
        parsed_args = parser.parse_args(
            args=args,
            env=env,
        )
    except KeyboardInterrupt:
        env.stderr.write('\n')
        if include_traceback:
            raise
        exit_status = ExitStatus.ERROR_CTRL_C
    except SystemExit as e:
        if e.code != ExitStatus.SUCCESS:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
    else:
        try:
            exit_status = program(
                args=parsed_args,
                env=env,
            )
        except KeyboardInterrupt:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR_CTRL_C
        except SystemExit as e:
            if e.code != ExitStatus.SUCCESS:
                env.stderr.write('\n')
                if include_traceback:
                    raise
                exit_status = ExitStatus.ERROR
        except requests.Timeout:
            exit_status = ExitStatus.ERROR_TIMEOUT
            env.log_error(f'Request timed out ({parsed_args.timeout}s).')
        except requests.TooManyRedirects:
            exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
            env.log_error(f'Too many redirects'
                          f' (--max-redirects={parsed_args.max_redirects}).')
        except Exception as e:
            # TODO: Further distinction between expected and unexpected errors.
            msg = str(e)
            # Enrich the message with request details when available.
            if hasattr(e, 'request'):
                request = e.request
                if hasattr(request, 'url'):
                    msg = (f'{msg} while doing a {request.method}'
                           f' request to URL: {request.url}')
            env.log_error(f'{type(e).__name__}: {msg}')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR

    return exit_status
def program(
    args: argparse.Namespace,
    env: Environment,
) -> ExitStatus:
    """
    The main program without error handling.
    """
    exit_status = ExitStatus.SUCCESS
    downloader = None

    try:
        if args.download:
            args.follow = True  # --download implies --follow.
            downloader = Downloader(output_file=args.output_file,
                                    progress_file=env.stderr,
                                    resume=args.download_resume)
            downloader.pre_request(args.headers)

        needs_separator = False

        def maybe_separate():
            # Emit a blank separator on a TTY when the previous message
            # requested one (streamed uploads).
            nonlocal needs_separator
            if env.stdout_isatty and needs_separator:
                needs_separator = False
                getattr(env.stdout, 'buffer', env.stdout).write(b'\n\n')

        initial_request: Optional[requests.PreparedRequest] = None
        final_response: Optional[requests.Response] = None

        def request_body_read_callback(chunk: bytes):
            should_pipe_to_stdout = (
                # Request body output desired
                OUT_REQ_BODY in args.output_options
                # & not `.read()` already pre-request (e.g., for compression)
                and initial_request
                # & non-EOF chunk
                and chunk
            )
            if should_pipe_to_stdout:
                msg = requests.PreparedRequest()
                msg.is_body_upload_chunk = True
                msg.body = chunk
                msg.headers = initial_request.headers
                write_message(requests_message=msg, env=env, args=args,
                              with_body=True, with_headers=False)

        messages = collect_messages(
            args=args,
            config_dir=env.config.directory,
            request_body_read_callback=request_body_read_callback,
        )
        for message in messages:
            maybe_separate()
            is_request = isinstance(message, requests.PreparedRequest)
            with_headers, with_body = get_output_options(args=args,
                                                         message=message)
            if is_request:
                if not initial_request:
                    initial_request = message
                is_streamed_upload = not isinstance(message.body, (str, bytes))
                if with_body:
                    # Streamed upload bodies are echoed via the read
                    # callback instead; request a separator afterwards.
                    with_body = not is_streamed_upload
                    needs_separator = is_streamed_upload
            else:
                final_response = message
                if args.check_status or downloader:
                    exit_status = http_status_to_exit_status(
                        http_status=message.status_code,
                        follow=args.follow,
                    )
                    if (not env.stdout_isatty
                            and exit_status != ExitStatus.SUCCESS):
                        env.log_error(
                            f'HTTP {message.raw.status} {message.raw.reason}',
                            level='warning')
            write_message(
                requests_message=message,
                env=env,
                args=args,
                with_headers=with_headers,
                with_body=with_body,
            )

        maybe_separate()

        if downloader and exit_status == ExitStatus.SUCCESS:
            # Last response body download.
            download_stream, download_to = downloader.start(
                initial_url=initial_request.url,
                final_response=final_response,
            )
            write_stream(
                stream=download_stream,
                outfile=download_to,
                flush=False,
            )
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                env.log_error(
                    'Incomplete download: size=%d; downloaded=%d' % (
                        downloader.status.total_size,
                        downloader.status.downloaded,
                    ))
        return exit_status
    finally:
        if downloader and not downloader.finished:
            downloader.failed()
        if (not isinstance(args, list)
                and args.output_file
                and args.output_file_specified):
            args.output_file.close()
def program(
    args: argparse.Namespace,
    env: Environment,
) -> ExitStatus:
    """
    The main program without error handling.
    """
    exit_status = ExitStatus.SUCCESS
    downloader = None

    try:
        if args.download:
            args.follow = True  # --download implies --follow.
            downloader = Downloader(output_file=args.output_file,
                                    progress_file=env.stderr,
                                    resume=args.download_resume)
            downloader.pre_request(args.headers)

        initial_request = None
        final_response = None

        for message in collect_messages(args, env.config.directory):
            write_message(
                requests_message=message,
                env=env,
                args=args,
            )
            if isinstance(message, requests.PreparedRequest):
                if not initial_request:
                    initial_request = message
            else:
                final_response = message
                if args.check_status or downloader:
                    exit_status = http_status_to_exit_status(
                        http_status=message.status_code,
                        follow=args.follow,
                    )
                    if (not env.stdout_isatty
                            and exit_status != ExitStatus.SUCCESS):
                        env.log_error(
                            f'HTTP {message.raw.status} {message.raw.reason}',
                            level='warning')

        if downloader and exit_status == ExitStatus.SUCCESS:
            # Last response body download.
            download_stream, download_to = downloader.start(
                initial_url=initial_request.url,
                final_response=final_response,
            )
            write_stream(
                stream=download_stream,
                outfile=download_to,
                flush=False,
            )
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                env.log_error(
                    'Incomplete download: size=%d; downloaded=%d' % (
                        downloader.status.total_size,
                        downloader.status.downloaded,
                    ))
        return exit_status
    finally:
        if downloader and not downloader.finished:
            downloader.failed()
        if (not isinstance(args, list)
                and args.output_file
                and args.output_file_specified):
            args.output_file.close()
def program(args: argparse.Namespace, env: Environment) -> ExitStatus:
    """
    The main program without error handling.
    """
    # TODO: Refactor and drastically simplify, especially so that
    # the separator logic is elsewhere.
    exit_status = ExitStatus.SUCCESS
    downloader = None
    initial_request: Optional[requests.PreparedRequest] = None
    final_response: Optional[requests.Response] = None

    def separate():
        getattr(env.stdout, 'buffer', env.stdout).write(MESSAGE_SEPARATOR_BYTES)

    def request_body_read_callback(chunk: bytes):
        should_pipe_to_stdout = bool(
            # Request body output desired
            OUT_REQ_BODY in args.output_options
            # & not `.read()` already pre-request (e.g., for compression)
            and initial_request
            # & non-EOF chunk
            and chunk
        )
        if should_pipe_to_stdout:
            msg = requests.PreparedRequest()
            msg.is_body_upload_chunk = True
            msg.body = chunk
            msg.headers = initial_request.headers
            write_message(requests_message=msg, env=env, args=args,
                          with_body=True, with_headers=False)

    try:
        if args.download:
            args.follow = True  # --download implies --follow.
            downloader = Downloader(output_file=args.output_file,
                                    progress_file=env.stderr,
                                    resume=args.download_resume)
            downloader.pre_request(args.headers)

        messages = collect_messages(
            args=args,
            config_dir=env.config.directory,
            request_body_read_callback=request_body_read_callback)

        force_separator = False
        prev_with_body = False

        if args.output_format_form == "RAW":
            # Process messages as they’re generated
            for message in messages:
                is_request = isinstance(message, requests.PreparedRequest)
                with_headers, with_body = get_output_options(args=args,
                                                             message=message)
                do_write_body = with_body
                if prev_with_body and (with_headers or with_body) and (
                        force_separator or not env.stdout_isatty):
                    # Separate after a previous message with body, if needed.
                    # See test_tokens.py.
                    separate()
                force_separator = False
                if is_request:
                    if not initial_request:
                        initial_request = message
                    is_streamed_upload = not isinstance(
                        message.body, (str, bytes))
                    if with_body:
                        do_write_body = not is_streamed_upload
                        force_separator = (is_streamed_upload
                                           and env.stdout_isatty)
                else:
                    final_response = message
                    if args.check_status or downloader:
                        exit_status = http_status_to_exit_status(
                            http_status=message.status_code,
                            follow=args.follow)
                        if exit_status != ExitStatus.SUCCESS and (
                                not env.stdout_isatty or args.quiet):
                            env.log_error(
                                f'HTTP {message.raw.status} {message.raw.reason}',
                                level='warning')
                write_message(requests_message=message, env=env, args=args,
                              with_headers=with_headers,
                              with_body=do_write_body)
                prev_with_body = with_body
        else:
            # Collect everything first and emit it as a single JSON document.
            all_messages_together = []
            for message in messages:
                is_request = isinstance(message, requests.PreparedRequest)
                with_headers, with_body = get_output_options(args=args,
                                                             message=message)
                # force_separator = False
                if is_request:
                    with_headers_req = with_headers
                    with_body_req = with_body
                    if not initial_request:
                        initial_request = message
                    is_streamed_upload = not isinstance(
                        message.body, (str, bytes))
                    if with_body:
                        with_body_req = not is_streamed_upload
                        # force_separator = is_streamed_upload and env.stdout_isatty
                else:
                    with_headers_res = with_headers
                    with_body_res = with_body
                    final_response = message
                    if args.check_status or downloader:
                        exit_status = http_status_to_exit_status(
                            http_status=message.status_code,
                            follow=args.follow)
                        if exit_status != ExitStatus.SUCCESS and (
                                not env.stdout_isatty or args.quiet):
                            env.log_error(
                                f'HTTP {message.raw.status} {message.raw.reason}',
                                level='warning')
                all_messages_together.append(message)
            # NOTE(review): with_headers_req/with_body_req (and the _res pair)
            # are unbound here if no request/response message was seen —
            # presumably collect_messages() always yields at least one of
            # each; verify against callers.
            write_message_json(requests_message=all_messages_together,
                               env=env, args=args,
                               with_headers_req=with_headers_req,
                               with_body_req=with_body_req,
                               with_headers_res=with_headers_res,
                               with_body_res=with_body_res)
            prev_with_body = with_body

        # Cleanup
        if force_separator:
            separate()
        if downloader and exit_status == ExitStatus.SUCCESS:
            # Last response body download.
            download_stream, download_to = downloader.start(
                initial_url=initial_request.url,
                final_response=final_response,
            )
            write_stream(stream=download_stream, outfile=download_to,
                         flush=False)
            downloader.finish()
            if downloader.interrupted:
                exit_status = ExitStatus.ERROR
                env.log_error(
                    'Incomplete download: size=%d; downloaded=%d' % (
                        downloader.status.total_size,
                        downloader.status.downloaded,
                    ))
        return exit_status
    finally:
        if downloader and not downloader.finished:
            downloader.failed()
        if (not isinstance(args, list)
                and args.output_file
                and args.output_file_specified):
            args.output_file.close()
def main(args=None, env=None):
    """Run the main program and write the output to ``env.stdout``.

    Return exit status code.

    :param args: CLI arguments (program name excluded); defaults to
        ``sys.argv[1:]``. (The original defaults ``args=sys.argv[1:]`` and
        ``env=Environment()`` were evaluated once at import time; the shared
        ``Environment`` instance leaked state between calls, so both are now
        created lazily.)
    :param env: runtime environment; a fresh one is created when omitted.
    """
    if args is None:
        args = sys.argv[1:]
    if env is None:
        env = Environment()

    args = decode_args(args, env.stdin_encoding)
    plugin_manager.load_installed_plugins()

    from httpie.cli import parser

    if env.config.default_options:
        args = env.config.default_options + args

    def error(msg, *args, **kwargs):
        msg = msg % args
        level = kwargs.get('level', 'error')
        env.stderr.write('\nhttp: %s: %s\n' % (level, msg))

    debug = '--debug' in args
    traceback = debug or '--traceback' in args
    exit_status = ExitStatus.OK

    if debug:
        print_debug_info(env)
        if args == ['--debug']:
            return exit_status

    download = None
    try:
        args = parser.parse_args(args=args, env=env)

        if args.download:
            args.follow = True  # --download implies --follow.
            download = Download(output_file=args.output_file,
                                progress_file=env.stderr,
                                resume=args.download_resume)
            download.pre_request(args.headers)

        response = get_response(args, config_dir=env.config.directory)

        if args.check_status or download:
            exit_status = get_exit_status(http_status=response.status_code,
                                          follow=args.follow)
            if not env.stdout_isatty and exit_status != ExitStatus.OK:
                error('HTTP %s %s', response.raw.status, response.raw.reason,
                      level='warning')

        write_kwargs = {
            'stream': build_output_stream(args, env,
                                          response.request, response),
            # This will in fact be `stderr` with `--download`
            'outfile': env.stdout,
            'flush': env.stdout_isatty or args.stream
        }
        try:
            if env.is_windows and is_py3 and 'colors' in args.prettify:
                write_with_colors_win_py3(**write_kwargs)
            else:
                write(**write_kwargs)

            if download and exit_status == ExitStatus.OK:
                # Response body download.
                download_stream, download_to = download.start(response)
                write(
                    stream=download_stream,
                    outfile=download_to,
                    flush=False,
                )
                download.finish()
                if download.interrupted:
                    exit_status = ExitStatus.ERROR
                    error('Incomplete download: size=%d; downloaded=%d' % (
                        download.status.total_size,
                        download.status.downloaded
                    ))
        except IOError as e:
            if not traceback and e.errno == errno.EPIPE:
                # Ignore broken pipes unless --traceback.
                env.stderr.write('\n')
            else:
                raise
    except KeyboardInterrupt:
        if traceback:
            raise
        env.stderr.write('\n')
        exit_status = ExitStatus.ERROR
    except SystemExit as e:
        if e.code != ExitStatus.OK:
            if traceback:
                raise
            env.stderr.write('\n')
            exit_status = ExitStatus.ERROR
    except requests.Timeout:
        exit_status = ExitStatus.ERROR_TIMEOUT
        error('Request timed out (%ss).', args.timeout)
    except Exception as e:
        # TODO: Better distinction between expected and unexpected errors.
        # Network errors vs. bugs, etc.
        if traceback:
            raise
        error('%s: %s', type(e).__name__, str(e))
        exit_status = ExitStatus.ERROR
    finally:
        if download and not download.finished:
            download.failed()

    return exit_status
def main(args=None, env=None, custom_log_error=None):
    """
    The main function.

    Pre-process args, handle some special types of invocations,
    and run the main program with error handling.

    Return exit status code.

    :param args: CLI arguments (program name excluded); defaults to
        ``sys.argv[1:]``. (The original defaults ``args=sys.argv[1:]`` and
        ``env=Environment()`` were evaluated once at import time; the shared
        ``Environment`` instance leaked state between calls, so both are now
        created lazily.)
    :param env: runtime environment; a fresh one is created when omitted.
    :param custom_log_error: optional replacement for the default error logger.
    """
    if args is None:
        args = sys.argv[1:]
    if env is None:
        env = Environment()

    args = decode_args(args, env.stdin_encoding)
    plugin_manager.load_installed_plugins()

    def log_error(msg, *args, **kwargs):
        msg = msg % args
        level = kwargs.get('level', 'error')
        assert level in ['error', 'warning']
        env.stderr.write('\nhttp: %s: %s\n' % (level, msg))

    from httpie.cli import parser

    if env.config.default_options:
        args = env.config.default_options + args

    if custom_log_error:
        log_error = custom_log_error

    include_debug_info = '--debug' in args
    include_traceback = include_debug_info or '--traceback' in args

    if include_debug_info:
        print_debug_info(env)
        if args == ['--debug']:
            return ExitStatus.OK

    exit_status = ExitStatus.OK

    try:
        parsed_args = parser.parse_args(args=args, env=env)
    except KeyboardInterrupt:
        env.stderr.write('\n')
        if include_traceback:
            raise
        exit_status = ExitStatus.ERROR
    except SystemExit as e:
        if e.code != ExitStatus.OK:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
    else:
        try:
            exit_status = program(
                args=parsed_args,
                env=env,
                log_error=log_error,
            )
        except KeyboardInterrupt:
            env.stderr.write('\n')
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR
        except SystemExit as e:
            if e.code != ExitStatus.OK:
                env.stderr.write('\n')
                if include_traceback:
                    raise
                exit_status = ExitStatus.ERROR
        except requests.Timeout:
            exit_status = ExitStatus.ERROR_TIMEOUT
            log_error('Request timed out (%ss).', parsed_args.timeout)
        except requests.TooManyRedirects:
            exit_status = ExitStatus.ERROR_TOO_MANY_REDIRECTS
            log_error('Too many redirects (--max-redirects=%s).',
                      parsed_args.max_redirects)
        except Exception as e:
            # TODO: Further distinction between expected and unexpected errors.
            msg = str(e)
            # Enrich the message with request details when available.
            if hasattr(e, 'request'):
                request = e.request
                if hasattr(request, 'url'):
                    msg += ' while doing %s request to URL: %s' % (
                        request.method, request.url)
            log_error('%s: %s', type(e).__name__, msg)
            if include_traceback:
                raise
            exit_status = ExitStatus.ERROR

    return exit_status
def test_current_version():
    """The version recorded in the config metadata must match __version__."""
    recorded = Environment().config['__meta__']['httpie']
    assert recorded == __version__