def configure(self, updated):
    opts = ctx.options
    if opts.add_upstream_certs_to_client_chain and not opts.upstream_cert:
        raise exceptions.OptionsError(
            "The no-upstream-cert and add-upstream-certs-to-client-chain "
            "options are mutually exclusive. If no-upstream-cert is enabled "
            "then the upstream certificate is not retrieved before generating "
            "the client certificate chain."
        )
    if "body_size_limit" in updated:
        try:
            human.parse_size(opts.body_size_limit)
        except ValueError as e:
            raise exceptions.OptionsError(
                "Invalid body size limit specification: %s" % opts.body_size_limit
            )
    if "mode" in updated:
        mode = opts.mode
        if mode.startswith("reverse:") or mode.startswith("upstream:"):
            try:
                server_spec.parse_with_mode(mode)
            except ValueError as e:
                raise exceptions.OptionsError(str(e)) from e
        elif mode == "transparent":
            if not platform.original_addr:
                raise exceptions.OptionsError(
                    "Transparent mode not supported on this platform."
                )
        elif mode not in ["regular", "socks5"]:
            raise exceptions.OptionsError(
                "Invalid mode specification: %s" % mode
            )
def test_parse_size():
    assert human.parse_size("0") == 0
    assert human.parse_size("0b") == 0
    assert human.parse_size("1") == 1
    assert human.parse_size("1k") == 1024
    assert human.parse_size("1m") == 1024**2
    assert human.parse_size("1g") == 1024**3
    with pytest.raises(ValueError):
        human.parse_size("1f")
    with pytest.raises(ValueError):
        human.parse_size("ak")
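# --- Illustrative sketch (not part of the original sources) -----------------
# A minimal parse_size implementation that would satisfy the assertions in
# test_parse_size above, assuming binary (1024-based) suffixes and a
# pass-through for None. The name parse_size_sketch is hypothetical; this is
# a reference sketch, not necessarily mitmproxy's actual human.parse_size.
from typing import Optional

_SIZE_SUFFIXES = {"b": 1, "k": 1024, "m": 1024 ** 2, "g": 1024 ** 3, "t": 1024 ** 4}


def parse_size_sketch(spec: Optional[str]) -> Optional[int]:
    """Parse a human-readable size such as '100k' into a byte count."""
    if spec is None:
        return None
    s = spec.strip().lower()
    multiplier = 1
    if s and s[-1] in _SIZE_SUFFIXES:
        multiplier = _SIZE_SUFFIXES[s[-1]]
        s = s[:-1]
    try:
        return int(s) * multiplier
    except ValueError:
        raise ValueError("Invalid size specification: %r" % spec)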
def read_request_body(self, request):
    expected_size = http1.expected_http_body_size(request)
    return http1.read_body(
        self.client_conn.rfile,
        expected_size,
        human.parse_size(self.config.options.body_size_limit)
    )
def read_response_body(self, request, response):
    expected_size = http1.expected_http_body_size(request, response)
    return http1.read_body(
        self.server_conn.rfile,
        expected_size,
        human.parse_size(self.config.options.body_size_limit)
    )
def configure(self, updated):
    opts = ctx.options
    if opts.add_upstream_certs_to_client_chain and not opts.upstream_cert:
        raise exceptions.OptionsError(
            "The no-upstream-cert and add-upstream-certs-to-client-chain "
            "options are mutually exclusive. If no-upstream-cert is enabled "
            "then the upstream certificate is not retrieved before generating "
            "the client certificate chain."
        )
    if opts.add_upstream_certs_to_client_chain and not opts.ssl_insecure:
        raise exceptions.OptionsError(
            "The verify-upstream-cert requires certificate verification to be disabled. "
            "If upstream certificates are verified then extra upstream certificates are "
            "not available for inclusion to the client chain."
        )
    if "body_size_limit" in updated:
        try:
            human.parse_size(opts.body_size_limit)
        except ValueError as e:
            raise exceptions.OptionsError(
                "Invalid body size limit specification: %s" % opts.body_size_limit
            )
    if "mode" in updated:
        mode = opts.mode
        if mode.startswith("reverse:") or mode.startswith("upstream:"):
            try:
                server_spec.parse_with_mode(mode)
            except ValueError as e:
                raise exceptions.OptionsError(str(e)) from e
        elif mode == "transparent":
            if not platform.original_addr:
                raise exceptions.OptionsError(
                    "Transparent mode not supported on this platform."
                )
        elif mode not in ["regular", "socks5"]:
            raise exceptions.OptionsError(
                "Invalid mode specification: %s" % mode
            )
    if "client_certs" in updated:
        if opts.client_certs:
            client_certs = os.path.expanduser(opts.client_certs)
            if not os.path.exists(client_certs):
                raise exceptions.OptionsError(
                    "Client certificate path does not exist: {}".format(opts.client_certs)
                )
def test_parse_size():
    assert human.parse_size("0") == 0
    assert human.parse_size("0b") == 0
    assert human.parse_size("1") == 1
    assert human.parse_size("1k") == 1024
    assert human.parse_size("1m") == 1024**2
    assert human.parse_size("1g") == 1024**3
    tutils.raises(ValueError, human.parse_size, "1f")
    tutils.raises(ValueError, human.parse_size, "ak")
def _handle_data_received(self, eid, event, source_conn):
    bsl = human.parse_size(self.config.options.body_size_limit)
    if bsl and self.streams[eid].queued_data_length > bsl:
        self.streams[eid].kill()
        self.connections[source_conn].safe_reset_stream(
            event.stream_id,
            h2.errors.ErrorCodes.REFUSED_STREAM
        )
        self.log("HTTP body too large. Limit is {}.".format(bsl), "info")
    else:
        self.streams[eid].data_queue.put(event.data)
        self.streams[eid].queued_data_length += len(event.data)

    # always acknowledge received data with a WINDOW_UPDATE frame
    self.connections[source_conn].safe_acknowledge_received_data(
        event.flow_controlled_length,
        event.stream_id
    )
    return True
def _handle_data_received(self, eid, event, source_conn):
    bsl = human.parse_size(self.config.options.body_size_limit)
    if bsl and self.streams[eid].queued_data_length > bsl:
        self.streams[eid].kill()
        self.connections[source_conn].safe_reset_stream(
            event.stream_id,
            h2.errors.ErrorCodes.REFUSED_STREAM
        )
        self.log("HTTP body too large. Limit is {}.".format(bsl), "info")
    else:
        self.streams[eid].data_queue.put(event.data)
        self.streams[eid].queued_data_length += len(event.data)
    self.connections[source_conn].safe_acknowledge_received_data(
        event.flow_controlled_length,
        event.stream_id
    )
    return True
def replay(self, f):  # pragma: no cover
    f.live = True
    r = f.request
    bsl = human.parse_size(self.options.body_size_limit)
    first_line_format_backup = r.first_line_format
    server = None
    global new, cur_cycle, cur_group
    try:
        f.response = None

        # If we have a channel, run script hooks.
        request_reply = self.channel.ask("request", f)
        if isinstance(request_reply, http.HTTPResponse):
            f.response = request_reply

        if not f.response:
            # In all modes, we directly connect to the server displayed
            if self.options.mode.startswith("upstream:"):
                server_address = server_spec.parse_with_mode(
                    self.options.mode)[1].address
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    connect_request = http.make_connect_request(
                        (r.data.host, r.port))
                    server.wfile.write(
                        http1.assemble_request(connect_request))
                    server.wfile.flush()
                    resp = http1.read_response(
                        server.rfile,
                        connect_request,
                        body_size_limit=bsl)
                    if resp.status_code != 200:
                        raise exceptions.ReplayException(
                            "Upstream server refuses CONNECT request")
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options))
                    r.first_line_format = "relative"
                else:
                    r.first_line_format = "absolute"
            else:
                server_address = (r.host, r.port)
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options))
                r.first_line_format = "relative"

            server.wfile.write(http1.assemble_request(r))
            server.wfile.flush()

            if f.server_conn:
                f.server_conn.close()
            f.server_conn = server

            f.response = http.HTTPResponse.wrap(
                http1.read_response(server.rfile, r, body_size_limit=bsl))

        response_reply = self.channel.ask("response", f)
        # new.append(f)  # record the response
        cur_cycle[cur_group] = f
        if response_reply == exceptions.Kill:
            raise exceptions.Kill()
    except (exceptions.ReplayException, exceptions.NetlibException) as e:
        f.error = flow.Error(str(e))
        self.channel.ask("error", f)
    except exceptions.Kill:
        self.channel.tell("log", log.LogEntry("Connection killed", "info"))
    except Exception as e:
        self.channel.tell("log", log.LogEntry(repr(e), "error"))
    finally:
        r.first_line_format = first_line_format_backup
        f.live = False
        if server.connected():
            server.finish()
            server.close()
def get_common_options(args):
    stickycookie, stickyauth = None, None
    if args.stickycookie_filt:
        stickycookie = args.stickycookie_filt

    if args.stickyauth_filt:
        stickyauth = args.stickyauth_filt

    stream_large_bodies = args.stream_large_bodies
    if stream_large_bodies:
        stream_large_bodies = human.parse_size(stream_large_bodies)

    if args.streamfile and args.streamfile[0] == args.rfile:
        if args.streamfile[1] == "wb":
            raise exceptions.OptionsError(
                "Cannot use '{}' for both reading and writing flows. "
                "Are you looking for --afile?".format(args.rfile)
            )
        else:
            raise exceptions.OptionsError(
                "Cannot use '{}' for both reading and appending flows. "
                "That would trigger an infinite loop.".format(args.rfile)
            )

    # Proxy config
    certs = []
    for i in args.certs or []:
        parts = i.split("=", 1)
        if len(parts) == 1:
            parts = ["*", parts[0]]
        certs.append(parts)

    body_size_limit = args.body_size_limit
    if body_size_limit:
        try:
            body_size_limit = human.parse_size(body_size_limit)
        except ValueError as e:
            raise exceptions.OptionsError(
                "Invalid body size limit specification: %s" % body_size_limit
            )

    # Establish proxy mode
    c = 0
    mode, upstream_server = "regular", None
    if args.transparent_proxy:
        c += 1
        if not platform.original_addr:
            raise exceptions.OptionsError(
                "Transparent mode not supported on this platform."
            )
        mode = "transparent"
    if args.socks_proxy:
        c += 1
        mode = "socks5"
    if args.reverse_proxy:
        c += 1
        mode = "reverse"
        upstream_server = args.reverse_proxy
    if args.upstream_proxy:
        c += 1
        mode = "upstream"
        upstream_server = args.upstream_proxy
    if c > 1:
        raise exceptions.OptionsError(
            "Transparent, SOCKS5, reverse and upstream proxy mode "
            "are mutually exclusive. Read the docs on proxy modes "
            "to understand why."
        )
    if args.add_upstream_certs_to_client_chain and args.no_upstream_cert:
        raise exceptions.OptionsError(
            "The no-upstream-cert and add-upstream-certs-to-client-chain "
            "options are mutually exclusive. If no-upstream-cert is enabled "
            "then the upstream certificate is not retrieved before generating "
            "the client certificate chain."
        )

    if args.quiet:
        args.verbose = 0

    return dict(
        onboarding=args.onboarding,
        onboarding_host=args.onboarding_host,
        onboarding_port=args.onboarding_port,
        anticache=args.anticache,
        anticomp=args.anticomp,
        client_replay=args.client_replay,
        replay_kill_extra=args.replay_kill_extra,
        no_server=args.no_server,
        refresh_server_playback=not args.norefresh,
        server_replay_use_headers=args.server_replay_use_headers,
        rfile=args.rfile,
        replacements=args.replacements,
        replacement_files=args.replacement_files,
        setheaders=args.setheaders,
        keep_host_header=args.keep_host_header,
        server_replay=args.server_replay,
        scripts=args.scripts,
        stickycookie=stickycookie,
        stickyauth=stickyauth,
        stream_large_bodies=stream_large_bodies,
        showhost=args.showhost,
        streamfile=args.streamfile[0] if args.streamfile else None,
        streamfile_append=True if args.streamfile and args.streamfile[1] == "a" else False,
        verbosity=args.verbose,
        server_replay_nopop=args.server_replay_nopop,
        server_replay_ignore_content=args.server_replay_ignore_content,
        server_replay_ignore_params=args.server_replay_ignore_params,
        server_replay_ignore_payload_params=args.server_replay_ignore_payload_params,
        server_replay_ignore_host=args.server_replay_ignore_host,
        auth_nonanonymous=args.auth_nonanonymous,
        auth_singleuser=args.auth_singleuser,
        auth_htpasswd=args.auth_htpasswd,
        add_upstream_certs_to_client_chain=args.add_upstream_certs_to_client_chain,
        body_size_limit=body_size_limit,
        cadir=args.cadir,
        certs=certs,
        ciphers_client=args.ciphers_client,
        ciphers_server=args.ciphers_server,
        clientcerts=args.clientcerts,
        ignore_hosts=args.ignore_hosts,
        listen_host=args.addr,
        listen_port=args.port,
        upstream_bind_address=args.upstream_bind_address,
        mode=mode,
        no_upstream_cert=args.no_upstream_cert,
        spoof_source_address=args.spoof_source_address,
        http2=args.http2,
        http2_priority=args.http2_priority,
        websocket=args.websocket,
        rawtcp=args.rawtcp,
        upstream_server=upstream_server,
        upstream_auth=args.upstream_auth,
        ssl_version_client=args.ssl_version_client,
        ssl_version_server=args.ssl_version_server,
        ssl_insecure=args.ssl_insecure,
        ssl_verify_upstream_trusted_cadir=args.ssl_verify_upstream_trusted_cadir,
        ssl_verify_upstream_trusted_ca=args.ssl_verify_upstream_trusted_ca,
        tcp_hosts=args.tcp_hosts,
    )
def read_response_body(self, request, response):
    expected_size = http1.expected_http_body_size(request, response)
    return http1.read_body(
        self.server_conn.rfile,
        expected_size,
        human.parse_size(self.config.options.body_size_limit)
    )
def get_common_options(args):
    stickycookie, stickyauth = None, None
    if args.stickycookie_filt:
        stickycookie = args.stickycookie_filt

    if args.stickyauth_filt:
        stickyauth = args.stickyauth_filt

    stream_large_bodies = args.stream_large_bodies
    if stream_large_bodies:
        stream_large_bodies = human.parse_size(stream_large_bodies)

    reps = []
    for i in args.replace or []:
        try:
            p = parse_replace_hook(i)
        except ParseException as e:
            raise exceptions.OptionsError(e)
        reps.append(p)
    for i in args.replace_file or []:
        try:
            patt, rex, path = parse_replace_hook(i)
        except ParseException as e:
            raise exceptions.OptionsError(e)
        try:
            v = open(path, "rb").read()
        except IOError as e:
            raise exceptions.OptionsError("Could not read replace file: %s" % path)
        reps.append((patt, rex, v))

    setheaders = []
    for i in args.setheader or []:
        try:
            p = parse_setheader(i)
        except ParseException as e:
            raise exceptions.OptionsError(e)
        setheaders.append(p)

    if args.streamfile and args.streamfile[0] == args.rfile:
        if args.streamfile[1] == "wb":
            raise exceptions.OptionsError(
                "Cannot use '{}' for both reading and writing flows. "
                "Are you looking for --afile?".format(args.rfile)
            )
        else:
            raise exceptions.OptionsError(
                "Cannot use '{}' for both reading and appending flows. "
                "That would trigger an infinite loop.".format(args.rfile)
            )

    # Proxy config
    certs = []
    for i in args.certs or []:
        parts = i.split("=", 1)
        if len(parts) == 1:
            parts = ["*", parts[0]]
        certs.append(parts)

    body_size_limit = args.body_size_limit
    if body_size_limit:
        try:
            body_size_limit = human.parse_size(body_size_limit)
        except ValueError as e:
            raise exceptions.OptionsError(
                "Invalid body size limit specification: %s" % body_size_limit
            )

    # Establish proxy mode
    c = 0
    mode, upstream_server = "regular", None
    if args.transparent_proxy:
        c += 1
        if not platform.original_addr:
            raise exceptions.OptionsError(
                "Transparent mode not supported on this platform."
            )
        mode = "transparent"
    if args.socks_proxy:
        c += 1
        mode = "socks5"
    if args.reverse_proxy:
        c += 1
        mode = "reverse"
        upstream_server = args.reverse_proxy
    if args.upstream_proxy:
        c += 1
        mode = "upstream"
        upstream_server = args.upstream_proxy
    if c > 1:
        raise exceptions.OptionsError(
            "Transparent, SOCKS5, reverse and upstream proxy mode "
            "are mutually exclusive. Read the docs on proxy modes "
            "to understand why."
        )
    if args.add_upstream_certs_to_client_chain and args.no_upstream_cert:
        raise exceptions.OptionsError(
            "The no-upstream-cert and add-upstream-certs-to-client-chain "
            "options are mutually exclusive. If no-upstream-cert is enabled "
            "then the upstream certificate is not retrieved before generating "
            "the client certificate chain."
        )

    if args.quiet:
        args.verbose = 0

    return dict(
        app=args.app,
        app_host=args.app_host,
        app_port=args.app_port,
        anticache=args.anticache,
        anticomp=args.anticomp,
        client_replay=args.client_replay,
        replay_kill_extra=args.replay_kill_extra,
        no_server=args.no_server,
        refresh_server_playback=not args.norefresh,
        server_replay_use_headers=args.server_replay_use_headers,
        rfile=args.rfile,
        replacements=reps,
        setheaders=setheaders,
        server_replay=args.server_replay,
        scripts=args.scripts,
        stickycookie=stickycookie,
        stickyauth=stickyauth,
        stream_large_bodies=stream_large_bodies,
        showhost=args.showhost,
        streamfile=args.streamfile[0] if args.streamfile else None,
        streamfile_append=True if args.streamfile and args.streamfile[1] == "a" else False,
        verbosity=args.verbose,
        server_replay_nopop=args.server_replay_nopop,
        server_replay_ignore_content=args.server_replay_ignore_content,
        server_replay_ignore_params=args.server_replay_ignore_params,
        server_replay_ignore_payload_params=args.server_replay_ignore_payload_params,
        server_replay_ignore_host=args.server_replay_ignore_host,
        auth_nonanonymous=args.auth_nonanonymous,
        auth_singleuser=args.auth_singleuser,
        auth_htpasswd=args.auth_htpasswd,
        add_upstream_certs_to_client_chain=args.add_upstream_certs_to_client_chain,
        body_size_limit=body_size_limit,
        cadir=args.cadir,
        certs=certs,
        ciphers_client=args.ciphers_client,
        ciphers_server=args.ciphers_server,
        clientcerts=args.clientcerts,
        http2=args.http2,
        ignore_hosts=args.ignore_hosts,
        listen_host=args.addr,
        listen_port=args.port,
        upstream_bind_address=args.upstream_bind_address,
        mode=mode,
        no_upstream_cert=args.no_upstream_cert,
        spoof_source_address=args.spoof_source_address,
        rawtcp=args.rawtcp,
        websocket=args.websocket,
        upstream_server=upstream_server,
        upstream_auth=args.upstream_auth,
        ssl_version_client=args.ssl_version_client,
        ssl_version_server=args.ssl_version_server,
        ssl_insecure=args.ssl_insecure,
        ssl_verify_upstream_trusted_cadir=args.ssl_verify_upstream_trusted_cadir,
        ssl_verify_upstream_trusted_ca=args.ssl_verify_upstream_trusted_ca,
        tcp_hosts=args.tcp_hosts,
    )
def check_body_size(self, request: bool) -> layer.CommandGenerator[bool]:
    """
    Check if the body size exceeds limits imposed by stream_large_bodies or body_size_limit.

    Returns `True` if the body size exceeds body_size_limit and further processing should be stopped.
    """
    if not (self.context.options.stream_large_bodies or self.context.options.body_size_limit):
        return False

    # Step 1: Determine the expected body size. This can either come from a known content-length header,
    # or from the amount of currently buffered bytes (e.g. for chunked encoding).
    response = not request
    expected_size: Optional[int]
    # the 'late' case: we already started consuming the body
    if request and self.request_body_buf:
        expected_size = len(self.request_body_buf)
    elif response and self.response_body_buf:
        expected_size = len(self.response_body_buf)
    else:
        # the 'early' case: we have not started consuming the body
        try:
            expected_size = expected_http_body_size(
                self.flow.request, self.flow.response if response else None
            )
        except ValueError:  # pragma: no cover
            # we just don't stream/kill malformed content-length headers.
            expected_size = None

    if expected_size is None or expected_size <= 0:
        return False

    # Step 2: Do we need to abort this?
    max_total_size = human.parse_size(self.context.options.body_size_limit)
    if max_total_size is not None and expected_size > max_total_size:
        if request and not self.request_body_buf:
            yield HttpRequestHeadersHook(self.flow)
        if response and not self.response_body_buf:
            yield HttpResponseHeadersHook(self.flow)

        err_msg = f"{'Request' if request else 'Response'} body exceeds mitmproxy's body_size_limit."
        err_code = 413 if request else 502

        self.flow.error = flow.Error(err_msg)
        yield HttpErrorHook(self.flow)
        yield SendHttp(
            ResponseProtocolError(self.stream_id, err_msg, err_code),
            self.context.client
        )
        self.client_state = self.state_errored
        if response:
            yield SendHttp(
                RequestProtocolError(self.stream_id, err_msg, err_code),
                self.context.server
            )
            self.server_state = self.state_errored
        self.flow.live = False
        return True

    # Step 3: Do we need to stream this?
    max_stream_size = human.parse_size(self.context.options.stream_large_bodies)
    if max_stream_size is not None and expected_size > max_stream_size:
        if request:
            self.flow.request.stream = True
            if self.request_body_buf:
                # clear buffer and then fake a DataReceived event with everything we had in the buffer so far.
                body_buf = self.request_body_buf
                self.request_body_buf = b""
                yield from self.start_request_stream()
                yield from self.handle_event(RequestData(self.stream_id, body_buf))
        if response:
            assert self.flow.response
            self.flow.response.stream = True
            if self.response_body_buf:
                body_buf = self.response_body_buf
                self.response_body_buf = b""
                yield from self.start_response_stream()
                yield from self.handle_event(ResponseData(self.stream_id, body_buf))

    return False
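# --- Illustrative sketch (not part of the original sources) -----------------
# A simplified, standalone view of the two-threshold decision made in
# check_body_size above: bodies larger than body_size_limit are rejected
# (the 413/502 error path), bodies larger than stream_large_bodies are
# streamed instead of buffered. Function and parameter names here are
# hypothetical; only the thresholds and their precedence mirror the code above.
from typing import Optional

from mitmproxy.utils import human  # assumed import path for human.parse_size


def body_size_decision_sketch(
    expected_size: Optional[int],
    body_size_limit: Optional[str],
    stream_large_bodies: Optional[str],
) -> str:
    """Return 'kill', 'stream', or 'buffer' for a body of expected_size bytes."""
    if expected_size is None or expected_size <= 0:
        return "buffer"
    max_total_size = human.parse_size(body_size_limit)
    if max_total_size is not None and expected_size > max_total_size:
        return "kill"  # check_body_size raises a protocol error and stops processing
    max_stream_size = human.parse_size(stream_large_bodies)
    if max_stream_size is not None and expected_size > max_stream_size:
        return "stream"  # check_body_size sets flow.request/response.stream = True
    return "buffer"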
def read_request_body(self, request):
    expected_size = http1.expected_http_body_size(request)
    return http1.read_body(
        self.client_conn.rfile,
        expected_size,
        human.parse_size(self.config.options.body_size_limit)
    )
def configure(self, updated):
    if "stream_large_bodies" in updated and ctx.options.stream_large_bodies:
        try:
            self.max_size = human.parse_size(ctx.options.stream_large_bodies)
        except ValueError as e:
            raise exceptions.OptionsError(e)
def replay(self, f):  # pragma: no cover
    f.live = True
    r = f.request
    bsl = human.parse_size(self.options.body_size_limit)
    authority_backup = r.authority
    server = None
    try:
        f.response = None

        # If we have a channel, run script hooks.
        request_reply = self.channel.ask("request", f)
        if isinstance(request_reply, http.HTTPResponse):
            f.response = request_reply

        if not f.response:
            # In all modes, we directly connect to the server displayed
            if self.options.mode.startswith("upstream:"):
                server_address = server_spec.parse_with_mode(
                    self.options.mode)[1].address
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    connect_request = http.make_connect_request(
                        (r.data.host, r.port))
                    server.wfile.write(
                        http1.assemble_request(connect_request))
                    server.wfile.flush()
                    resp = http1.read_response(
                        server.rfile,
                        connect_request,
                        body_size_limit=bsl)
                    if resp.status_code != 200:
                        raise exceptions.ReplayException(
                            "Upstream server refuses CONNECT request")
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options))
                    r.authority = b""
                else:
                    r.authority = hostport(r.scheme, r.host, r.port)
            else:
                server_address = (r.host, r.port)
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options))
                r.authority = ""

            server.wfile.write(http1.assemble_request(r))
            server.wfile.flush()
            r.timestamp_start = r.timestamp_end = time.time()

            if f.server_conn:
                f.server_conn.close()
            f.server_conn = server

            f.response = http1.read_response(server.rfile, r, body_size_limit=bsl)
        response_reply = self.channel.ask("response", f)
        if response_reply == exceptions.Kill:
            raise exceptions.Kill()
    except (exceptions.ReplayException, exceptions.NetlibException) as e:
        f.error = flow.Error(str(e))
        self.channel.ask("error", f)
    except exceptions.Kill:
        self.channel.tell("log", log.LogEntry(flow.Error.KILLED_MESSAGE, "info"))
    except Exception as e:
        self.channel.tell("log", log.LogEntry(repr(e), "error"))
    finally:
        r.authority = authority_backup
        f.live = False
        if server and server.connected():
            server.finish()
            server.close()
def args_pathod(argv, stdout_=sys.stdout, stderr_=sys.stderr):
    parser = argparse.ArgumentParser(
        description='A pathological HTTP/S daemon.'
    )
    parser.add_argument(
        '--version',
        action='version',
        version="pathod " + version.VERSION
    )
    parser.add_argument(
        "-p",
        dest='port', default=9999, type=int,
        help='Port. Specify 0 to pick an arbitrary empty port. (9999)'
    )
    parser.add_argument(
        "-l",
        dest='address', default="127.0.0.1", type=str,
        help='Listening address. (127.0.0.1)'
    )
    parser.add_argument(
        "-a",
        dest='anchors', default=[], type=str, action="append",
        metavar="ANCHOR",
        help="""
            Add an anchor. Specified as a string with the form
            pattern=spec or pattern=filepath, where pattern is a regular
            expression.
        """
    )
    parser.add_argument(
        "-c", dest='craftanchor', default=pathod.DEFAULT_CRAFT_ANCHOR,
        type=str,
        help="""
            URL path specifying prefix for URL crafting commands. (%s)
        """ % pathod.DEFAULT_CRAFT_ANCHOR
    )
    parser.add_argument(
        "--confdir",
        action="store", type=str, dest="confdir", default='~/.mitmproxy',
        help="Configuration directory. (~/.mitmproxy)"
    )
    parser.add_argument(
        "-d", dest='staticdir', default=None, type=str,
        help='Directory for static files.'
    )
    parser.add_argument(
        "-D", dest='daemonize', default=False, action="store_true",
        help='Daemonize.'
    )
    parser.add_argument(
        "-t", dest="timeout", type=int, default=None,
        help="Connection timeout"
    )
    parser.add_argument(
        "--limit-size",
        dest='sizelimit', default=None, type=str,
        help='Size limit of served responses. Understands size suffixes, i.e. 100k.'
    )
    parser.add_argument(
        "--nohang", dest='nohang', default=False, action="store_true",
        help='Disable pauses during crafted response generation.'
    )
    parser.add_argument(
        "--nocraft",
        dest='nocraft', default=False, action="store_true",
        help='Disable response crafting. If anchors are specified, they still work.'
    )
    parser.add_argument(
        "--webdebug", dest='webdebug', default=False, action="store_true",
        help='Debugging mode for the web app (dev only).'
    )

    group = parser.add_argument_group(
        'SSL',
    )
    group.add_argument(
        "-s", dest='ssl', default=False, action="store_true",
        help='Run in HTTPS mode.'
    )
    group.add_argument(
        "--cn",
        dest="cn", type=str, default=None,
        help="CN for generated SSL certs. Default: %s" % pathod.DEFAULT_CERT_DOMAIN
    )
    group.add_argument(
        "-C", dest='ssl_not_after_connect', default=False,
        action="store_true",
        help="Don't expect SSL after a CONNECT request."
    )
    group.add_argument(
        "--cert", dest='ssl_certs', default=[], type=str,
        metavar="SPEC", action="append",
        help="""
            Add an SSL certificate. SPEC is of the form "[domain=]path". The
            domain may include a wildcard, and is equal to "*" if not
            specified. The file at path is a certificate in PEM format. If a
            private key is included in the PEM, it is used, else the default
            key in the conf dir is used. Can be passed multiple times.
        """
    )
    group.add_argument(
        "--ciphers", dest="ciphers", type=str, default=False,
        help="SSL cipher specification"
    )
    group.add_argument(
        "--san", dest="sans", type=str, default=[], action="append",
        metavar="SAN",
        help="""
            Subject Alternate Name to add to the server certificate.
            May be passed multiple times.
        """
    )
    group.add_argument(
        "--ssl-version", dest="ssl_version", type=str, default="secure",
        choices=tcp.sslversion_choices.keys(),
        help="Set supported SSL/TLS versions. "
             "SSLv2, SSLv3 and 'all' are INSECURE. Defaults to secure, which is TLS1.0+."
    )

    group = parser.add_argument_group(
        'Controlling Logging',
        """
            Some of these options expand generated values for logging - if
            you're generating large data, use them with caution.
        """
    )
    group.add_argument(
        "-e", dest="explain", action="store_true", default=False,
        help="Explain responses"
    )
    group.add_argument(
        "-f", dest='logfile', default=None, type=str,
        help='Log to file.'
    )
    group.add_argument(
        "-q", dest="logreq", action="store_true", default=False,
        help="Log full request"
    )
    group.add_argument(
        "-r", dest="logresp", action="store_true", default=False,
        help="Log full response"
    )
    group.add_argument(
        "-x", dest="hexdump", action="store_true", default=False,
        help="Log request/response in hexdump format"
    )
    group.add_argument(
        "--http2-framedump", dest="http2_framedump", action="store_true",
        default=False,
        help="Output all received & sent HTTP/2 frames"
    )

    args = parser.parse_args(argv[1:])

    args.ssl_version, args.ssl_options = tcp.sslversion_choices[args.ssl_version]

    certs = []
    for i in args.ssl_certs:
        parts = i.split("=", 1)
        if len(parts) == 1:
            parts = ["*", parts[0]]
        parts[1] = os.path.expanduser(parts[1])
        if not os.path.isfile(parts[1]):
            return parser.error(
                "Certificate file does not exist: %s" % parts[1]
            )
        certs.append(parts)
    args.ssl_certs = certs

    alst = []
    for i in args.anchors:
        parts = parse_anchor_spec(i)
        if not parts:
            return parser.error("Invalid anchor specification: %s" % i)
        alst.append(parts)
    args.anchors = alst

    sizelimit = None
    if args.sizelimit:
        try:
            sizelimit = human.parse_size(args.sizelimit)
        except ValueError as v:
            return parser.error(v)
    args.sizelimit = sizelimit

    anchors = []
    for patt, spec in args.anchors:
        if os.path.isfile(spec):
            data = open(spec).read()
            spec = data
        try:
            arex = re.compile(patt)
        except re.error:
            return parser.error("Invalid regex in anchor: %s" % patt)
        anchors.append((arex, spec))
    args.anchors = anchors

    return args
def args_pathod(argv, stdout_=sys.stdout, stderr_=sys.stderr):
    parser = argparse.ArgumentParser(
        description='A pathological HTTP/S daemon.')
    parser.add_argument('--version', action='version',
                        version="pathod " + version.VERSION)
    parser.add_argument(
        "-p", dest='port', default=9999, type=int,
        help='Port. Specify 0 to pick an arbitrary empty port. (9999)')
    parser.add_argument("-l", dest='address', default="127.0.0.1", type=str,
                        help='Listening address. (127.0.0.1)')
    parser.add_argument(
        "-a", dest='anchors', default=[], type=str, action="append",
        metavar="ANCHOR",
        help="""
            Add an anchor. Specified as a string with the form
            pattern=spec or pattern=filepath, where pattern is a regular
            expression.
        """)
    parser.add_argument(
        "-c", dest='craftanchor', default=pathod.DEFAULT_CRAFT_ANCHOR,
        type=str,
        help="""
            URL path specifying prefix for URL crafting commands. (%s)
        """ % pathod.DEFAULT_CRAFT_ANCHOR)
    parser.add_argument("--confdir", action="store", type=str, dest="confdir",
                        default='~/.mitmproxy',
                        help="Configuration directory. (~/.mitmproxy)")
    parser.add_argument("-d", dest='staticdir', default=None, type=str,
                        help='Directory for static files.')
    parser.add_argument("-D", dest='daemonize', default=False,
                        action="store_true", help='Daemonize.')
    parser.add_argument("-t", dest="timeout", type=int, default=None,
                        help="Connection timeout")
    parser.add_argument(
        "--limit-size", dest='sizelimit', default=None, type=str,
        help='Size limit of served responses. Understands size suffixes, i.e. 100k.')
    parser.add_argument(
        "--nohang", dest='nohang', default=False, action="store_true",
        help='Disable pauses during crafted response generation.')
    parser.add_argument(
        "--nocraft", dest='nocraft', default=False, action="store_true",
        help='Disable response crafting. If anchors are specified, they still work.')
    parser.add_argument("--webdebug", dest='webdebug', default=False,
                        action="store_true",
                        help='Debugging mode for the web app (dev only).')

    group = parser.add_argument_group('SSL', )
    group.add_argument("-s", dest='ssl', default=False, action="store_true",
                       help='Run in HTTPS mode.')
    group.add_argument("--cn", dest="cn", type=str, default=None,
                       help="CN for generated SSL certs. Default: %s" %
                            pathod.DEFAULT_CERT_DOMAIN)
    group.add_argument("-C", dest='ssl_not_after_connect', default=False,
                       action="store_true",
                       help="Don't expect SSL after a CONNECT request.")
    group.add_argument(
        "--cert", dest='ssl_certs', default=[], type=str,
        metavar="SPEC", action="append",
        help="""
            Add an SSL certificate. SPEC is of the form "[domain=]path". The
            domain may include a wildcard, and is equal to "*" if not
            specified. The file at path is a certificate in PEM format. If a
            private key is included in the PEM, it is used, else the default
            key in the conf dir is used. Can be passed multiple times.
        """)
    group.add_argument("--ciphers", dest="ciphers", type=str, default=False,
                       help="SSL cipher specification")
    group.add_argument(
        "--san", dest="sans", type=str, default=[], action="append",
        metavar="SAN",
        help="""
            Subject Alternate Name to add to the server certificate.
            May be passed multiple times.
        """)
    group.add_argument(
        "--ssl-version", dest="ssl_version", type=str, default="secure",
        choices=tcp.sslversion_choices.keys(),
        help="Set supported SSL/TLS versions. "
             "SSLv2, SSLv3 and 'all' are INSECURE. Defaults to secure, which is TLS1.0+."
    )

    group = parser.add_argument_group(
        'Controlling Logging',
        """
            Some of these options expand generated values for logging - if
            you're generating large data, use them with caution.
        """)
    group.add_argument("-e", dest="explain", action="store_true",
                       default=False, help="Explain responses")
    group.add_argument("-f", dest='logfile', default=None, type=str,
                       help='Log to file.')
    group.add_argument("-q", dest="logreq", action="store_true",
                       default=False, help="Log full request")
    group.add_argument("-r", dest="logresp", action="store_true",
                       default=False, help="Log full response")
    group.add_argument("-x", dest="hexdump", action="store_true",
                       default=False,
                       help="Log request/response in hexdump format")
    group.add_argument("--http2-framedump", dest="http2_framedump",
                       action="store_true", default=False,
                       help="Output all received & sent HTTP/2 frames")

    args = parser.parse_args(argv[1:])

    args.ssl_version, args.ssl_options = tcp.sslversion_choices[
        args.ssl_version]

    certs = []
    for i in args.ssl_certs:
        parts = i.split("=", 1)
        if len(parts) == 1:
            parts = ["*", parts[0]]
        parts[1] = os.path.expanduser(parts[1])
        if not os.path.isfile(parts[1]):
            return parser.error("Certificate file does not exist: %s" % parts[1])
        certs.append(parts)
    args.ssl_certs = certs

    alst = []
    for i in args.anchors:
        parts = parse_anchor_spec(i)
        if not parts:
            return parser.error("Invalid anchor specification: %s" % i)
        alst.append(parts)
    args.anchors = alst

    sizelimit = None
    if args.sizelimit:
        try:
            sizelimit = human.parse_size(args.sizelimit)
        except ValueError as v:
            return parser.error(v)
    args.sizelimit = sizelimit

    anchors = []
    for patt, spec in args.anchors:
        spec = os.path.expanduser(spec)
        if os.path.isfile(spec):
            with open(spec) as f:
                data = f.read()
            spec = data
        try:
            arex = re.compile(patt)
        except re.error:
            return parser.error("Invalid regex in anchor: %s" % patt)
        anchors.append((arex, spec))
    args.anchors = anchors

    return args
def configure(self, options, updated):
    if "stream_large_bodies" in updated and options.stream_large_bodies:
        try:
            self.max_size = human.parse_size(options.stream_large_bodies)
        except ValueError as e:
            raise exceptions.OptionsError(e)
def run(self):
    r = self.f.request
    bsl = human.parse_size(self.options.body_size_limit)
    first_line_format_backup = r.first_line_format
    server = None
    try:
        self.f.response = None

        # If we have a channel, run script hooks.
        if self.channel:
            request_reply = self.channel.ask("request", self.f)
            if isinstance(request_reply, http.HTTPResponse):
                self.f.response = request_reply

        if not self.f.response:
            # In all modes, we directly connect to the server displayed
            if self.options.mode.startswith("upstream:"):
                server_address = server_spec.parse_with_mode(self.options.mode)[1].address
                server = connections.ServerConnection(
                    server_address, (self.options.listen_host, 0)
                )
                server.connect()
                if r.scheme == "https":
                    connect_request = http.make_connect_request((r.data.host, r.port))
                    server.wfile.write(http1.assemble_request(connect_request))
                    server.wfile.flush()
                    resp = http1.read_response(
                        server.rfile,
                        connect_request,
                        body_size_limit=bsl
                    )
                    if resp.status_code != 200:
                        raise exceptions.ReplayException(
                            "Upstream server refuses CONNECT request"
                        )
                    server.establish_ssl(
                        self.options.client_certs,
                        sni=self.f.server_conn.sni
                    )
                    r.first_line_format = "relative"
                else:
                    r.first_line_format = "absolute"
            else:
                server_address = (r.host, r.port)
                server = connections.ServerConnection(
                    server_address,
                    (self.options.listen_host, 0)
                )
                server.connect()
                if r.scheme == "https":
                    server.establish_ssl(
                        self.options.client_certs,
                        sni=self.f.server_conn.sni
                    )
                r.first_line_format = "relative"

            server.wfile.write(http1.assemble_request(r))
            server.wfile.flush()

            if self.f.server_conn:
                self.f.server_conn.close()
            self.f.server_conn = server

            self.f.response = http.HTTPResponse.wrap(
                http1.read_response(
                    server.rfile,
                    r,
                    body_size_limit=bsl
                )
            )
        if self.channel:
            response_reply = self.channel.ask("response", self.f)
            if response_reply == exceptions.Kill:
                raise exceptions.Kill()
    except (exceptions.ReplayException, exceptions.NetlibException) as e:
        self.f.error = flow.Error(str(e))
        if self.channel:
            self.channel.ask("error", self.f)
    except exceptions.Kill:
        # Kill should only be raised if there's a channel in the
        # first place.
        self.channel.tell(
            "log",
            log.LogEntry("Connection killed", "info")
        )
    except Exception as e:
        self.channel.tell(
            "log",
            log.LogEntry(repr(e), "error")
        )
    finally:
        r.first_line_format = first_line_format_backup
        self.f.live = False
        if server.connected():
            server.finish()
def replay(self, f):  # pragma: no cover
    f.live = True
    r = f.request
    bsl = human.parse_size(self.options.body_size_limit)
    first_line_format_backup = r.first_line_format
    server = None
    try:
        f.response = None

        # If we have a channel, run script hooks.
        request_reply = self.channel.ask("request", f)
        if isinstance(request_reply, http.HTTPResponse):
            f.response = request_reply

        if not f.response:
            # In all modes, we directly connect to the server displayed
            if self.options.mode.startswith("upstream:"):
                server_address = server_spec.parse_with_mode(self.options.mode)[1].address
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    connect_request = http.make_connect_request((r.data.host, r.port))
                    server.wfile.write(http1.assemble_request(connect_request))
                    server.wfile.flush()
                    resp = http1.read_response(
                        server.rfile,
                        connect_request,
                        body_size_limit=bsl
                    )
                    if resp.status_code != 200:
                        raise exceptions.ReplayException(
                            "Upstream server refuses CONNECT request"
                        )
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options)
                    )
                    r.first_line_format = "relative"
                else:
                    r.first_line_format = "absolute"
            else:
                server_address = (r.host, r.port)
                server = connections.ServerConnection(server_address)
                server.connect()
                if r.scheme == "https":
                    server.establish_tls(
                        sni=f.server_conn.sni,
                        **tls.client_arguments_from_options(self.options)
                    )
                r.first_line_format = "relative"

            server.wfile.write(http1.assemble_request(r))
            server.wfile.flush()
            r.timestamp_start = r.timestamp_end = time.time()

            if f.server_conn:
                f.server_conn.close()
            f.server_conn = server

            f.response = http.HTTPResponse.wrap(
                http1.read_response(server.rfile, r, body_size_limit=bsl)
            )
        response_reply = self.channel.ask("response", f)
        if response_reply == exceptions.Kill:
            raise exceptions.Kill()
    except (exceptions.ReplayException, exceptions.NetlibException) as e:
        f.error = flow.Error(str(e))
        self.channel.ask("error", f)
    except exceptions.Kill:
        self.channel.tell("log", log.LogEntry("Connection killed", "info"))
    except Exception as e:
        self.channel.tell("log", log.LogEntry(repr(e), "error"))
    finally:
        r.first_line_format = first_line_format_backup
        f.live = False
        if server.connected():
            server.finish()
            server.close()
def test_parse_size():
    assert human.parse_size("0") == 0
    assert human.parse_size("0b") == 0
    assert human.parse_size("1") == 1
    assert human.parse_size("1k") == 1024
    assert human.parse_size("1m") == 1024**2
    assert human.parse_size("1g") == 1024**3
    with pytest.raises(ValueError):
        human.parse_size("1f")
    with pytest.raises(ValueError):
        human.parse_size("ak")
    assert human.parse_size(None) is None
def run(self):
    r = self.f.request
    bsl = human.parse_size(self.options.body_size_limit)
    first_line_format_backup = r.first_line_format
    server = None
    try:
        self.f.response = None

        # If we have a channel, run script hooks.
        if self.channel:
            request_reply = self.channel.ask("request", self.f)
            if isinstance(request_reply, http.HTTPResponse):
                self.f.response = request_reply

        if not self.f.response:
            # In all modes, we directly connect to the server displayed
            if self.options.mode.startswith("upstream:"):
                server_address = server_spec.parse_with_mode(
                    self.options.mode)[1].address
                server = connections.ServerConnection(
                    server_address, (self.options.listen_host, 0))
                server.connect()
                if r.scheme == "https":
                    connect_request = http.make_connect_request(
                        (r.data.host, r.port))
                    server.wfile.write(
                        http1.assemble_request(connect_request))
                    server.wfile.flush()
                    resp = http1.read_response(server.rfile,
                                               connect_request,
                                               body_size_limit=bsl)
                    if resp.status_code != 200:
                        raise exceptions.ReplayException(
                            "Upstream server refuses CONNECT request")
                    server.establish_tls(self.options.client_certs,
                                         sni=self.f.server_conn.sni)
                    r.first_line_format = "relative"
                else:
                    r.first_line_format = "absolute"
            else:
                server_address = (r.host, r.port)
                server = connections.ServerConnection(
                    server_address, (self.options.listen_host, 0))
                server.connect()
                if r.scheme == "https":
                    server.establish_tls(self.options.client_certs,
                                         sni=self.f.server_conn.sni)
                r.first_line_format = "relative"

            server.wfile.write(http1.assemble_request(r))
            server.wfile.flush()

            if self.f.server_conn:
                self.f.server_conn.close()
            self.f.server_conn = server

            self.f.response = http.HTTPResponse.wrap(
                http1.read_response(server.rfile, r, body_size_limit=bsl))

        if self.channel:
            response_reply = self.channel.ask("response", self.f)
            if response_reply == exceptions.Kill:
                raise exceptions.Kill()
    except (exceptions.ReplayException, exceptions.NetlibException) as e:
        self.f.error = flow.Error(str(e))
        if self.channel:
            self.channel.ask("error", self.f)
    except exceptions.Kill:
        # Kill should only be raised if there's a channel in the
        # first place.
        self.channel.tell("log", log.LogEntry("Connection killed", "info"))
    except Exception as e:
        self.channel.tell("log", log.LogEntry(repr(e), "error"))
    finally:
        r.first_line_format = first_line_format_backup
        self.f.live = False
        if server.connected():
            server.finish()