def export(line): """ Write the full request/response of a request/response to a file. Usage: export [req|rsp] <reqid(s)> """ args = shlex.split(line) if len(args) < 2: print 'Requires req/rsp and and request id(s)' defer.returnValue(None) if args[0] not in ('req', 'rsp'): raise PappyException('Request or response not specified') reqs = yield load_reqlist(args[1]) for req in reqs: try: if args[0] == 'req': fname = 'req_%s.txt'%req.reqid with open(fname, 'w') as f: f.write(req.full_request) print 'Full request written to %s' % fname elif args[0] == 'rsp': fname = 'rsp_%s.txt'%req.reqid with open(fname, 'w') as f: f.write(req.full_response) print 'Full response written to %s' % fname except PappyException as e: print 'Unable to export %s: %s' % (req.reqid, e)
def export(line): """ Write the full request/response of a request/response to a file. Usage: export [req|rsp] <reqid(s)> """ args = shlex.split(line) if len(args) < 2: print 'Requires req/rsp and and request id(s)' defer.returnValue(None) if args[0] not in ('req', 'rsp'): raise PappyException('Request or response not specified') reqs = yield load_reqlist(args[1]) for req in reqs: try: if args[0] == 'req': fname = 'req_%s.txt' % req.reqid with open(fname, 'w') as f: f.write(req.full_request) print 'Full request written to %s' % fname elif args[0] == 'rsp': fname = 'rsp_%s.txt' % req.reqid with open(fname, 'w') as f: f.write(req.full_response) print 'Full response written to %s' % fname except PappyException as e: print 'Unable to export %s: %s' % (req.reqid, e)
def submit(line):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
    parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
    args = parser.parse_args(shlex.split(line))

    headers = {}
    cookies = {}
    clear_cookies = False

    if args.headers:
        for h in args.headers:
            # Each -d value is "Name=value"; split only on the first '='
            k, v = h.split('=', 1)
            headers[k] = v

    if args.copycookies:
        reqid = args.copycookies
        req = yield Request.load_request(reqid)
        # Copying cookies replaces each target request's cookies rather
        # than merging into them (see clear() in the loop below)
        clear_cookies = True
        for k, v in req.cookies.all_pairs():
            cookies[k] = v

    if args.cookies:
        for c in args.cookies:
            # -c values override any cookies copied via -o
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise PappyException('Both -u and -p cannot be given as arguments')

    # Session applies the collected header/cookie overrides to each request
    newsession = Session(cookie_vals=cookies, header_vals=headers)

    reqs = yield load_reqlist(args.reqids)
    for req in reqs:
        if clear_cookies:
            req.cookies.clear()
        newsession.apply_req(req)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        defer.returnValue(None)

    for r in reqs:
        r.tags.add('resubmitted')

    # -m keeps resubmitted requests in memory only; otherwise they are saved
    save = not args.inmem
    yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
                                unique_paths=args.uniquepath, unique_path_and_args=args.unique)
def gen_template_args_macro(args):
    """Build a macro script from the request ids in args (empty macro if none given)."""
    reqs = []
    if args:
        reqs = yield load_reqlist(args[0])
    defer.returnValue(macro_from_requests(reqs))
def untag(line): """ Remove a tag from requests Usage: untag <tag> <request ids> You can provide as many request ids as you want and the tag will be removed from all of them. If no ids are given, the tag will be removed from all in-context requests. """ args = shlex.split(line) if len(args) == 0: raise PappyException("Tag and request ids are required") tag = args[0] ids = [] if len(args) > 1: reqids = yield load_reqlist(args[1], False, ids_only=True) print 'Removing tag %s from %s' % (tag, ', '.join(reqids)) else: print "Removing tag %s from all in-context requests" % tag reqids = yield async_main_context_ids() for reqid in reqids: req = yield Request.load_request(reqid) if tag in req.tags: req.tags.discard(tag) if req.saved: yield req.async_save() if ids: print 'Tag %s removed from %s' % (tag, ', '.join(ids))
def tag(line): """ Add a tag to requests. Usage: tag <tag> [request ids] You can tag as many requests as you want at the same time. If no ids are given, the tag will be applied to all in-context requests. """ args = shlex.split(line) if len(args) == 0: raise PappyException('Tag name is required') tag = args[0] if len(args) > 1: reqids = yield load_reqlist(args[1], False, ids_only=True) print 'Tagging %s with %s' % (', '.join(reqids), tag) else: print "Tagging all in-context requests with %s" % tag reqids = yield async_main_context_ids() for reqid in reqids: req = yield Request.load_request(reqid) if tag not in req.tags: req.tags.add(tag) if req.saved: yield req.async_save() else: print 'Request %s already has tag %s' % (req.reqid, tag)
def rpy(line): """ Copy python object definitions of requests. Usage: rpy <reqs> """ reqs = yield load_reqlist(line) for req in reqs: print pappyproxy.macros.req_obj_def(req)
def find_urls(line): args = shlex.split(line) reqs = yield load_reqlist(args[0]) url_regexp = r'((?:http|ftp|https)://(?:[\w_-]+(?:(?:\.[\w_-]+)+))(?:[\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)' urls = set() for req in reqs: urls |= set(re.findall(url_regexp, req.full_message)) if req.response: urls |= set(re.findall(url_regexp, req.response.full_message)) for url in sorted(urls): print url
def view_response_bytes(line):
    """
    View the full data of the response associated with a request
    Usage: view_response_bytes <reqid(s)>
    """
    reqs = yield load_reqlist(line)
    for req in reqs:
        if req.response:
            # When dumping several responses, separate them with a header line
            if len(reqs) > 1:
                print '-' * 15 + (' %s ' % req.reqid) + '-' * 15
            print req.response.full_message
        else:
            print "Request %s does not have a response" % req.reqid
def view_response_bytes(line):
    """
    View the full data of the response associated with a request
    Usage: view_response_bytes <reqid(s)>
    """
    reqs = yield load_reqlist(line)
    for req in reqs:
        if req.response:
            # When dumping several responses, separate them with a header line
            if len(reqs) > 1:
                print '-'*15 + (' %s ' % req.reqid) + '-'*15
            print req.response.full_message
        else:
            print "Request %s does not have a response" % req.reqid
def view_response_headers(line): """ View the headers of the response Usage: view_response_headers <reqid(s)> """ reqs = yield load_reqlist(line) for req in reqs: if req.response: if len(reqs) > 1: print '-' * 15 + (' %s ' % req.reqid) + '-' * 15 view_full_message(req.response, True) else: print "Request %s does not have a response" % req.reqid
def pretty_print_request(line):
    """
    Print the body of the request pretty printed.
    Usage: pretty_print_request <format> <reqid(s)>
    """
    tokens = shlex.split(line)
    if len(tokens) < 2:
        raise PappyException("Usage: pretty_print_request <format> <reqid(s)>")
    fmt = tokens[0]
    for req in (yield load_reqlist(tokens[1])):
        pretty_print_body(fmt, req.body)
def view_response_headers(line): """ View the headers of the response Usage: view_response_headers <reqid(s)> """ reqs = yield load_reqlist(line) for req in reqs: if req.response: if len(reqs) > 1: print '-'*15 + (' %s ' % req.reqid) + '-'*15 view_full_message(req.response, True) else: print "Request %s does not have a response" % req.reqid
def clrtag(line): """ Clear all the tags from requests Usage: clrtag <request ids> """ args = shlex.split(line) if len(args) == 0: raise PappyException('No request IDs given') reqs = yield load_reqlist(args[0], False) for req in reqs: if req.tags: req.tags = set() print 'Tags cleared from request %s' % (req.reqid) if req.saved: yield req.async_save()
def pretty_print_response(line): """ Print the body of the request pretty printed. Usage: pretty_print_request <format> <reqid(s)> """ args = shlex.split(line) if len(args) < 2: raise PappyException("Usage: pretty_print_request <format> <reqid(s)>") reqids = args[1] reqs = yield load_reqlist(reqids) for req in reqs: if req.response: pretty_print_body(args[0], req.response.body) else: print 'No response associated with request %s' % req.reqid
def view_request_info(line): """ View information about request Usage: view_request_info <reqid(s)> """ args = shlex.split(line) if not args: raise PappyException("Request id is required") reqids = args[0] reqs = yield load_reqlist(reqids) for req in reqs: print '' print_request_extended(req) print ''
def view_request_bytes(line): """ View the raw bytes of the request. Use this if you want to redirect output to a file. Usage: view_request_bytes <reqid(s)> """ args = shlex.split(line) if not args: raise PappyException("Request id is required") reqid = args[0] reqs = yield load_reqlist(reqid) for req in reqs: if len(reqs) > 1: print 'Request %s:' % req.reqid print req.full_message if len(reqs) > 1: print '-' * 30 print ''
def view_full_request(line): """ View the full data of the request Usage: view_full_request <reqid(s)> """ args = shlex.split(line) if not args: raise PappyException("Request id is required") reqid = args[0] reqs = yield load_reqlist(reqid) for req in reqs: if len(reqs) > 1: print 'Request %s:' % req.reqid view_full_message(req) if len(reqs) > 1: print '-' * 30 print ''
def generate_macro(line): """ Generate a macro script with request objects Usage: generate_macro <name> [reqs] """ if line == "": raise PappyException("Macro name is required") args = shlex.split(line) name = args[0] if len(args) > 1: reqs = yield load_reqlist(args[1]) else: reqs = [] script_str = macro_from_requests(reqs) fname = "macro_%s.py" % name with open(fname, "wc") as f: f.write(script_str) print "Wrote script to %s" % fname
def view_full_request(line): """ View the full data of the request Usage: view_full_request <reqid(s)> """ args = shlex.split(line) if not args: raise PappyException("Request id is required") reqid = args[0] reqs = yield load_reqlist(reqid) for req in reqs: if len(reqs) > 1: print 'Request %s:' % req.reqid view_full_message(req) if len(reqs) > 1: print '-'*30 print ''
def view_request_bytes(line): """ View the raw bytes of the request. Use this if you want to redirect output to a file. Usage: view_request_bytes <reqid(s)> """ args = shlex.split(line) if not args: raise PappyException("Request id is required") reqid = args[0] reqs = yield load_reqlist(reqid) for req in reqs: if len(reqs) > 1: print 'Request %s:' % req.reqid print req.full_message if len(reqs) > 1: print '-'*30 print ''
def print_params_cmd(line):
    """
    View the request parameters of a request, optionally filtered by key.
    Usage: print_params <reqid(s)> [key 1] [key 2] ...
    """
    args = shlex.split(line)
    reqid = args[0]
    # Remaining args (if any) restrict output to those parameter keys
    if len(args) > 1:
        keys = args[1:]
    else:
        keys = None
    reqs = yield load_reqlist(reqid)
    for req in reqs:
        # Separate output per request when several ids were given
        if len(reqs) > 1:
            print 'Request %s:' % req.reqid
        print_params(req, keys)
        if len(reqs) > 1:
            print '-'*30
def print_params_cmd(line):
    """
    View the request parameters of a request, optionally filtered by key.
    Usage: print_params <reqid(s)> [key 1] [key 2] ...
    """
    args = shlex.split(line)
    reqid = args[0]
    # Remaining args (if any) restrict output to those parameter keys
    if len(args) > 1:
        keys = args[1:]
    else:
        keys = None
    reqs = yield load_reqlist(reqid)
    for req in reqs:
        # Separate output per request when several ids were given
        if len(reqs) > 1:
            print 'Request %s:' % req.reqid
        print_params(req, keys)
        if len(reqs) > 1:
            print '-' * 30
def dump_response(line): """ Dump the data of the response to a file. Usage: dump_response <id> <filename> """ # dump the data of a response args = shlex.split(line) reqs = yield load_reqlist(args[0]) for req in reqs: if req.response: rsp = req.response if len(args) >= 2: fname = args[1] else: fname = req.path.split('/')[-1] with open(fname, 'w') as f: f.write(rsp.body) print 'Response data written to %s' % fname else: print 'Request %s does not have a response' % req.reqid
def submit(line):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit reqids [-h] [-m] [-u] [-p] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true',
                        help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true',
                        help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true',
                        help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*',
                        help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*',
                        help='Apply a header to requests before submitting')
    args = parser.parse_args(shlex.split(line))

    headers = {}
    cookies = {}
    if args.headers:
        for h in args.headers:
            # Each -d value is "Name=value"; split only on the first '='
            k, v = h.split('=', 1)
            headers[k] = v
    if args.cookies:
        for c in args.cookies:
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise PappyException('Both -u and -p cannot be given as arguments')

    newsession = Session(cookie_vals=cookies, header_vals=headers)
    reqs = yield load_reqlist(args.reqids)

    if args.unique or args.uniquepath:
        # Deduplicate: keep the first request seen for each endpoint.
        # -u distinguishes endpoints by full URL, -p by path only.
        endpoints = set()
        new_reqs = []
        for r in reqs:
            if args.unique:
                s = r.url
            else:
                s = r.path
            if s not in endpoints:  # idiom fix: was "not s in endpoints"
                new_reqs.append(r.copy())
                endpoints.add(s)
        reqs = new_reqs
    else:
        # Work on copies so the stored originals are not modified
        reqs = [r.copy() for r in reqs]

    for req in reqs:
        newsession.apply_req(req)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        defer.returnValue(None)

    for r in reqs:
        r.tags.add('resubmitted')

    if args.inmem:
        yield async_submit_requests(reqs)
        for req in reqs:
            add_to_history(req)
    else:
        yield async_submit_requests(reqs, save=True)
def save(line):
    """Deep-save the given requests (and their responses) to the data file."""
    tokens = shlex.split(line)
    for req in (yield load_reqlist(tokens[0])):
        yield req.async_deep_save()
def submit(line):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit reqids [-h] [-m] [-u] [-p] [-o REQID] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true', help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true', help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true', help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*', help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*', help='Apply a header to requests before submitting')
    parser.add_argument('-o', '--copycookies', help='Copy the cookies used in another request')
    args = parser.parse_args(shlex.split(line))

    headers = {}
    cookies = {}
    clear_cookies = False

    if args.headers:
        for h in args.headers:
            # Each -d value is "Name=value"; split only on the first '='
            k, v = h.split('=', 1)
            headers[k] = v

    if args.copycookies:
        reqid = args.copycookies
        req = yield Request.load_request(reqid)
        # Copying cookies replaces each target request's cookies rather
        # than merging into them (see clear() in the loop below)
        clear_cookies = True
        for k, v in req.cookies.all_pairs():
            cookies[k] = v

    if args.cookies:
        for c in args.cookies:
            # -c values override any cookies copied via -o
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise PappyException('Both -u and -p cannot be given as arguments')

    # Session applies the collected header/cookie overrides to each request
    newsession = Session(cookie_vals=cookies, header_vals=headers)

    reqs = yield load_reqlist(args.reqids)
    for req in reqs:
        if clear_cookies:
            req.cookies.clear()
        newsession.apply_req(req)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        defer.returnValue(None)

    for r in reqs:
        r.tags.add('resubmitted')

    # -m keeps resubmitted requests in memory only; otherwise they are saved
    save = not args.inmem
    yield async_submit_requests(reqs, save=save, save_in_mem=args.inmem,
                                unique_paths=args.uniquepath, unique_path_and_args=args.unique)
def submit(line):
    """
    Resubmit some requests, optionally with modified headers and cookies.

    Usage: submit reqids [-h] [-m] [-u] [-p] [-c [COOKIES [COOKIES ...]]] [-d [HEADERS [HEADERS ...]]]
    """
    parser = argparse.ArgumentParser(prog="submit", usage=submit.__doc__)
    parser.add_argument('reqids')
    parser.add_argument('-m', '--inmem', action='store_true',
                        help='Store resubmitted requests in memory without storing them in the data file')
    parser.add_argument('-u', '--unique', action='store_true',
                        help='Only resubmit one request per endpoint (different URL parameters are different endpoints)')
    parser.add_argument('-p', '--uniquepath', action='store_true',
                        help='Only resubmit one request per endpoint (ignoring URL parameters)')
    parser.add_argument('-c', '--cookies', nargs='*',
                        help='Apply a cookie to requests before submitting')
    parser.add_argument('-d', '--headers', nargs='*',
                        help='Apply a header to requests before submitting')
    args = parser.parse_args(shlex.split(line))

    headers = {}
    cookies = {}
    if args.headers:
        for h in args.headers:
            # Each -d value is "Name=value"; split only on the first '='
            k, v = h.split('=', 1)
            headers[k] = v
    if args.cookies:
        for c in args.cookies:
            k, v = c.split('=', 1)
            cookies[k] = v

    if args.unique and args.uniquepath:
        raise PappyException('Both -u and -p cannot be given as arguments')

    newsession = Session(cookie_vals=cookies, header_vals=headers)
    reqs = yield load_reqlist(args.reqids)

    if args.unique or args.uniquepath:
        # Deduplicate: keep the first request seen for each endpoint.
        # -u distinguishes endpoints by full URL, -p by path only.
        endpoints = set()
        new_reqs = []
        for r in reqs:
            if args.unique:
                s = r.url
            else:
                s = r.path
            if s not in endpoints:  # idiom fix: was "not s in endpoints"
                new_reqs.append(r.copy())
                endpoints.add(s)
        reqs = new_reqs
    else:
        # Work on copies so the stored originals are not modified
        reqs = [r.copy() for r in reqs]

    for req in reqs:
        newsession.apply_req(req)

    conf_message = "You're about to submit %d requests, continue?" % len(reqs)
    if not confirm(conf_message):
        defer.returnValue(None)

    for r in reqs:
        r.tags.add('resubmitted')

    if args.inmem:
        yield async_submit_requests(reqs)
        for req in reqs:
            add_to_history(req)
    else:
        yield async_submit_requests(reqs, save=True)