def server_to_defend(ip):
    """Register an ip address as a server to defend.

    :param ip: str The ip address of the server
    :return: bool False when `ip` is not a valid ip address,
             otherwise the result of Add.__add__
    """
    if APP_DEBUG:
        Log.info('CALLED: Add.server_to_defend(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Add.__add__(keys.SERVER_TO_DEFEND, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def game_server(ip):
    """Store the game server ip address.

    :param ip: str The ip address of the game server
    :return: bool False when `ip` is not a valid ip address,
             otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.game_server(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Set.__set__(keys.GAME_SERVER, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def team_player(ip='*'):
    """Remove a team player entry; '*' (the default) targets all of them.

    :param ip: str The ip address of the team player, or '*'
    :return: bool False when `ip` is neither '*' nor a valid ip address,
             otherwise the result of Remove.__remove__
    """
    if APP_DEBUG:
        Log.info('CALLED: Remove.team_player(' + str(ip) + ')')
    # '*' is accepted as a wildcard, everything else must be a real ip
    if ip == '*' or validators.is_ip(ip):
        return Remove.__remove__(keys.TEAM_PLAYER, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def my_ip(ip):
    """Store the local ip address.

    :param ip: str The local ip address
    :return: bool False when `ip` is not a valid ip address,
             otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.my_ip(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Set.__set__(keys.MY_IP, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def team_player(ip):
    """Register a team player's ip address.

    :param ip: str The ip address of the team player
    :return: bool False when `ip` is not a valid ip address,
             otherwise the result of Add.__add__
    """
    if APP_DEBUG:
        Log.info('CALLED: Add.team_player(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Add.__add__(keys.TEAM_PLAYER, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def server_to_defend(ip='*'):
    """Remove a server-to-defend entry; '*' (the default) targets all of them.

    :param ip: str The ip address of the server, or '*'
    :return: bool False when `ip` is neither '*' nor a valid ip address,
             otherwise the result of Remove.__remove__
    """
    if APP_DEBUG:
        Log.info('CALLED: Remove.server_to_defend(' + str(ip) + ')')
    # '*' is accepted as a wildcard, everything else must be a real ip
    if ip == '*' or validators.is_ip(ip):
        return Remove.__remove__(keys.SERVER_TO_DEFEND, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def regex_in_string(regex, string):
    """Check whether `regex` matches at least once inside `string`.

    :param regex: str The regular expression to look for
    :param string: str The string to scan
    :return: bool True when the pattern occurs in the string
    """
    if APP_DEBUG:
        Log.info('CALLED: regex_in_string(' + str(regex) + ', ' + str(string) + ')')
    # re.search stops at the first match; the original re.findall collected
    # every match only to test whether the list was empty.
    return re.search(regex, string) is not None
def __init__(self, host='0.0.0.0', port=8775):
    """Start the sqlmap-api server in a background thread.

    :param host: str The address the sqlmap-api server binds to
    :param port: int The port the sqlmap-api server listens on
    """
    self.host = host
    self.port = port
    # Start the sqlmap-api server in a parallel thread
    Log.info("Starting sqlmap-api server in a parallel thread")
    # multithread(...) presumably runs sqlmap_server(host, port) in 1 thread,
    # asynchronously — TODO confirm against the multithread helper signature
    multithread(sqlmap_server, (self.host, self.port), True, 1)
    Log.success("Sqlmap-api server started!")
def find_forms(parsed, url=None):
    """Recursively collect the html forms found in a parsed tree.

    :param parsed: dict|list|None The parsed html tree (or a list of trees)
    :param url: str The fallback action url when a form has no `action`
    :return: list A list of dicts like {'method': ..., 'action': ..., 'inputs': ...}
    """
    forms = []
    if parsed is None:
        return forms
    # isinstance instead of `type(x) == dict`: idiomatic and subclass-friendly
    if isinstance(parsed, dict):
        if 'form' == parsed.get('tag'):
            attrs = parsed.get('attrs')
            action = attrs.get('action')
            method = attrs.get('method')
            if action is None:
                # Forms without an action post back to the current url
                action = url
            if method is None:
                # Html forms default to GET, but the tool assumes POST here
                method = RequestType.POST
            form = {
                'method': method,
                'action': action,
                'inputs': find_inputs(parsed.get('children'))
            }
            forms.append(form)
        # Recurse regardless of tag: forms may be nested anywhere below
        forms += find_forms(parsed.get('children'), url)
    elif isinstance(parsed, list):
        for value in parsed:
            forms += find_forms(value, url)
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return forms
def parse(self, url=None, html=None):
    """Parse the html found at `url` (or the provided `html` string).

    :param url: str The url to fetch and parse (or None to use `html`)
    :param html: str The html code to parse when `url` is None
    :return: tuple (self.tags, cookies) on success, or None when the
             request for `url` fails
    """
    self.url = None
    self.base_url = None
    cookies = ''
    if url is not None:
        self.url = url
        url_parsed = urlparse(url)
        self.url_scheme = str(url_parsed.scheme)
        self.base_url = self.url_scheme + '://' + str(url_parsed.netloc)
        r = request(url, RequestType.GET)
        if r is None:
            return None
        try:
            # If the body is json there is nothing html-like to parse;
            # warn but keep going with the decoded payload
            html = r.json()
            Log.warning('Trying to parse a json with HTML parser!')
        except ValueError:
            html = r.text
        if r.headers is not None:
            # Manual case matching; NOTE(review): requests headers are
            # case-insensitive mappings, so the three spellings may be
            # redundant — confirm the type of r.headers
            for k in r.headers.keys():
                if k == 'Set-Cookie' or k == 'set-cookie' or k == 'Set-cookie':
                    cookies = r.headers.get('Set-Cookie')
                    break
    # Sort html (and fix errors)
    sorted_html, errors = tidy_document(html)
    self.feed(sorted_html)
    return self.tags, cookies
def submit_url(url):
    """Store the flag submission url.

    :param url: str The url where flags are submitted
    :return: bool False when `url` is not a valid url,
             otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.submit_url(' + str(url) + ')')
    if validators.is_url(url):
        return Set.__set__(keys.SUBMIT_URL, url)
    Log.error(str(url) + ' is not a valid url')
    return False
def try_inject(forms, cookies=''):
    """Try injection in all provided forms.

    :param forms: dict A dictionary of
        { "<url>": [ <parsed_form_1>, <parsed_form_2>, ... ], ... }
    :param cookies: str the request cookies
    """
    # Lazily build the singleton client on first use
    if SqlmapClient._client is None:
        SqlmapClient._client = SqlmapClient()
    pprint(forms)
    Log.info('Trying injection with cookies: ' + str(cookies))
    # Injection itself is still a stub
    Log.error("try_inject: Not Implemented")
def inject_form(url=None, html=None):
    """Search a form in the page returned by url (or inside the html).

    :param url: str The url to visit (or None)
    :param html: str the html code to analyze (or None)
    :return A list of parsed forms like [ form_1, form_2 ]
    """
    forms, cookies = form_parse(url, html)
    parsed_forms = {url: forms}
    Log.success('Html parsed! Found '+str(len(parsed_forms[url]))+' forms')
    SqlmapClient.try_inject(parsed_forms, cookies)
def multi_request(urls, request_type=Type.GET, data=None, headers=None):
    """Perform the same request against every url in `urls`.

    :param urls: iterable The urls to request
    :param request_type: str The request method (one of the Type values)
    :param data: dict The request data (or None)
    :param headers: dict Extra request headers (or None)
    """
    if APP_DEBUG:
        Log.info('CALLED: multi_request(' + str(urls) + ', ' + str(request_type) + ', ' + str(data) + ')')
    request_type = request_type.lower()
    for url in urls:
        r = request(url, request_type, data, headers)
        if r is None:
            continue
        if APP_DEBUG:
            try:
                print(r.json())
            # BUG FIX: `except A or B:` evaluates to just A, so the
            # simplejson decode error was never caught — use a tuple.
            except (json.decoder.JSONDecodeError, simplejson.errors.JSONDecodeError):
                print(r.text)
def request(url, request_type=Type.GET, data=None, headers=None):
    """Perform a single http request.

    :param url: str The url to request
    :param request_type: str The request method (one of the Type values)
    :param data: dict The request data (or None)
    :param headers: dict Extra request headers (or None)
    :return: The response object, or None on invalid input or connection failure
    """
    if headers is None:
        headers = {}
    # Always advertise the application's own User-Agent
    req_headers = {'User-Agent': str(APP_NAME) + ' ' + str(APP_VERSION)}
    req_headers.update(headers)
    if data is None:
        data = {}
    request_type = request_type.lower()
    if not is_url(url):
        Log.error(str(url) + ' is not a valid url!')
        return None
    try:
        if request_type == Type.GET:
            r = requests.get(url, data, headers=req_headers)
        elif request_type == Type.POST:
            r = requests.post(url, data, headers=req_headers)
        elif request_type == Type.PUT:
            r = requests.put(url, data, headers=req_headers)
        elif request_type == Type.PATCH:
            r = requests.patch(url, data, headers=req_headers)
        elif request_type == Type.DELETE:
            r = requests.delete(url, headers=req_headers)
        else:
            Log.error(str(request_type) + ' is not a valid request type!')
            return None
        if APP_DEBUG:
            print_request(r)
        return r
    # BUG FIX: `except A or B as e:` evaluates to just A, so
    # TooManyRedirects was never caught — use a tuple of exceptions.
    except (requests.exceptions.ConnectionError, requests.exceptions.TooManyRedirects) as e:
        Log.error('Unable to connect to ' + str(url))
        Log.error('Exception: ' + str(e))
        return None
def decrypt(string):
    """Try each known md5 reverse-lookup api until one yields a result.

    :param string: str The md5 hash to reverse
    :return: str The decrypted value, or None when no api could decrypt it
    """
    for api in Md5.Api.all():
        response = request(api['url'] + string, RequestType.GET)
        if response is None:
            continue
        try:
            payload = response.json()
        except json.decoder.JSONDecodeError:
            # Not a json response: this api can't help for this hash
            continue
        # Each api entry ships its own extractor for the response payload
        decrypted = api['get_result'](payload)
        if decrypted is not None:
            return decrypted
    Log.error('md5: unable to decrypt: ' + str(string))
    return None
def __init__(self, tasks_type):
    """Initialize the multitask runner.

    :param tasks_type: One of MultiTask.tasks_types (falls back to
        MultiTask.MULTI_PROCESSING when invalid)
    """
    if tasks_type not in MultiTask.tasks_types:
        # BUG FIX: log the *invalid* value before overwriting it;
        # the original reassigned first and therefore logged the fallback.
        Log.error(str(tasks_type) + ' is not a valid tasks type!')
        tasks_type = MultiTask.MULTI_PROCESSING
    self.tasks_type = tasks_type
    if self.tasks_type == MultiTask.MULTI_PROCESSING:
        self.Multitask = multiprocessing.Process
        self.tag = 'Process '
    else:
        self.Multitask = threading.Thread
        self.tag = 'Thread '
    self.tasks = []
    pid = str(multiprocessing.process.current_process().pid)
    # File holding the child pids (when multiprocessing)
    self.pidfile = APP_TMP + '/multitask.' + pid + '.pids'
    # File holding the result
    self.resfile = APP_TMP + '/multitask.' + pid + '.res'
def find_inputs(parsed):
    """Recursively collect the input/textarea fields of a parsed html tree.

    :param parsed: dict|list|None The parsed html tree (or a list of trees)
    :return: dict A mapping { <input name>: {'tag': ..., <attr>: <value>, ...} }
    """
    inputs = {}
    if parsed is None:
        return inputs
    # isinstance instead of `type(x) == dict`: idiomatic and subclass-friendly
    if isinstance(parsed, dict):
        tag = parsed.get('tag')
        if tag in ('input', 'textarea'):
            attrs = parsed.get('attrs')
            form_input = {'tag': tag}
            # dict.update replaces the original manual key-by-key copy loop
            form_input.update(attrs)
            inputs[attrs.get('name')] = form_input
        # Recurse regardless of tag: fields may be nested anywhere below
        inputs.update(find_inputs(parsed.get('children')))
    elif isinstance(parsed, list):
        for value in parsed:
            inputs.update(find_inputs(value))
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return inputs
def find_links(parsed):
    """Recursively collect the link targets of a parsed html tree.

    Links come from `<form action=...>` and `<a href=...>` attributes.

    :param parsed: dict|list|None The parsed html tree (or a list of trees)
    :return: set The set of urls found
    """
    links = set()
    if parsed is None:
        return links
    # isinstance instead of `type(x) == dict`: idiomatic and subclass-friendly
    if isinstance(parsed, dict):
        attrs = parsed.get('attrs')
        if attrs is not None:
            url = None
            if 'form' == parsed.get('tag'):
                url = attrs.get('action')
            elif 'a' == parsed.get('tag'):
                url = attrs.get('href')
            if url is not None:
                links.add(url)
        links = links.union(find_links(parsed.get('children')))
    elif isinstance(parsed, list):
        for value in parsed:
            links = links.union(find_links(value))
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return links
def print_parsed(parsed, depth=0):
    """Pretty-print a parsed html tree with depth-based indentation.

    :param parsed: dict|list The parsed html tree (or a list of trees)
    :param depth: int The current indentation depth
    """
    space = ' ' * depth
    # isinstance instead of `type(x) == dict`; dead commented-out code removed
    if isinstance(parsed, dict):
        print(space + '{')
        for key, value in parsed.items():
            if key == 'children':
                print_parsed(value, depth + 1)
            elif is_listable(value):
                print((space + ' ') + str(key) + ':')
                print_parsed(value, depth + 2)
            else:
                print((space + ' ') + str(key) + ': ' + str(value))
        print(space + '}')
    elif isinstance(parsed, list):
        for value in parsed:
            print_parsed(value, depth + 1)
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
def _deep_inject_form(href, depth=1):
    """Recursively crawl `href` collecting its html forms.

    NOTE(review): this reads parsed_forms, base_url, max_depth and
    out_file without defining them — it looks like a nested helper of
    deep_inject_form that closes over that function's locals; confirm
    before moving or calling it standalone.

    :param href: str The url to visit
    :param depth: int The current recursion depth
    :return: str The longest cookie string seen during the visit ('' when skipped)
    """
    # Check the domain: skip already-visited urls, foreign hosts,
    # and anything beyond max_depth
    if href in parsed_forms or urlparse(href).netloc != base_url or (max_depth is not None and depth > max_depth):
        return ''
    # Visit the current href
    parsed_relevant, request_cookies = relevant_parse(href)
    parsed_forms[href] = find_forms(parsed_relevant, href)
    # Find adjacent links
    links = find_links(parsed_relevant)
    # Checkpoint the crawl to disk every 10 visited pages
    if len(parsed_forms) % 10 == 0:
        Log.info('Writing result in ' + out_file + '...')
        set_json(parsed_forms, out_file)
    # Visit adjacent links
    for link in links:
        child_request_cookies = _deep_inject_form(link, depth+1)
        # Keep the longest cookie string found anywhere in the subtree
        if len(child_request_cookies) > len(request_cookies):
            request_cookies = child_request_cookies
    return request_cookies
def deep_inject_form(url, max_depth):
    """
    Search a form in the page returned by url.
    If it doesn't find a form, or the injection can't be done, it visit
    the website in search for other forms
    :param url: str The url to visit
    :param max_depth: int The max depth during the visit
    :return A dictionary of parsed forms like { '<visited_url>': [ form_1, form_2, ... }
    """
    # Crawl stays inside the starting domain
    base_url = urlparse(url).netloc
    parsed_forms = dict()
    out_file = APP_STORAGE_OUT + '/' + now() + '_DEEP_FORMS_' + base_url + '.json'

    def _deep_inject_form(href, depth=1):
        # Closure over base_url / parsed_forms / max_depth / out_file above.
        # Returns the longest cookie string seen in the crawl subtree.
        # Check the domain
        if href in parsed_forms or urlparse(href).netloc != base_url or (max_depth is not None and depth > max_depth):
            return ''
        # Visit the current href
        parsed_relevant, request_cookies = relevant_parse(href)
        parsed_forms[href] = find_forms(parsed_relevant, href)
        # Find adjacent links
        links = find_links(parsed_relevant)
        # Checkpoint the crawl to disk every 10 visited pages
        if len(parsed_forms) % 10 == 0:
            Log.info('Writing result in ' + out_file + '...')
            set_json(parsed_forms, out_file)
        # Visit adjacent links
        for link in links:
            child_request_cookies = _deep_inject_form(link, depth+1)
            # Keep the longest cookie string found so far
            if len(child_request_cookies) > len(request_cookies):
                request_cookies = child_request_cookies
        return request_cookies

    cookies = _deep_inject_form(url)
    # Persist the final crawl result
    Log.info('Writing result in ' + out_file + '...')
    set_json(parsed_forms, out_file)
    Log.success('Result wrote in ' + out_file)
    Log.success('Website crawled! Found '+str(len(parsed_forms))+' pages')
    SqlmapClient.try_inject(parsed_forms, cookies)
    return parsed_forms
def task_target(*arguments):
    """Worker body executed by each MultiTask process/thread.

    NOTE(review): this reads `self` and `target` without defining them —
    it looks like a nested helper of MultiTask.start that closes over
    that method's scope; confirm before moving or calling it standalone.

    :param arguments: The (pre-split) arguments forwarded to `target`
    """
    result = None
    if self.tasks_type == MultiTask.MULTI_PROCESSING:
        curr_task = multiprocessing.process.current_process()
        Log.info(self.tag + 'started (PID=' + str(curr_task.pid) + ')')
    else:
        curr_task = threading.current_thread()
        Log.info(self.tag + 'started')
    if target is not None:
        result = target(*arguments)
    if result is not None:
        Log.success("Result: " + str(result))
        # Write the result to the result file
        Log.info('Writing result in ' + str(self.resfile))
        storage.overwrite_file(str(result), self.resfile)
        # Terminate all the other threads/processes
        if self.tasks_type == MultiTask.MULTI_PROCESSING:
            Log.info('Killing other processes')
            running_pids = storage.read_file(self.pidfile).split(', ')
            for pid in running_pids:
                pid = int(pid)
                if pid == curr_task.pid:
                    continue
                try:
                    os.kill(pid, signal.SIGKILL)
                    Log.info('Process ' + str(pid) + ' killed!')
                except Exception as e:
                    Log.error(str(e))
            Log.info(self.tag + 'end')
        else:
            Log.info('Ignoring other threads')
            # Kill itself (threads can't be killed individually,
            # so the whole process is taken down)
            pid = multiprocessing.process.current_process().pid
            Log.info(self.tag + 'end')
            os.kill(pid, signal.SIGKILL)
def start(self, target, args, asynchronous, cpu):
    """Run `target` across `cpu` parallel tasks, splitting listable args.

    :param target: callable The function each task executes (or None)
    :param args: tuple The arguments; listable ones are split into `cpu` parts
    :param asynchronous: bool When False, wait for all tasks and return the result
    :param cpu: int The number of parallel tasks to spawn
    :return: The result read from the result file when synchronous, else None
    """
    self.tasks = []

    def task_target(*arguments):
        # Worker body: run target, and when it yields a result, persist
        # it and stop the sibling tasks.
        result = None
        if self.tasks_type == MultiTask.MULTI_PROCESSING:
            curr_task = multiprocessing.process.current_process()
            Log.info(self.tag + 'started (PID=' + str(curr_task.pid) + ')')
        else:
            curr_task = threading.current_thread()
            Log.info(self.tag + 'started')
        if target is not None:
            result = target(*arguments)
        if result is not None:
            Log.success("Result: " + str(result))
            # Write the result to the result file
            Log.info('Writing result in ' + str(self.resfile))
            storage.overwrite_file(str(result), self.resfile)
            # Terminate all the other threads/processes
            if self.tasks_type == MultiTask.MULTI_PROCESSING:
                Log.info('Killing other processes')
                running_pids = storage.read_file(self.pidfile).split(', ')
                for pid in running_pids:
                    pid = int(pid)
                    if pid == curr_task.pid:
                        continue
                    try:
                        os.kill(pid, signal.SIGKILL)
                        Log.info('Process ' + str(pid) + ' killed!')
                    except Exception as e:
                        Log.error(str(e))
                Log.info(self.tag + 'end')
            else:
                Log.info('Ignoring other threads')
                # Kill itself (threads can't be killed individually,
                # so the whole process is taken down)
                pid = multiprocessing.process.current_process().pid
                Log.info(self.tag + 'end')
                os.kill(pid, signal.SIGKILL)

    for i in range(0, cpu):
        task_args = ()
        for arg in args:
            Log.info('Argument type: ' + str(type(arg)))
            if is_listable(arg):
                # Split the elements into 1/cpu parts
                p_list_len = (len(arg) / cpu) + (len(arg) % cpu)
                if type(arg) == dict:
                    iterator = iter(arg.items())
                    task_args += (
                        dict(itertools.islice(iterator, int((i * p_list_len)), int((i + 1) * p_list_len))),
                    )
                else:
                    task_args += (arg[int((i * p_list_len)):int(((i + 1) * p_list_len))],)
            else:
                # Non-listable arguments are passed whole to every task
                task_args += (arg,)
        task = self.Multitask(target=task_target, args=task_args)
        self.tasks.append(task)
    if self.tasks_type == MultiTask.MULTI_PROCESSING:
        # Record child pids so a winning worker can kill its siblings
        pids = []
        for task in self.tasks:
            task.start()
            # noinspection PyUnresolvedReferences
            pids.append(task.pid)
        storage.overwrite_file(str(pids).strip('[]'), self.pidfile)
    else:
        for task in self.tasks:
            task.start()
    if not asynchronous:
        # Wait for all tasks to finish their execution
        for task in self.tasks:
            task.join()
            Log.info('Task ' + str(task.name) + ' joined')
        Log.info('Reading result in ' + str(self.resfile))
        # Read the result from the result file
        res = storage.read_file(self.resfile)
        # Delete the pid file, if any
        storage.delete(self.pidfile)
        # Delete the result file
        storage.delete(self.resfile)
        Log.success('MultiTask -> result: ' + str(res))
        return res
    return None
def print_request(_request, limit=1000):
    """Log a readable summary of a response object.

    :param _request: The response object to log (url, status, headers, body)
    :param limit: int The max number of body characters to log
    """
    Log.info(str(_request.url))
    Log.info(' |--- status_code: ' + str(_request.status_code))
    Log.info(' |--- encoding: ' + str(_request.encoding))
    Log.info(' |--- headers:')
    for header_key, header_value in _request.headers.items():
        Log.info(' | |--- ' + str(header_key) + ': ' + str(header_value))
    Log.info(' |')
    try:
        # Json bodies are logged in full
        Log.info(' |-- data: ' + str(_request.json()))
    except ValueError:
        # Plain-text bodies are truncated to `limit` characters
        body = str(_request.text)
        if len(body) > limit:
            body = '[truncated]' + body[0:limit]
        Log.info(' |-- data: ' + body)
def django_gui():
    """Launch the web GUI by handing the wsgi app to gunicorn."""
    sys.path.insert(0, os.path.dirname(__file__))
    socket_addr = _get_bind_socket()
    Log.info("Starting " + str(APP_NAME) + ' GUI')
    # gunicorn reads its configuration from argv, so fake a command line
    sys.argv = [sys.argv[0], 'web.wsgi', '-b', socket_addr]
    gunicorn_run()
def pexec(*args): if APP_DEBUG: Log.info('CALLED: pexec' + str(args)) """
def my_interface(interface):
    """Store the network interface to use (no validation is applied).

    :param interface: str The network interface name
    :return: The result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.my_interface(' + str(interface) + ')')
    return Set.__set__(keys.MY_INTERFACE, interface)
def flag_regex(regex):
    """Store the regular expression used to recognize flags.

    :param regex: str The flag regular expression
    :return: The result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.flag_regex(' + str(regex) + ')')
    return Set.__set__(keys.FLAG_REGEX, regex)
def team_token(token):
    """Store the team token (no validation is applied).

    :param token: str The team token
    :return: The result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.team_token(' + str(token) + ')')
    return Set.__set__(keys.TEAM_TOKEN, token)