def _parse(self, url: str = None, html: str = None) -> (dict, str):
    """
    Make an HTML/URL parsing by processing ALL found tags
    :param url: The url to parse (or None)
    :param html: The html page to parse as string (or None)
    :return: dictionary of tags, cookies (or None when the request fails)
    """
    self.url = None
    self.base_url = None
    cookies = ''
    if url is not None:
        self.url = url
        url_parsed = urlparse(url)
        self.url_scheme = str(url_parsed.scheme)
        self.base_url = self.url_scheme + '://' + str(url_parsed.netloc)
        r = HttpRequest.request(url)
        if r is None:
            return None
        try:
            # A json body cannot be parsed as HTML: warn and keep going
            html = r.json()
            Log.warning('Trying to parse a json with HTML parser!')
        except ValueError:
            html = r.text
        if r.headers is not None:
            # Case-insensitive match (was three hard-coded casings); this
            # makes the lookup consistent with __parse
            for k, v in r.headers.items():
                if k.lower() == 'set-cookie':
                    cookies = v
                    break
    sorted_html, errors = tidy_document(html)  # Sort html (and fix errors)
    self.feed(sorted_html)
    return self.tags, cookies
def _deep_inject_form(href, depth=1):
    """Recursively crawl href (same domain only, bounded by max_depth),
    collect the forms of every visited page into parsed_forms, and
    return the longest cookie string seen along the way.
    """
    # Guard: already visited
    if href in parsed_forms:
        return ''
    # Guard: foreign domain
    if urlparse(href).netloc != base_url:
        return ''
    # Guard: depth limit reached (when a limit is set)
    if max_depth is not None and depth > max_depth:
        return ''
    # Visit the current href and harvest its forms
    parsed_relevant, cookies = HtmlParser.relevant_parse(href)
    parsed_forms[href] = HtmlParser.find_forms(parsed_relevant, href)
    adjacent_links = HtmlParser.find_links(parsed_relevant)
    # Periodically persist partial results to disk
    if len(parsed_forms) % 10 == 0:
        Log.info('Writing result in ' + out_file + '...')
        JsonSerializer.set_dictionary(parsed_forms, out_file)
    # Recurse into adjacent links, keeping the longest cookie string
    for adjacent in adjacent_links:
        child_cookies = _deep_inject_form(adjacent, depth + 1)
        if len(child_cookies) > len(cookies):
            cookies = child_cookies
    return cookies
def find_forms(parsed: dict or list, url=None) -> list:
    """
    Search forms inside parsed html (dict)
    :param parsed: A parsed html
    :param url: The parsed url (or None)
    :return: The list of found forms
    """
    forms = []
    if parsed is None:
        return forms
    if isinstance(parsed, dict):
        if 'form' == parsed.get('tag'):
            # Tolerate a form node without an 'attrs' entry (was an
            # AttributeError on None)
            attrs = parsed.get('attrs') or {}
            action = attrs.get('action')
            method = attrs.get('method')
            if action is None:
                action = url  # default the action to the page url itself
            if method is None:
                method = HttpRequest.Type.POST
            form = {
                'method': method,
                'action': action,
                'inputs': HtmlParser.find_inputs(parsed.get('children'))
            }
            forms.append(form)
        forms += HtmlParser.find_forms(parsed.get('children'), url)
    elif isinstance(parsed, list):
        for value in parsed:
            forms += HtmlParser.find_forms(value, url)
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return forms
def my_ip(ip):
    """Validate ip and store it as the MY_IP setting.
    :param ip: The ip address to store
    :return: False when ip is invalid, otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.my_ip(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Set.__set__(keys.MY_IP, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def team_player(ip):
    """Validate ip and add it to the TEAM_PLAYER list.
    :param ip: The team player's ip address
    :return: False when ip is invalid, otherwise the result of Add.__add__
    """
    if APP_DEBUG:
        Log.info('CALLED: Add.team_player(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Add.__add__(keys.TEAM_PLAYER, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def server_to_defend(ip='*'):
    """Remove one server to defend, or every one with the '*' wildcard.
    :param ip: The server ip, or '*' for all servers
    :return: False when ip is invalid, otherwise the result of Remove.__remove__
    """
    if APP_DEBUG:
        Log.info('CALLED: Remove.server_to_defend(' + str(ip) + ')')
    is_wildcard = ip == '*'
    if not is_wildcard and not validators.is_ip(ip):
        Log.error(str(ip) + ' is not a valid ip address')
        return False
    return Remove.__remove__(keys.SERVER_TO_DEFEND, ip)
def print_parsed(parsed: dict or list, depth: int = 0):
    """
    Print the result of methods @parse and @relevant_parse (so a parsed html)
    :param parsed: A parsed html
    :param depth: Current depth to build a pretty tree
    """
    space = ' ' * depth
    if isinstance(parsed, dict):
        print(space + '{')
        for key, value in parsed.items():
            if key == 'children':
                HtmlParser.print_parsed(value, depth + 1)
            elif is_listable(value):
                # Nested collection: print the key, then recurse deeper
                print((space + ' ') + str(key) + ':')
                HtmlParser.print_parsed(value, depth + 2)
            else:
                print((space + ' ') + str(key) + ': ' + str(value))
        print(space + '}')
    elif isinstance(parsed, list):
        for value in parsed:
            HtmlParser.print_parsed(value, depth + 1)
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
def submit_url(url):
    """Validate url and store it as the SUBMIT_URL setting.
    :param url: The url to store
    :return: False when url is invalid, otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.submit_url(' + str(url) + ')')
    if validators.is_url(url):
        return Set.__set__(keys.SUBMIT_URL, url)
    Log.error(str(url) + ' is not a valid url')
    return False
def server_to_defend(ip):
    """Validate ip and add it to the SERVER_TO_DEFEND list.
    :param ip: The server's ip address
    :return: False when ip is invalid, otherwise the result of Add.__add__
    """
    if APP_DEBUG:
        Log.info('CALLED: Add.server_to_defend(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Add.__add__(keys.SERVER_TO_DEFEND, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def deep_inject_form(href, depth=0):
    """
    Crawl href recursively (same domain only, bounded by max_depth),
    collecting forms and starting a sqlmap injection task per page.
    :param href: The url to visit
    :param depth: The current recursion depth
    :return: The longest cookie string found during the visit
    """
    # Check the domain
    if href in parsed_forms or \
            urlparse(href).netloc != base_url or \
            (max_depth is not None and depth > max_depth):
        return ''
    # Visit the current href
    parsed_relevant, request_cookies = HtmlParser.relevant_parse(href)
    # Find forms in page
    parsed_forms[href] = HtmlParser.find_forms(parsed_relevant, href)
    # Execute Sqlmap task
    task = SqlmapClient.try_inject_form(href, parsed_forms, request_cookies)
    tasks[task.id] = task
    Log.success('SQL injection of "' + href + '" started!')
    # Find adjacent links
    links = HtmlParser.find_links(parsed_relevant)
    # Visit adjacent links
    for link in links:
        # print('link: '+link)
        child_request_cookies = deep_inject_form(link, depth + 1)
        if len(child_request_cookies) > len(request_cookies):
            request_cookies = child_request_cookies
    # BUG FIX: the return was missing, so recursive calls yielded None and
    # len(child_request_cookies) raised TypeError in the caller
    return request_cookies
def multi_sequential_requests(urls: list, request_type: str = Type.GET,
                              data=None, json: dict or list = None,
                              headers: dict = None) -> dict:
    """
    Make multiple sequential requests
    :param urls: The list of target urls
    :param request_type: get|post|put|patch|delete
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`
    :param json: (optional) json data to send in the body of the :class:`Request`
    :param headers: The headers to send
    :return A dictionary of responses like {'url_1': <response>, 'url_2': <response>, ...}
    """
    if APP_DEBUG:
        # Fixed: the debug message used to report the wrong method
        # name ('multi_request')
        Log.info('CALLED: multi_sequential_requests(' + str(urls) + ', ' +
                 str(request_type) + ', ' + str(data) + ')')
    request_type = request_type.lower()
    response_dict = dict()
    for url in urls:
        response = HttpRequest.request(url, request_type, data, json, headers)
        if response is None:
            # Best-effort: failed requests are skipped, not fatal
            continue
        response_dict[url] = response
        if APP_DEBUG:
            try:
                print(response.json())
            except (JSONDecodeError, SimpleJSONDecodeError):
                print(response.text)
    return response_dict
def team_player(ip='*'):
    """Remove one team player, or every one with the '*' wildcard.
    :param ip: The team player's ip, or '*' for all
    :return: False when ip is invalid, otherwise the result of Remove.__remove__
    """
    if APP_DEBUG:
        Log.info('CALLED: Remove.team_player(' + str(ip) + ')')
    is_wildcard = ip == '*'
    if not is_wildcard and not validators.is_ip(ip):
        Log.error(str(ip) + ' is not a valid ip address')
        return False
    return Remove.__remove__(keys.TEAM_PLAYER, ip)
def game_server(ip):
    """Validate ip and store it as the GAME_SERVER setting.
    :param ip: The game server's ip address
    :return: False when ip is invalid, otherwise the result of Set.__set__
    """
    if APP_DEBUG:
        Log.info('CALLED: Set.game_server(' + str(ip) + ')')
    if validators.is_ip(ip):
        return Set.__set__(keys.GAME_SERVER, ip)
    Log.error(str(ip) + ' is not a valid ip address')
    return False
def find_links(parsed: dict or list) -> set:
    """
    Search links inside a parsed html (dict)
    :param parsed: A parsed html
    :return: A set of found links
    """
    links = set()
    if parsed is None:
        return links
    if isinstance(parsed, dict):
        attrs = parsed.get('attrs')
        if attrs is not None:
            # Forms link through 'action', anchors through 'href'
            url = None
            if 'form' == parsed.get('tag'):
                url = attrs.get('action')
            elif 'a' == parsed.get('tag'):
                url = attrs.get('href')
            if url is not None:
                links.add(url)
        links = links.union(HtmlParser.find_links(parsed.get('children')))
    elif isinstance(parsed, list):
        for value in parsed:
            links = links.union(HtmlParser.find_links(value))
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return links
def try_inject_forms(forms: dict, cookies: str = '', delay: int = 1,
                     random_agent: bool = False) -> dict:
    """
    Try injection with all provided forms
    :param forms: dict A dictionary of { "<url>": [ <parsed_form_1>, <parsed_form_2>, ... ], ... }
    :param cookies: str the request cookies
    :param delay: int The delay on each request
    :param random_agent: True if set a random agent for each sqlmap request
    :return: The started sqlmap tasks, indexed by task id
    :rtype: dict
    """
    sqlmap_tasks = dict()
    Log.info('Trying injection with cookies: ' + str(cookies))
    for url, page_forms in forms.items():
        page_forms: list  # The forms in page returned by url
        for page_form in page_forms:
            page_form: dict  # The attributes and inputs of form
            action: str = page_form.get('action')
            inputs: dict = page_form.get('inputs')
            method: str = page_form.get('method')
            # Fixed default agent, unless sqlmap should randomize it itself
            if random_agent:
                agent = None
            else:
                agent = HttpRequest.default_agent()
            # NOTE(review): dbms is hard-coded to MySQL — confirm intended
            task_options = {
                'dbms': 'MySQL',
                'cookie': cookies,
                'agent': agent,
                'referer': url,
                'delay': delay,
                'randomAgent': random_agent,
                'method': method,
                'url': action,
                'data': SqlmapClient.__get_data(inputs)
            }
            # When the form carries a csrf token, tell sqlmap how to
            # refresh it on every request
            csrf_token = SqlmapClient.__get_csrf_token(inputs)
            if csrf_token is not None:
                csrf_token_name = csrf_token.get('name')
                task_options.update({
                    'csrfUrl': url,
                    'csrfMethod': HttpRequest.Type.GET,
                    'csrfToken': csrf_token_name,
                })
            # Create the task, apply the options, then start the scan
            sqlmap_task = SqlmapClient._task_new()
            sqlmap_task.option_set(task_options)
            sqlmap_tasks[sqlmap_task.id] = sqlmap_task
            sqlmap_task.scan_start()
    return sqlmap_tasks
def browser_target():
    """ The function that launch the browser """
    # Drop privileges to the configured user before launching anything
    set_owner_process(user)
    # NOTE(review): the '******' below looks like secret-scrubbed/garbled
    # source (the expression is not valid Python as written) — recover the
    # original log/launch statements from version control before shipping.
    Log.info('Launching browser with User: '******'Web browser opened')
def django_gui():
    """Start the Django GUI: make this module importable, run migrations,
    open the browser on the bound socket and serve through gunicorn."""
    # Ensure web.wsgi is resolvable from this directory
    sys.path.insert(0, os.path.dirname(__file__))
    host_port = _get_bind_socket()
    Log.info("Starting " + str(APP_NAME) + ' GUI')
    # gunicorn reads its arguments from sys.argv
    sys.argv = [sys.argv[0], 'web.wsgi', '-b', host_port]
    django_cmd(['migrate'])
    _launch_browser(host_port)
    gunicorn_run()
def get(self, request, *args, **kwargs):
    """
    Render the job page for the requested job id.
    :type request: django.core.handlers.wsgi.WSGIRequest
    :return: django.http.HttpResponse
    """
    params = request.GET.dict()
    Log.info("Showing job #" + str(params.get('job_id')))
    return render(request, self.template_name)
def get_dictionary(file: str) -> dict:
    """
    Load a json file into a dictionary.
    :param file: A file that contains a json
    :return: A dictionary (empty when file is not a regular file)
    """
    if os.path.isfile(file):
        return JsonSerializer.load_json(file)
    Log.error(file + ' is not a file')
    return dict()
def _post_job(self, request) -> JsonResponse: """ :type request: django.core.handlers.wsgi.WSGIRequest :return: django.http.JsonResponse """ # noinspection PyTypeChecker job: AbstractJobModel = None request_params: dict = request.POST.dict() job_id = request_params.get('id') try: job_id = int(job_id) job = self.model_class.objects.get(id=job_id) except ValueError: pass except Exception as e: Log.error(str(e)) if job is None: return JsonResponse( {'message': 'Unable to find the requested job'}, status=400) signal_job = request_params.get('signal') if signal_job is not None: signal_job = int(signal_job) if signal_job == 0: # Custom signal 0 = Restart capturing job_new = self._copy_job(job) job_id = job_new.id signal_job = signal.SIGABRT try: job.kill(signal_job) except ProcessLookupError: Log.warning("The process " + str(job.pid) + " does not exists") if signal_job == signal.SIGABRT: # 6 = Abort permanently by cleaning job if not job.delete(): return JsonResponse( {'message': 'Unable to delete the job'}, status=400) return JsonResponse( { 'id': job_id, 'signal': signal_job, 'message': 'Signal sent' }, status=200) job.self_check() page = request_params.get('page') page_size = request_params.get('page_size') pagination = self.pagination(job.json_dict, page, page_size) pagination.update({'job': {'id': job_id, 'status': job.status_name}}) return JsonResponse(pagination, status=200)
def kill(self, sig: int):
    """
    Send a signal to the process which is running this job.
    :param sig: The signal as integer (eg. 9 for SIGKILL)
    """
    target = " to job #" + str(self.id) + ' (' + str(self.pid) + ')'
    Log.info("Sending signal " + str(sig) + target)
    # Record the signal as the job status before delivering it
    self.status = sig
    os.kill(self.pid, sig)
    self.save()
    Log.success("Signal " + str(sig) + " sent" + target)
def find_forms(parsed: dict or list, url=None) -> dict:
    """
    Search forms inside parsed html (dict)
    :param parsed: A parsed html
    :param url: The parsed url (or None)
    :return: The found form tree as a dict (not a list, despite the
        original summary): either a single form node or an int-indexed
        dict of child results
    """
    form = dict()
    if parsed is None:
        return form
    if type(parsed) == dict:
        parsed_children = parsed.get('children')
        if 'form' == parsed.get('tag'):
            attrs = parsed.get('attrs')
            action = attrs.get('action')
            method = attrs.get('method')
            name = attrs.get('name')
            if action is None:
                action = url  # default the action to the page url
            if method is None:
                method = HttpRequest.Type.POST
            # Rebuild attrs with only the relevant keys
            attrs = {'method': method, 'action': action, 'name': name}
            if name is None:
                name = action
            form = {
                'tag': 'form',
                'attrs': attrs,
                'name': name,
                'children': HtmlParser.__find_inputs(parsed_children)
            }
        # Recurse into children regardless of whether this node is a form
        children = HtmlParser.find_forms(parsed_children, url)
        if children is not None and len(children) > 0:
            if len(form) > 0:
                # NOTE(review): this overwrites the inputs stored in
                # form['children'] above with nested forms — confirm intended
                form['children'] = children
            else:
                if len(children) == 1 and children.get('tag') is None:
                    # Single wrapped child: unwrap it
                    form = children.get(0)
                else:
                    form = children
    elif type(parsed) is list:
        # Collect non-empty child results under consecutive int keys
        children = dict()
        index = 0
        for value in parsed:
            child = HtmlParser.find_forms(value, url)
            if len(child) > 0:
                children[index] = child
                index += 1
        if len(children) == 1 and children.get(0).get('tag') is None:
            form = children.get(0)
        else:
            form = children
    else:
        Log.error(str(parsed) + ' is not a valid parsed content!')
    return form
def load_json(file: str) -> dict:
    """
    Deserialize a json file.
    :param file: The file to read
    :return: A dictionary (empty when the file content is not valid json)
    """
    try:
        with open(file, 'r') as fp:
            return json.load(fp)
    except json.decoder.JSONDecodeError as error:
        Log.error(str(error))
    return dict()
def inject_form(url=None, html=None):
    """
    Search forms in the page returned by url (or inside the html) and
    hand every found form to sqlmap for injection.
    :param url: str The url to visit (or None)
    :param html: str the html code to analyze (or None)
    """
    forms = dict()
    forms[url], cookies = HtmlParser.form_parse(url, html)
    Log.success('Html parsed! Found ' + str(len(forms[url])) + ' forms')
    SqlmapClient.try_inject(forms, cookies)
def __parse(self, url: str = None, html: str = None,
            cookies: str = None) -> (dict, str):
    """
    Make an HTML/URL parsing by processing ALL found tags
    :param url: The url to parse (or None)
    :param html: The html page to parse as string (or None)
    :param cookies: The cookies to use on parsing
    :return: dictionary of tags, cookies (or None on failed/unacceptable
        responses)
    """
    self.url = None
    self.base_url = None
    is_image = False
    if url is not None:
        self.url = url
        url_parsed = urlparse(url)
        self.url_scheme = str(url_parsed.scheme)
        self.base_url = self.url_scheme + '://' + str(url_parsed.netloc)
        r = HttpRequest.request(url, cookies=cookies)
        if r is None:
            return None
        # Give up on error responses and on content types we never parse
        if r.status_code >= 400 or r.headers.get(
                'Content-Type') in HtmlParser._unacceptable_content_types:
            return None
        try:
            # A json body cannot be parsed as HTML: warn and keep going
            html = r.json()
            Log.warning('Trying to parse a json with HTML parser!')
        except ValueError:
            html = r.text
        if r.headers is not None:
            # Keep the last Set-Cookie header (case-insensitive)
            for k, v in r.headers.items():
                if k.lower() == 'set-cookie':
                    cookies = v
        if HttpRequest.is_image(r):
            # For images, only the embedded XMP metadata block is parsed;
            # the +12 slice end covers the closing '</x:xmpmeta>' tag
            is_image = True
            xmp_start = html.find('<x:xmpmeta')
            xmp_end = html.find('</x:xmpmeta')
            xmp_str = html[xmp_start:xmp_end + 12]
            html = xmp_str
    if is_image:
        sorted_html = html
    else:
        sorted_html, errors = tidy_document(
            html)  # Sort html (and fix errors)
    self.feed(sorted_html)
    if cookies is None:
        cookies = ''
    return self.tags, cookies
def __init__(self, host: str = DEFAULT_HOST, port: int = DEFAULT_PORT):
    """
    :param host: The host
    :param port: The port
    """
    self.host = host
    self.port = port
    self.base_url = 'http://' + self.host + ':' + str(port)
    # Run the sqlmap-api server in a parallel thread
    Log.info("Starting sqlmap-api server in a parallel thread")
    MultiTask.multithread(sqlmap_server, (self.host, self.port), True, 1)
    # Poll until the server socket accepts connections
    while not check_socket(self.host, self.port):
        sleep(0.1)
    Log.success("Sqlmap-api server started!")
def decrypt(text: str):
    """
    Try to decrypt an md5 hash through each known web api in turn.
    :param text: The md5 hash to decrypt
    :return: The decrypted string, or None when every api fails
    """
    for api in Md5Crypto.Api.all():
        response = HttpRequest.request(api['url'] + text)
        if response is None:
            continue
        try:
            payload = response.json()
        except json.decoder.JSONDecodeError:
            # Non-json answer: try the next api
            continue
        decrypted = api['get_result'](payload)
        if decrypted is not None:
            return decrypted
    Log.error('md5: unable to decrypt: ' + text)
    return None
def __inject_forms(url, max_depth) -> dict:
    """
    Search a form in the page returned by url.
    If it doesn't find a form, or the injection can't be done,
    it visit the website in search for other forms
    :param url: str The url to visit
    :param max_depth: int The max depth during the visit
    :return A dictionary of SQL injection tasks
    """
    base_url = urlparse(url).netloc
    parsed_forms = dict()
    tasks = dict()

    def deep_inject_form(href, depth=0):
        """Visit href recursively and start an injection task per page.
        :return: The longest cookie string met during the visit
        """
        # Check the domain
        if href in parsed_forms or \
                urlparse(href).netloc != base_url or \
                (max_depth is not None and depth > max_depth):
            return ''
        # Visit the current href
        parsed_relevant, request_cookies = HtmlParser.relevant_parse(href)
        # Find forms in page
        parsed_forms[href] = HtmlParser.find_forms(parsed_relevant, href)
        # Execute Sqlmap task
        task = SqlmapClient.try_inject_form(href, parsed_forms,
                                            request_cookies)
        tasks[task.id] = task
        Log.success('SQL injection of "' + href + '" started!')
        # Find adjacent links
        links = HtmlParser.find_links(parsed_relevant)
        # Visit adjacent links
        for link in links:
            child_request_cookies = deep_inject_form(link, depth + 1)
            if len(child_request_cookies) > len(request_cookies):
                request_cookies = child_request_cookies
        # BUG FIX: the return was missing, so recursive calls yielded None
        # and len(child_request_cookies) raised TypeError in the caller
        return request_cookies

    Log.success('SQL injection started!')
    deep_inject_form(url)
    Log.success('Website crawled! Found ' + str(len(parsed_forms)) + ' forms')
    return tasks
def request(url: str, request_type: str = HttpRequest.Type.GET,
            json: dict or list = None) -> dict:
    """
    Send a request to sqlmap-api server and then load the data json as dict
    :param url: The sqlmap-api url (eg. "http://127.0.0.1:8775/task/new")
    :param request_type: get|post|put|patch|delete
    :param json: The json to send
    :raise requests.RequestException: when the request fails or the server
        does not report success
    :rtype: dict
    """
    response = HttpRequest.request(url, request_type, json=json)
    if response is None:
        # HttpRequest.request returns None on failure: raise the documented
        # exception instead of crashing on response.text (AttributeError)
        Log.error('Request to ' + url + ' failed')
        raise requests.RequestException('Request to ' + url + ' failed')
    r_data = JsonSerializer.load_json(response.text)
    Log.info('Response data of ' + url + ': ' + str(r_data))
    # .get also covers an unparsable body (load_json returns an empty dict),
    # which previously raised a KeyError here
    if not r_data.get('success'):
        Log.error('Response data of ' + url + ' has { success: False }')
        raise requests.RequestException('Request to ' + url + ' failed')
    return r_data
def __init__(self, tasks_type):
    """
    Initialize the multitask manager.
    :param tasks_type: One of MultiTask.tasks_types
        (falls back to MULTI_PROCESSING when invalid)
    """
    if tasks_type not in MultiTask.tasks_types:
        # BUG FIX: log the INVALID value before overwriting it — the
        # original reassigned first, so the error reported the fallback
        # value as "not a valid tasks type"
        Log.error(str(tasks_type) + ' is not a valid tasks type!')
        tasks_type = MultiTask.MULTI_PROCESSING
    self.tasks_type = tasks_type
    if self.tasks_type == MultiTask.MULTI_PROCESSING:
        self.Multitask = multiprocessing.Process
        self.tag = 'Process '
    else:
        self.Multitask = threading.Thread
        self.tag = 'Thread '
    self.tasks = []
    # Public API instead of multiprocessing.process.current_process()
    pid = str(multiprocessing.current_process().pid)
    # Compute the timestamp once so both files share the same suffix
    # (two separate calls could straddle a tick and diverge)
    base = APP_TMP + '/multitask.' + pid + '.' + timestamp()
    # File holding the child pids (when multiprocessing)
    self.pidfile = base + '.pids'
    # File holding the result
    self.resfile = base + '.res'