def __parse(self, url: str = None, html: str = None, cookies: str = None) -> (dict, str):
    """
    Parse a URL or an HTML string by processing ALL found tags
    :param url: The url to parse (or None)
    :param html: The html page to parse as string (or None)
    :param cookies: The cookies to use on parsing
    :return: dictionary of tags, cookies
    """
    self.url = None
    self.base_url = None
    is_image = False
    if url is not None:
        self.url = url
        url_parsed = urlparse(url)
        self.url_scheme = str(url_parsed.scheme)
        self.base_url = self.url_scheme + '://' + str(url_parsed.netloc)
        r = HttpRequest.request(url, cookies=cookies)
        if r is None:
            return None
        if r.status_code >= 400 or \
                r.headers.get('Content-Type') in HtmlParser._unacceptable_content_types:
            return None
        try:
            html = r.json()
            Log.warning('Trying to parse a json with HTML parser!')
        except ValueError:
            html = r.text
        if r.headers is not None:
            # Look up the Set-Cookie header case-insensitively
            for k, v in r.headers.items():
                if k.lower() == 'set-cookie':
                    cookies = v
        if HttpRequest.is_image(r):
            # Images are not tidied: only their XMP metadata block is parsed
            is_image = True
            xmp_start = html.find('<x:xmpmeta')
            xmp_end = html.find('</x:xmpmeta')
            xmp_str = html[xmp_start:xmp_end + 12]
            html = xmp_str
    if is_image:
        sorted_html = html
    else:
        sorted_html, errors = tidy_document(html)  # Clean up the html (and fix markup errors)
    self.feed(sorted_html)
    if cookies is None:
        cookies = ''
    return self.tags, cookies
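
# Minimal sketch of the XMP-metadata slicing step used above, shown as a standalone
# helper (the function name is illustrative, it is not part of HtmlParser):
def _extract_xmp(body: str) -> str:
    xmp_start = body.find('<x:xmpmeta')
    xmp_end = body.find('</x:xmpmeta')
    if xmp_start < 0 or xmp_end < 0:
        return ''
    # '</x:xmpmeta>' is 12 characters long, hence the +12 on the end index
    return body[xmp_start:xmp_end + 12]

# _extract_xmp('...<x:xmpmeta xmlns:x="adobe:ns:meta/">...</x:xmpmeta>...')
# -> '<x:xmpmeta xmlns:x="adobe:ns:meta/">...</x:xmpmeta>'
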
def _parse(self, url: str = None, html: str = None) -> (dict, str):
    """
    Parse a URL or an HTML string by processing ALL found tags
    :param url: The url to parse (or None)
    :param html: The html page to parse as string (or None)
    :return: dictionary of tags, cookies
    """
    self.url = None
    self.base_url = None
    cookies = ''
    if url is not None:
        self.url = url
        url_parsed = urlparse(url)
        self.url_scheme = str(url_parsed.scheme)
        self.base_url = self.url_scheme + '://' + str(url_parsed.netloc)
        r = HttpRequest.request(url)
        if r is None:
            return None
        try:
            html = r.json()
            Log.warning('Trying to parse a json with HTML parser!')
        except ValueError:
            html = r.text
        if r.headers is not None:
            # Look up the Set-Cookie header case-insensitively
            for k in r.headers.keys():
                if k.lower() == 'set-cookie':
                    cookies = r.headers.get(k)
                    break
    sorted_html, errors = tidy_document(html)  # Clean up the html (and fix markup errors)
    self.feed(sorted_html)
    return self.tags, cookies
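
# Minimal sketch of the cleanup step, assuming tidy_document here is pytidylib's
# tidylib.tidy_document (it returns the repaired document and tidy's warning report):
from tidylib import tidy_document

fixed_html, tidy_errors = tidy_document('<div><p>unclosed tags</div>')
# fixed_html is a well-formed document; tidy_errors holds tidy's warning/error report
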
def try_inject_forms(forms: dict, cookies: str = '', delay: int = 1, random_agent: bool = False) -> dict:
    """
    Try injection against all provided forms
    :param forms: dict A dictionary of { "<url>": [ <parsed_form_1>, <parsed_form_2>, ... ], ... }
    :param cookies: str The request cookies
    :param delay: int The delay between each request
    :param random_agent: True to use a random agent for each sqlmap request
    :rtype: dict
    """
    sqlmap_tasks = dict()
    Log.info('Trying injection with cookies: ' + str(cookies))
    for url, page_forms in forms.items():
        page_forms: list    # The forms in the page returned by url
        for page_form in page_forms:
            page_form: dict     # The attributes and inputs of the form
            action: str = page_form.get('action')
            inputs: dict = page_form.get('inputs')
            method: str = page_form.get('method')
            if random_agent:
                agent = None
            else:
                agent = HttpRequest.default_agent()
            task_options = {
                'dbms': 'MySQL',
                'cookie': cookies,
                'agent': agent,
                'referer': url,
                'delay': delay,
                'randomAgent': random_agent,
                'method': method,
                'url': action,
                'data': SqlmapClient.__get_data(inputs)
            }
            csrf_token = SqlmapClient.__get_csrf_token(inputs)
            if csrf_token is not None:
                # Have sqlmap refresh the anti-CSRF token from the form page (GET) before each request
                csrf_token_name = csrf_token.get('name')
                task_options.update({
                    'csrfUrl': url,
                    'csrfMethod': HttpRequest.Type.GET,
                    'csrfToken': csrf_token_name,
                })
            # A new SqlmapTask is created and started for each form
            sqlmap_task = SqlmapClient._task_new()
            sqlmap_task.option_set(task_options)
            sqlmap_tasks[sqlmap_task.id] = sqlmap_task
            sqlmap_task.scan_start()
    return sqlmap_tasks
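
# Illustrative input for try_inject_forms; the exact shape of each parsed form and
# of its 'inputs' entries is an assumption based on the keys read above, not a
# definitive description of the crawler's output:
example_forms = {
    'https://target.example/login': [{
        'method': 'POST',
        'action': 'https://target.example/login',
        'inputs': {
            'username': {'name': 'username', 'type': 'text', 'value': ''},
            'password': {'name': 'password', 'type': 'password', 'value': ''},
        },
    }],
}
# sqlmap_tasks = SqlmapClient.try_inject_forms(example_forms, cookies='PHPSESSID=abc123', delay=1)
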
def _update_manufacturer_dict(self):
    manufacturer_response = HttpRequest.request(MacManufacturer._MANUFACTURERS_URL)
    if manufacturer_response is None:
        return
    if manufacturer_response.text is None:
        return
    manufacturer_dict = dict()
    manufacturer_list = manufacturer_response.text.splitlines()
    for manufacturer in manufacturer_list:
        if len(manufacturer) < 1:
            continue
        if manufacturer[0] == '#':
            # Skip comment lines
            continue
        manufacturer_details = manufacturer.split('\t')
        i = 0
        mac = None
        lookup_dict = {
            MacManufacturer._MANUFACTURERS_DETAIL_DICT[1]: None,
            MacManufacturer._MANUFACTURERS_DETAIL_DICT[2]: None,
            MacManufacturer._MANUFACTURERS_DETAIL_DICT[3]: None
        }
        for detail in manufacturer_details:
            if detail == '':
                continue
            if i == 0:
                # MAC address
                mac_detail = detail.split('/')
                if len(mac_detail) == 2:
                    # The mac has a sub mask, so the dict key is the masked prefix
                    sub_mask = int(mac_detail[1]) / 4
                    mac_sub_mask = floor(sub_mask + (sub_mask / 2))
                    mac = mac_detail[0][0:mac_sub_mask]
                elif len(mac_detail) == 1:
                    # The mac has no sub mask
                    mac = mac_detail[0]
                else:
                    Log.error("Wrong mac address: " + str(detail))
                    break
            if i >= len(MacManufacturer._MANUFACTURERS_DETAIL_DICT):
                Log.error("Wrong manufacturer details: " + str(manufacturer_details))
                break
            lookup_dict[MacManufacturer._MANUFACTURERS_DETAIL_DICT[i]] = detail
            i += 1
        if mac is None:
            Log.error("Wrong manufacturer details: " + str(manufacturer_details))
            continue
        manufacturer_dict[mac] = lookup_dict
    if len(manufacturer_dict) > 0:
        self._manufacturer_dict = manufacturer_dict
        JsonSerializer.set_dictionary(self._manufacturer_dict, MacManufacturer._MANUFACTURERS_JSON)
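
# Worked example of the prefix-length arithmetic above, for a hypothetical "<mac>/28"
# entry: 28 bits correspond to 28 / 4 = 7 hex digits, and with one ':' separator every
# two digits the dict key keeps the first floor(7 + 7/2) = 10 characters of the address.
from math import floor

sub_mask = 28 / 4                            # 7.0 hex digits
key_len = floor(sub_mask + (sub_mask / 2))   # 10 characters including separators
assert '00:55:DA:50:00:00'[0:key_len] == '00:55:DA:5'
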
def decrypt(text: str):
    for api in Md5Crypto.Api.all():
        r = HttpRequest.request(api['url'] + text)
        if r is None:
            continue
        try:
            r_json = r.json()
        except json.decoder.JSONDecodeError:
            continue
        result = api['get_result'](r_json)
        if result is not None:
            return result
    Log.error('md5: unable to decrypt: ' + text)
    return None
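
# Illustrative shape of an Api entry consumed by decrypt(); the URL and the response
# layout are assumptions, not the actual services behind Md5Crypto.Api.all():
example_api = {
    'url': 'https://md5-lookup.example/api?hash=',
    'get_result': lambda r_json: r_json.get('result')
}
# decrypt() appends the hash to api['url'], requests it, decodes the JSON body and
# lets api['get_result'] pull the plaintext out of that JSON (or return None).
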
def request(url: str, request_type: str = HttpRequest.Type.GET, json: dict or list = None) -> dict:
    """
    Send a request to the sqlmap-api server and load the response json as a dict
    :param url: The sqlmap-api url (eg. "http://127.0.0.1:8775/task/new")
    :param request_type: get|post|put|patch|delete
    :param json: The json to send
    :rtype: dict
    """
    response = HttpRequest.request(url, request_type, json=json)
    r_data = JsonSerializer.load_json(response.text)
    Log.info('Response data of ' + url + ': ' + str(r_data))
    if not r_data['success']:
        Log.error('Response data of ' + url + ' has { success: False }')
        raise requests.RequestException('Request to ' + url + ' failed')
    return r_data
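
# Illustrative call against a locally running sqlmap-api instance (url taken from the
# docstring above); which extra fields come back besides 'success' depends on the
# sqlmap REST API endpoint and is not assumed here:
# r_data = request('http://127.0.0.1:8775/task/new')
# r_data.get('success')  # -> True when the call succeeded
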
def try_inject(forms: dict, cookies: str = '', delay: int = 0, random_agent: bool = False) -> dict:
    """
    Try injection against all provided forms
    :param forms: dict A dictionary of { "<url>": [ <parsed_form_1>, <parsed_form_2>, ... ], ... }
    :param cookies: str The request cookies
    :param delay: int The delay between each request
    :param random_agent: True to use a random agent for each sqlmap request
    :rtype: dict
    """
    sqlmap_tasks = dict()
    Log.info('Trying injection with cookies: ' + str(cookies))
    for url, page_forms in forms.items():
        page_forms: list    # The forms in the page returned by url
        for page_form in page_forms:
            page_form: dict     # The attributes and inputs of the form
            action = page_form.get('action')
            inputs = page_form.get('inputs')
            method = page_form.get('method')
            pprint(inputs)
            # A new SqlmapTask is created for each form
            sqlmap_task = SqlmapClient._task_new()
            sqlmap_task.option_set({
                'cookie': cookies,
                'agent': HttpRequest.default_agent(),
                'referer': url,
                'delay': delay,
                'randomAgent': random_agent,
                'method': method,
                'url': action
            })
            sqlmap_task.option_get([
                'referer', 'agent', 'delay',
                'randomAgent', 'method', 'url'
            ])
            sqlmap_tasks[sqlmap_task.id] = sqlmap_task
    return sqlmap_tasks
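
# Illustrative follow-up on the returned tasks; note that, unlike try_inject_forms
# above, try_inject only configures each task and leaves starting the scan to the caller:
# sqlmap_tasks = SqlmapClient.try_inject(example_forms, cookies='PHPSESSID=abc123')
# for task_id, task in sqlmap_tasks.items():
#     task.scan_start()
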