def insert(self, values):
    """
    This function is used for inserting row(s) into current table.
    """

    if len(values) == len(self.columns):
        self.execute('INSERT INTO "%s" VALUES (%s)' % (self.name, ','.join(['?'] * len(values))), safechardecode(values))
    else:
        errMsg = "wrong number of columns used in replicating insert"
        raise SqlmapValueException(errMsg)
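# A minimal, self-contained sketch of the '?' placeholder technique insert()
# above relies on, using only the standard sqlite3 module. The names here
# (replica_insert, the "users" table) are illustrative, not sqlmap API.
import sqlite3

def replica_insert(conn, table, columns, values):
    # One '?' per value keeps row data out of the SQL text entirely, so
    # arbitrary dumped content cannot break the INSERT statement.
    if len(values) != len(columns):
        raise ValueError("wrong number of columns used in replicating insert")
    placeholders = ','.join(['?'] * len(values))
    conn.execute('INSERT INTO "%s" VALUES (%s)' % (table, placeholders), values)

conn = sqlite3.connect(":memory:")
conn.execute('CREATE TABLE "users" (id INTEGER, name TEXT)')
replica_insert(conn, "users", ("id", "name"), (1, "admin"))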
def evalCmd(self, cmd, first=None, last=None):
    retVal = None

    if self.webBackdoorUrl and not isStackingAvailable():
        retVal = self.webBackdoorRunCmd(cmd)
    elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
        retVal = self.udfEvalCmd(cmd, first, last)
    elif Backend.isDbms(DBMS.MSSQL):
        retVal = self.xpCmdshellEvalCmd(cmd, first, last)
    else:
        errMsg = "Feature not yet implemented for the back-end DBMS"
        raise SqlmapUnsupportedFeatureException(errMsg)

    return safechardecode(retVal)
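# Illustrative sketch (not sqlmap API) of the dispatch pattern evalCmd() above
# follows: pick a command-execution channel from the identified back-end DBMS,
# falling back to a web backdoor when stacked queries are unavailable. The
# channel callables and DBMS names below are assumptions for the example.
def eval_cmd(cmd, dbms, channels, web_backdoor=None, stacking=True):
    if web_backdoor and not stacking:
        return web_backdoor(cmd)              # out-of-band web shell
    elif dbms in ("MySQL", "PostgreSQL"):
        return channels["udf"](cmd)           # sys_eval()-style UDF
    elif dbms == "Microsoft SQL Server":
        return channels["xp_cmdshell"](cmd)   # xp_cmdshell stored procedure
    raise NotImplementedError("feature not yet implemented for the back-end DBMS")

# Example wiring with stub channels:
print(eval_cmd("id", "MySQL", {"udf": lambda c: "uid=0(root)", "xp_cmdshell": lambda c: ""}))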
def pivotDumpTable(table, colList, count=None, blind=True):
    lengths = {}
    entries = {}

    dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind

    validColumnList = False
    validPivotValue = False

    if count is None:
        query = dumpNode.count % table
        query = whereQuery(query)
        count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, time=False, expected=EXPECTED.INT)

    if isinstance(count, basestring) and count.isdigit():
        count = int(count)

    if count == 0:
        infoMsg = "table '%s' appears to be empty" % unsafeSQLIdentificatorNaming(table)
        logger.info(infoMsg)

        for column in colList:
            lengths[column] = len(column)
            entries[column] = []

        return entries, lengths
    elif not isNumPosStrValue(count):
        return None

    for column in colList:
        lengths[column] = 0
        entries[column] = BigArray()

    colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT))

    if conf.pivotColumn:
        if any(re.search(r"(.+\.)?%s" % re.escape(conf.pivotColumn), _, re.I) for _ in colList):
            infoMsg = "using column '%s' as a pivot " % conf.pivotColumn
            infoMsg += "for retrieving row data"
            logger.info(infoMsg)

            validPivotValue = True

            colList.remove(conf.pivotColumn)
            colList.insert(0, conf.pivotColumn)
        else:
            warnMsg = "column '%s' not " % conf.pivotColumn
            warnMsg += "found in table '%s'" % table
            logger.warn(warnMsg)

    if not validPivotValue:
        for column in colList:
            infoMsg = "fetching number of distinct "
            infoMsg += "values for column '%s'" % column
            logger.info(infoMsg)

            query = dumpNode.count2 % (column, table)
            query = whereQuery(query)
            value = inject.getValue(query, blind=blind, union=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

            if isNumPosStrValue(value):
                validColumnList = True

                if value == count:
                    infoMsg = "using column '%s' as a pivot " % column
                    infoMsg += "for retrieving row data"
                    logger.info(infoMsg)

                    validPivotValue = True

                    colList.remove(column)
                    colList.insert(0, column)
                    break

        if not validColumnList:
            errMsg = "all column name(s) provided are non-existent"
            raise SqlmapNoneDataException(errMsg)

        if not validPivotValue:
            warnMsg = "no proper pivot column provided (with unique values)."
            warnMsg += " It won't be possible to retrieve all rows"
            logger.warn(warnMsg)

    pivotValue = " "
    breakRetrieval = False

    try:
        for i in xrange(count):
            if breakRetrieval:
                break

            for column in colList:
                def _(pivotValue):
                    if column == colList[0]:
                        query = dumpNode.query.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, column), unescaper.escape(pivotValue, False))
                    else:
                        query = dumpNode.query2.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, colList[0]), unescaper.escape(pivotValue, False))

                    query = whereQuery(query)
                    return unArrayizeValue(inject.getValue(query, blind=blind, time=blind, union=not blind, error=not blind))

                value = _(pivotValue)

                if column == colList[0]:
                    if isNoneValue(value):
                        for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))):
                            value = _(pivotValue)

                            if not isNoneValue(value):
                                break

                    if isNoneValue(value):
                        breakRetrieval = True
                        break

                    pivotValue = safechardecode(value)

                if conf.limitStart or conf.limitStop:
                    if conf.limitStart and (i + 1) < conf.limitStart:
                        warnMsg = "skipping first %d pivot " % conf.limitStart
                        warnMsg += "point values"
                        singleTimeWarnMessage(warnMsg)
                        break
                    elif conf.limitStop and (i + 1) > conf.limitStop:
                        breakRetrieval = True
                        break

                value = "" if isNoneValue(value) else unArrayizeValue(value)
                lengths[column] = max(lengths[column], len(value) if value else 0)
                entries[column].append(value)

    except KeyboardInterrupt:
        warnMsg = "user aborted during enumeration. sqlmap "
        warnMsg += "will display partial output"
        logger.warn(warnMsg)

    except SqlmapConnectionException, e:
        errMsg = "connection exception detected. sqlmap "
        errMsg += "will display partial output"
        errMsg += "'%s'" % e
        logger.critical(errMsg)

    # Return whatever has been retrieved so far (possibly a partial dump)
    return entries, lengths
def __pivotDumpTable(self, table, colList, count=None, blind=True):
    lengths = {}
    entries = {}

    dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind

    validColumnList = False
    validPivotValue = False

    if count is None:
        query = dumpNode.count % table
        count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, expected=EXPECTED.INT)

    if isinstance(count, basestring) and count.isdigit():
        count = int(count)

    if count == 0:
        infoMsg = "table '%s' appears to be empty" % unsafeSQLIdentificatorNaming(table)
        logger.info(infoMsg)

        for column in colList:
            lengths[column] = len(column)
            entries[column] = []

        return entries, lengths
    elif not isNumPosStrValue(count):
        return None

    for column in colList:
        lengths[column] = 0
        entries[column] = BigArray()

    colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT))

    for column in colList:
        infoMsg = "fetching number of distinct "
        infoMsg += "values for column '%s'" % column
        logger.info(infoMsg)

        query = dumpNode.count2 % (column, table)
        value = inject.getValue(query, blind=blind, inband=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

        if isNumPosStrValue(value):
            validColumnList = True

            if value == count:
                infoMsg = "using column '%s' as a pivot " % column
                infoMsg += "for retrieving row data"
                logger.info(infoMsg)

                validPivotValue = True

                colList.remove(column)
                colList.insert(0, column)
                break

    if not validColumnList:
        errMsg = "all column name(s) provided are non-existent"
        raise sqlmapNoneDataException, errMsg

    if not validPivotValue:
        warnMsg = "no proper pivot column provided (with unique values)."
        warnMsg += " It won't be possible to retrieve all rows"
        logger.warn(warnMsg)

    pivotValue = " "
    breakRetrieval = False

    try:
        for i in xrange(count):
            if breakRetrieval:
                break

            for column in colList:
                # Correction for pivotValues with unrecognized/problematic chars
                for char in ('\'', '?'):
                    if pivotValue and char in pivotValue and pivotValue[0] != char:
                        pivotValue = pivotValue.split(char)[0]
                        pivotValue = pivotValue[:-1] + decodeIntToUnicode(ord(pivotValue[-1]) + 1)
                        break

                if column == colList[0]:
                    query = dumpNode.query % (column, table, column, pivotValue)
                else:
                    query = dumpNode.query2 % (column, table, colList[0], pivotValue)

                value = inject.getValue(query, blind=blind, inband=not blind, error=not blind)

                if column == colList[0]:
                    if isNoneValue(value):
                        breakRetrieval = True
                        break
                    else:
                        pivotValue = safechardecode(value)

                if conf.limitStart or conf.limitStop:
                    if conf.limitStart and (i + 1) < conf.limitStart:
                        warnMsg = "skipping first %d pivot " % conf.limitStart
                        warnMsg += "point values"
                        singleTimeWarnMessage(warnMsg)
                        break
                    elif conf.limitStop and (i + 1) > conf.limitStop:
                        breakRetrieval = True
                        break

                value = "" if isNoneValue(value) else unArrayizeValue(value)
                lengths[column] = max(lengths[column], len(value) if value else 0)
                entries[column].append(value)

    except KeyboardInterrupt:
        warnMsg = "user aborted during enumeration. sqlmap "
        warnMsg += "will display partial output"
        logger.warn(warnMsg)

    except sqlmapConnectionException, e:
        errMsg = "connection exception detected. sqlmap "
        errMsg += "will display partial output"
        errMsg += "'%s'" % e
        logger.critical(errMsg)

    # Return whatever has been retrieved so far (possibly a partial dump)
    return entries, lengths
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if conf.api:
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath)
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(getBytes(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath)
                    except Exception as ex:
                        tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir

        dumpFileName = os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))

        if not checkFile(dumpFileName, False):
            try:
                openFile(dumpFileName, "w+b").close()
            except SqlmapSystemException:
                raise
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))

                if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
                    _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table))
                    dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(getBytes(table)).hexdigest()[:8], conf.dumpFormat.lower()))
                else:
                    dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
        else:
            appendToFile = any((conf.limitStart, conf.limitStop))

            if not appendToFile:
                count = 1

                while True:
                    candidate = "%s.%d" % (dumpFileName, count)
                    if not checkFile(candidate, False):
                        try:
                            shutil.copyfile(dumpFileName, candidate)
                        except IOError:
                            pass
                        break
                    else:
                        count += 1

        dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE)

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(list(tableValues.keys()))

    if conf.col:
        cols = conf.col.split(',')
        columns = sorted(columns, key=lambda _: cols.index(_) if _ in cols else 0)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)
    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
        dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
        dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
        dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
        dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
        dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]

            column = unsafeSQLIdentificatorNaming(column)
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if not appendToFile:
                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<th>%s</th>" % getUnicode(cgi.escape(column).encode("ascii", "xmlcharrefreplace")))

            field += 1

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "\n</tr>\n</thead>\n<tbody>\n")

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n" if not appendToFile else "")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<tr>")

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])
                    value = DUMP_REPLACEMENTS.get(value, value)

                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    try:
                        mimetype = getText(magic.from_buffer(value, mime=True))
                        if any(mimetype.startswith(_) for _ in ("application", "image")):
                            if not os.path.isdir(dumpDbPath):
                                os.makedirs(dumpDbPath)

                            _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
                            filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8)))
                            warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                            logger.warn(warnMsg)

                            with openFile(filepath, "w+b", None) as f:
                                _ = safechardecode(value, True)
                                f.write(_)
                    except magic.MagicException as ex:
                        logger.debug(getSafeExString(ex))

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<td>%s</td>" % getUnicode(cgi.escape(value).encode("ascii", "xmlcharrefreplace")))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")
        elif conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tr>\n")

        self._write("|", console=console)

    self._write("%s\n" % separator)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.endTransaction()
        logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath))

    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tbody>\n</table>\n</body>\n</html>")
        else:
            dataToDumpFile(dumpFP, "\n")

        dumpFP.close()

        msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName)
        if not warnFile:
            logger.info(msg)
        else:
            logger.warn(msg)
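# Stand-alone sketch of the column-type inference dbTableValues() performs
# before replicating a dump into SQLite: every value must parse as int for
# INTEGER, else as float for REAL, otherwise the column degrades to TEXT.
# infer_sqlite_type() is an illustrative helper, not part of sqlmap.
def infer_sqlite_type(values):
    def _fits(cast):
        for value in values:
            if not value or value == " ":  # NULL placeholder
                continue
            try:
                cast(value)
            except ValueError:
                return False
        return True

    if _fits(int):
        return "INTEGER"
    elif _fits(float):
        return "REAL"
    return "TEXT"

assert infer_sqlite_type(["1", "2", ""]) == "INTEGER"
assert infer_sqlite_type(["1.5", "2"]) == "REAL"
assert infer_sqlite_type(["1", "admin"]) == "TEXT"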
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if hasattr(conf, "api"):
        self._write(tableValues, content_type=API_CONTENT_TYPE.DUMP_TABLE)
        return

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication("%s%s%s.sqlite3" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        dumpDbPath = "%s%s%s" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db))

        if not os.path.isdir(dumpDbPath):
            os.makedirs(dumpDbPath, 0755)

        dumpFileName = "%s%s%s.%s" % (dumpDbPath, os.sep, unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())
        dumpFP = openFile(dumpFileName, "wb")

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(tableValues.keys())

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (db if db else "Current database", table))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((column, colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)

    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        documentNode = getDOMImplementation().createDocument(None, "table", None)
        tableNode = documentNode.documentElement

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        headNode = documentNode.createElement("thead")
        rowNode = documentNode.createElement("tr")
        tableNode.appendChild(headNode)
        headNode.appendChild(rowNode)
        bodyNode = documentNode.createElement("tbody")
        tableNode.appendChild(bodyNode)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if conf.dumpFormat == DUMP_FORMAT.CSV:
                if field == fields:
                    dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                else:
                    dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
            elif conf.dumpFormat == DUMP_FORMAT.HTML:
                entryNode = documentNode.createElement("td")
                rowNode.appendChild(entryNode)
                entryNode.appendChild(documentNode.createTextNode(column))

            field += 1

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            rowNode = documentNode.createElement("tr")
            bodyNode.appendChild(rowNode)

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])
                    value = DUMP_REPLACEMENTS.get(value, value)

                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    mimetype = magic.from_buffer(value, mime=True)
                    if any(mimetype.startswith(_) for _ in ("application", "image")):
                        filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (column, randomInt(8)))
                        warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                        logger.warn(warnMsg)

                        with open(filepath, "wb") as f:
                            _ = safechardecode(value, True)
                            f.write(_)

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    entryNode = documentNode.createElement("td")
                    rowNode.appendChild(entryNode)
                    entryNode.appendChild(documentNode.createTextNode(value))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")

        self._write("|", console=console)

    self._write("%s\n" % separator)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.endTransaction()
        logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath))

    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
            dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
            dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
            dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
            dataToDumpFile(dumpFP, "\n</head>\n")
            dataToDumpFile(dumpFP, tableNode.toxml())
            dataToDumpFile(dumpFP, "\n</html>")
        else:
            dataToDumpFile(dumpFP, "\n")

        dumpFP.close()

        logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))
def pivotDumpTable(table, colList, count=None, blind=True):
    lengths = {}
    entries = {}

    dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind

    validColumnList = False
    validPivotValue = False

    if count is None:
        query = dumpNode.count % table
        count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, time=False, expected=EXPECTED.INT)

    if isinstance(count, basestring) and count.isdigit():
        count = int(count)

    if count == 0:
        infoMsg = "table '%s' appears to be empty" % unsafeSQLIdentificatorNaming(table)
        logger.info(infoMsg)

        for column in colList:
            lengths[column] = len(column)
            entries[column] = []

        return entries, lengths
    elif not isNumPosStrValue(count):
        return None

    for column in colList:
        lengths[column] = 0
        entries[column] = BigArray()

    colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT))

    if conf.pivotColumn:
        if any(re.search(r"(.+\.)?%s" % conf.pivotColumn, _, re.I) for _ in colList):
            infoMsg = "using column '%s' as a pivot " % conf.pivotColumn
            infoMsg += "for retrieving row data"
            logger.info(infoMsg)

            validPivotValue = True

            colList.remove(conf.pivotColumn)
            colList.insert(0, conf.pivotColumn)
        else:
            warnMsg = "column '%s' not " % conf.pivotColumn
            warnMsg += "found in table '%s'" % table
            logger.warn(warnMsg)

    if not validPivotValue:
        for column in colList:
            infoMsg = "fetching number of distinct "
            infoMsg += "values for column '%s'" % column
            logger.info(infoMsg)

            query = dumpNode.count2 % (column, table)
            value = inject.getValue(query, blind=blind, union=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

            if isNumPosStrValue(value):
                validColumnList = True

                if value == count:
                    infoMsg = "using column '%s' as a pivot " % column
                    infoMsg += "for retrieving row data"
                    logger.info(infoMsg)

                    validPivotValue = True

                    colList.remove(column)
                    colList.insert(0, column)
                    break

        if not validColumnList:
            errMsg = "all column name(s) provided are non-existent"
            raise SqlmapNoneDataException(errMsg)

        if not validPivotValue:
            warnMsg = "no proper pivot column provided (with unique values)."
            warnMsg += " It won't be possible to retrieve all rows"
            logger.warn(warnMsg)

    pivotValue = " "
    breakRetrieval = False

    try:
        for i in xrange(count):
            if breakRetrieval:
                break

            for column in colList:
                def _(pivotValue):
                    if column == colList[0]:
                        query = dumpNode.query.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, column), unescaper.escape(pivotValue, False))
                    else:
                        query = dumpNode.query2.replace("'%s'", "%s") % (agent.preprocessField(table, column), table, agent.preprocessField(table, colList[0]), unescaper.escape(pivotValue, False))

                    return unArrayizeValue(inject.getValue(query, blind=blind, time=blind, union=not blind, error=not blind))

                value = _(pivotValue)

                if column == colList[0]:
                    if isNoneValue(value):
                        for pivotValue in filter(None, (" " if pivotValue == " " else None, "%s%s" % (pivotValue[0], unichr(ord(pivotValue[1]) + 1)) if len(pivotValue) > 1 else None, unichr(ord(pivotValue[0]) + 1))):
                            value = _(pivotValue)

                            if not isNoneValue(value):
                                break

                    if isNoneValue(value):
                        breakRetrieval = True
                        break

                    pivotValue = safechardecode(value)

                if conf.limitStart or conf.limitStop:
                    if conf.limitStart and (i + 1) < conf.limitStart:
                        warnMsg = "skipping first %d pivot " % conf.limitStart
                        warnMsg += "point values"
                        singleTimeWarnMessage(warnMsg)
                        break
                    elif conf.limitStop and (i + 1) > conf.limitStop:
                        breakRetrieval = True
                        break

                value = "" if isNoneValue(value) else unArrayizeValue(value)
                lengths[column] = max(lengths[column], len(value) if value else 0)
                entries[column].append(value)

    except KeyboardInterrupt:
        warnMsg = "user aborted during enumeration. sqlmap "
        warnMsg += "will display partial output"
        logger.warn(warnMsg)

    except SqlmapConnectionException, e:
        errMsg = "connection exception detected. sqlmap "
        errMsg += "will display partial output"
        errMsg += "'%s'" % e
        logger.critical(errMsg)

    # Return whatever has been retrieved so far (possibly a partial dump)
    return entries, lengths
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if hasattr(conf, "api"):
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(db)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            os.makedirs(dumpDbPath, 0755)

        dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))
        appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop))
        dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab")

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(tableValues.keys())

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)

    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
        dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
        dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
        dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
        dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]

            column = unsafeSQLIdentificatorNaming(column)
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if not appendToFile:
                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<th>%s</th>" % cgi.escape(column).encode("ascii", "xmlcharrefreplace"))

            field += 1

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "\n</tr>\n</thead>\n<tbody>\n")

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n" if not appendToFile else "")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<tr>")

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])
                    value = DUMP_REPLACEMENTS.get(value, value)

                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    try:
                        mimetype = magic.from_buffer(value, mime=True)
                        if any(mimetype.startswith(_) for _ in ("application", "image")):
                            if not os.path.isdir(dumpDbPath):
                                os.makedirs(dumpDbPath, 0755)

                            filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (unsafeSQLIdentificatorNaming(column), randomInt(8)))
                            warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                            logger.warn(warnMsg)

                            with open(filepath, "wb") as f:
                                _ = safechardecode(value, True)
                                f.write(_)
                    except magic.MagicException, err:
                        logger.debug(str(err))

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<td>%s</td>" % cgi.escape(value).encode("ascii", "xmlcharrefreplace"))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")
        elif conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tr>\n")

        self._write("|", console=console)
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if hasattr(conf, "api"):
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = "%s%s%s" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication("%s%s%s.sqlite3" % (conf.dumpPath, os.sep, unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            os.makedirs(dumpDbPath, 0755)

        dumpFileName = "%s%s%s.%s" % (dumpDbPath, os.sep, unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())
        appendToFile = os.path.isfile(dumpFileName) and any((conf.limitStart, conf.limitStop))
        dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab")

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(tableValues.keys())

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)

    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        documentNode = getDOMImplementation().createDocument(None, "table", None)
        tableNode = documentNode.documentElement

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        headNode = documentNode.createElement("thead")
        rowNode = documentNode.createElement("tr")
        tableNode.appendChild(headNode)
        headNode.appendChild(rowNode)
        bodyNode = documentNode.createElement("tbody")
        tableNode.appendChild(bodyNode)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]

            column = unsafeSQLIdentificatorNaming(column)
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if not appendToFile:
                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    entryNode = documentNode.createElement("td")
                    rowNode.appendChild(entryNode)
                    entryNode.appendChild(documentNode.createTextNode(column))

            field += 1

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n" if not appendToFile else "")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            rowNode = documentNode.createElement("tr")
            bodyNode.appendChild(rowNode)

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])
                    value = DUMP_REPLACEMENTS.get(value, value)

                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    mimetype = magic.from_buffer(value, mime=True)
                    if any(mimetype.startswith(_) for _ in ("application", "image")):
                        if not os.path.isdir(dumpDbPath):
                            os.makedirs(dumpDbPath, 0755)

                        filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (unsafeSQLIdentificatorNaming(column), randomInt(8)))
                        warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                        logger.warn(warnMsg)

                        with open(filepath, "wb") as f:
                            _ = safechardecode(value, True)
                            f.write(_)

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    entryNode = documentNode.createElement("td")
                    rowNode.appendChild(entryNode)
                    entryNode.appendChild(documentNode.createTextNode(value))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")

        self._write("|", console=console)

    self._write("%s\n" % separator)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.endTransaction()
        logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath))

    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
            dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
            dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
            dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
            dataToDumpFile(dumpFP, "\n</head>\n")
            dataToDumpFile(dumpFP, tableNode.toxml())
            dataToDumpFile(dumpFP, "\n</html>")
        else:
            dataToDumpFile(dumpFP, "\n")

        dumpFP.close()

        logger.info("table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName))
def __pivotDumpTable(self, table, colList, count=None, blind=True):
    lengths = {}
    entries = {}

    dumpNode = queries[Backend.getIdentifiedDbms()].dump_table.blind

    validColumnList = False
    validPivotValue = False

    if count is None:
        query = dumpNode.count % table
        count = inject.getValue(query, inband=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if blind else inject.getValue(query, blind=False, expected=EXPECTED.INT)

    if isinstance(count, basestring) and count.isdigit():
        count = int(count)

    if count == 0:
        infoMsg = "table '%s' appears to be empty" % unsafeSQLIdentificatorNaming(table)
        logger.info(infoMsg)

        for column in colList:
            lengths[column] = len(column)
            entries[column] = []

        return entries, lengths
    elif not isNumPosStrValue(count):
        return None

    for column in colList:
        lengths[column] = 0
        entries[column] = BigArray()

    colList = filter(None, sorted(colList, key=lambda x: len(x) if x else MAX_INT))

    for column in colList:
        infoMsg = "fetching number of distinct "
        infoMsg += "values for column '%s'" % column
        logger.info(infoMsg)

        query = dumpNode.count2 % (column, table)
        value = inject.getValue(query, blind=blind, inband=not blind, error=not blind, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS)

        if isNumPosStrValue(value):
            validColumnList = True

            if value == count:
                infoMsg = "using column '%s' as a pivot " % column
                infoMsg += "for retrieving row data"
                logger.info(infoMsg)

                validPivotValue = True

                colList.remove(column)
                colList.insert(0, column)
                break

    if not validColumnList:
        errMsg = "all column name(s) provided are non-existent"
        raise sqlmapNoneDataException, errMsg

    if not validPivotValue:
        warnMsg = "no proper pivot column provided (with unique values)."
        warnMsg += " It won't be possible to retrieve all rows"
        logger.warn(warnMsg)

    pivotValue = " "
    breakRetrieval = False

    try:
        for i in xrange(count):
            if breakRetrieval:
                break

            for column in colList:
                # Correction for pivotValues with unrecognized/problematic chars
                for char in ("'", "?"):
                    if pivotValue and char in pivotValue and pivotValue[0] != char:
                        pivotValue = pivotValue.split(char)[0]
                        pivotValue = pivotValue[:-1] + chr(ord(pivotValue[-1]) + 1)
                        break

                if column == colList[0]:
                    query = dumpNode.query % (column, table, column, pivotValue)
                else:
                    query = dumpNode.query2 % (column, table, colList[0], pivotValue)

                value = inject.getValue(query, blind=blind, inband=not blind, error=not blind)

                if column == colList[0]:
                    if isNoneValue(value):
                        breakRetrieval = True
                        break
                    else:
                        pivotValue = safechardecode(value)

                if conf.limitStart or conf.limitStop:
                    if conf.limitStart and (i + 1) < conf.limitStart:
                        warnMsg = "skipping first %d pivot " % conf.limitStart
                        warnMsg += "point values"
                        singleTimeWarnMessage(warnMsg)
                        break
                    elif conf.limitStop and (i + 1) > conf.limitStop:
                        breakRetrieval = True
                        break

                value = "" if isNoneValue(value) else unArrayizeValue(value)
                lengths[column] = max(lengths[column], len(value) if value else 0)
                entries[column].append(value)

    except KeyboardInterrupt:
        warnMsg = "user aborted during enumeration. sqlmap "
        warnMsg += "will display partial output"
        logger.warn(warnMsg)

    except sqlmapConnectionException, e:
        errMsg = "connection exception detected. sqlmap "
        errMsg += "will display partial output"
        errMsg += "'%s'" % e
        logger.critical(errMsg)

    # Return whatever has been retrieved so far (possibly a partial dump)
    return entries, lengths