def headersParser(headers):
    """
    This function calls a class that parses the input HTTP headers to
    fingerprint the back-end database management system operating system
    and the web application technology
    """

    # It is enough to parse the headers on first four HTTP responses
    if kb.headersCount > 3:
        return

    kb.headersCount += 1

    topHeaders = {
        "cookie": "%s/cookie.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "microsoftsharepointteamservices": "%s/sharepoint.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "server": "%s/server.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "servlet-engine": "%s/servlet.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "set-cookie": "%s/cookie.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "x-aspnet-version": "%s/x-aspnet-version.xml" % paths.SQLMAP_XML_BANNER_PATH,
        "x-powered-by": "%s/x-powered-by.xml" % paths.SQLMAP_XML_BANNER_PATH,
    }

    for header in headers:
        if header in topHeaders.keys():
            value = headers[header]
            xmlfile = topHeaders[header]

            checkFile(xmlfile)

            handler = FingerprintHandler(value, kb.headersFp)
            parse(xmlfile, handler)
            parse(paths.GENERIC_XML, handler)
def headersParser(headers):
    """
    This function calls a class that parses the input HTTP headers to
    fingerprint the back-end database management system operating system
    and the web application technology
    """

    if not kb.headerPaths:
        kb.headerPaths = {
            "cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
            "microsoftsharepointteamservices": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "sharepoint.xml"),
            "server": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "server.xml"),
            "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet.xml"),
            "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
            "x-aspnet-version": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-aspnet-version.xml"),
            "x-powered-by": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-powered-by.xml"),
        }

    for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
        value = headers[header]
        xmlfile = kb.headerPaths[header]

        checkFile(xmlfile)

        handler = FingerprintHandler(value, kb.headersFp)
        parseXmlFile(xmlfile, handler)
        parseXmlFile(paths.GENERIC_XML, handler)
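# A hedged usage sketch for the variant above: the headers dict below is
# made up, and sqlmap's runtime state (paths.SQLMAP_XML_BANNER_PATH,
# kb.headersFp, kb.headerPaths) is assumed to have been initialized by the
# framework before this point.
def _headersParserUsageSketch():
    headers = {
        "server": "Apache/2.4.41 (Ubuntu)",
        "x-powered-by": "PHP/7.4.3",
        "set-cookie": "PHPSESSID=abc123; path=/",
    }

    headersParser(headers)   # matches "server", "x-powered-by" and "set-cookie"
    print(kb.headersFp)      # accumulated fingerprint data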
def bannerParser(banner):
    """
    This function calls a class to extract information from the given
    DBMS banner based upon the data in XML file
    """

    xmlfile = None

    if Backend.getIdentifiedDbms() == DBMS.MSSQL:
        xmlfile = paths.MSSQL_XML
    elif Backend.getIdentifiedDbms() == DBMS.MYSQL:
        xmlfile = paths.MYSQL_XML
    elif Backend.getIdentifiedDbms() == DBMS.ORACLE:
        xmlfile = paths.ORACLE_XML
    elif Backend.getIdentifiedDbms() == DBMS.PGSQL:
        xmlfile = paths.PGSQL_XML

    if not xmlfile:
        return

    checkFile(xmlfile)

    if Backend.getIdentifiedDbms() == DBMS.MSSQL:
        handler = MSSQLBannerHandler(banner, kb.bannerFp)
        parseXmlFile(xmlfile, handler)

        handler = FingerprintHandler(banner, kb.bannerFp)
        parseXmlFile(paths.GENERIC_XML, handler)
    else:
        handler = FingerprintHandler(banner, kb.bannerFp)
        parseXmlFile(xmlfile, handler)
        parseXmlFile(paths.GENERIC_XML, handler)
def bannerParser(banner):
    """
    This function calls a class to extract information from the given
    DBMS banner based upon the data in XML file
    """

    xmlfile = None

    if kb.dbms == "Microsoft SQL Server":
        xmlfile = paths.MSSQL_XML
    elif kb.dbms == "MySQL":
        xmlfile = paths.MYSQL_XML
    elif kb.dbms == "Oracle":
        xmlfile = paths.ORACLE_XML
    elif kb.dbms == "PostgreSQL":
        xmlfile = paths.PGSQL_XML

    # guard against an unidentified DBMS leaving xmlfile unbound
    if not xmlfile:
        return

    checkFile(xmlfile)

    if kb.dbms == "Microsoft SQL Server":
        handler = MSSQLBannerHandler(banner)
        parse(xmlfile, handler)

        handler = FingerprintHandler(banner, kb.bannerFp)
        parse(paths.GENERIC_XML, handler)
    else:
        handler = FingerprintHandler(banner, kb.bannerFp)
        parse(xmlfile, handler)
        parse(paths.GENERIC_XML, handler)
def writeFile(self, localFile, remoteFile, fileType=None, forceCheck=False):
    written = False

    checkFile(localFile)

    self.checkDbmsOs()

    if localFile.endswith('_'):
        localFile = decloakToTemp(localFile)

    if conf.direct or isStackingAvailable():
        if isStackingAvailable():
            debugMsg = "going to upload the %s file with " % fileType
            debugMsg += "stacked query SQL injection technique"
            logger.debug(debugMsg)

        written = self.stackedWriteFile(localFile, remoteFile, fileType, forceCheck)
        self.cleanup(onlyFileTbl=True)
    elif isTechniqueAvailable(PAYLOAD.TECHNIQUE.UNION) and Backend.isDbms(DBMS.MYSQL):
        debugMsg = "going to upload the %s file with " % fileType
        debugMsg += "UNION query SQL injection technique"
        logger.debug(debugMsg)

        written = self.unionWriteFile(localFile, remoteFile, fileType, forceCheck)
    else:
        errMsg = "none of the SQL injection techniques detected can "
        errMsg += "be used to write files to the underlying file "
        errMsg += "system of the back-end %s server" % Backend.getDbms()
        logger.error(errMsg)

        return None

    return written
def bannerParser(banner):
    """
    This function calls a class to extract information from the given
    DBMS banner based upon the data in XML file
    """

    xmlfile = None

    if Backend.isDbms(DBMS.MSSQL):
        xmlfile = paths.MSSQL_XML
    elif Backend.isDbms(DBMS.MYSQL):
        xmlfile = paths.MYSQL_XML
    elif Backend.isDbms(DBMS.ORACLE):
        xmlfile = paths.ORACLE_XML
    elif Backend.isDbms(DBMS.PGSQL):
        xmlfile = paths.PGSQL_XML

    if not xmlfile:
        return

    checkFile(xmlfile)

    if Backend.isDbms(DBMS.MSSQL):
        handler = MSSQLBannerHandler(banner, kb.bannerFp)
        parseXmlFile(xmlfile, handler)

        handler = FingerprintHandler(banner, kb.bannerFp)
        parseXmlFile(paths.GENERIC_XML, handler)
    else:
        handler = FingerprintHandler(banner, kb.bannerFp)
        parseXmlFile(xmlfile, handler)
        parseXmlFile(paths.GENERIC_XML, handler)
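# A hedged call-site sketch for the variant above: the banner string is
# illustrative, and Backend/kb state is assumed to have been populated by
# sqlmap's detection phase (i.e. Backend.isDbms(DBMS.MYSQL) already holds).
def _bannerParserUsageSketch():
    banner = "5.7.29-0ubuntu0.18.04.1"   # as returned by VERSION() on MySQL

    bannerParser(banner)
    print(kb.bannerFp)                   # parsed version/distribution details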
def bannerParser(banner):
    """
    This function calls a class to extract information from the given
    DBMS banner based upon the data in XML file
    """

    xmlfile = None

    if kb.dbms == "Microsoft SQL Server":
        xmlfile = paths.MSSQL_XML
    elif kb.dbms == "MySQL":
        xmlfile = paths.MYSQL_XML
    elif kb.dbms == "Oracle":
        xmlfile = paths.ORACLE_XML
    elif kb.dbms == "PostgreSQL":
        xmlfile = paths.PGSQL_XML

    # guard against an unidentified DBMS leaving xmlfile unbound
    if not xmlfile:
        return

    checkFile(xmlfile)

    if kb.dbms == "Microsoft SQL Server":
        handler = MSSQLBannerHandler(banner, kb.bannerFp)
        parse(xmlfile, handler)

        handler = FingerprintHandler(banner, kb.bannerFp)
        parse(paths.GENERIC_XML, handler)
    else:
        handler = FingerprintHandler(banner, kb.bannerFp)
        parse(xmlfile, handler)
        parse(paths.GENERIC_XML, handler)
def udfSetLocalPaths(self):
    self.udfLocalFile = paths.SQLMAP_UDF_PATH
    self.udfSharedLibName = "libs%s" % randomStr(lowercase=True)

    self.getVersionFromBanner()

    banVer = kb.bannerFp["dbmsVersion"]

    if banVer >= "10":
        majorVer = banVer.split('.')[0]
    elif banVer >= "8.2" and '.' in banVer:
        majorVer = '.'.join(banVer.split('.')[:2])
    else:
        errMsg = "unsupported feature on versions of PostgreSQL before 8.2"
        raise SqlmapUnsupportedFeatureException(errMsg)

    try:
        if Backend.isOs(OS.WINDOWS):
            _ = os.path.join(self.udfLocalFile, "postgresql", "windows", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.dll_")
            checkFile(_)
            self.udfLocalFile = decloakToTemp(_)
            self.udfSharedLibExt = "dll"
        else:
            _ = os.path.join(self.udfLocalFile, "postgresql", "linux", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.so_")
            checkFile(_)
            self.udfLocalFile = decloakToTemp(_)
            self.udfSharedLibExt = "so"
    except SqlmapSystemException:
        errMsg = "unsupported feature on PostgreSQL %s (%s-bit)" % (majorVer, Backend.getArch())
        raise SqlmapUnsupportedFeatureException(errMsg)
def headersParser(headers):
    """
    This function calls a class that parses the input HTTP headers to
    fingerprint the back-end database management system operating system
    and the web application technology
    """

    if not kb.headerPaths:
        kb.headerPaths = {
            "cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
            "microsoftsharepointteamservices": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "sharepoint.xml"),
            "server": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "server.xml"),
            "servlet-engine": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "servlet.xml"),
            "set-cookie": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "cookie.xml"),
            "x-aspnet-version": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-aspnet-version.xml"),
            "x-powered-by": os.path.join(paths.SQLMAP_XML_BANNER_PATH, "x-powered-by.xml")
        }

    for header in itertools.ifilter(lambda x: x in kb.headerPaths, headers):
        value = headers[header]
        xmlfile = kb.headerPaths[header]

        checkFile(xmlfile)

        handler = FingerprintHandler(value, kb.headersFp)
        parseXmlFile(xmlfile, handler)
        parseXmlFile(paths.GENERIC_XML, handler)
def _setCipher():
    if conf.cipher:
        if conf.cipher not in CIPHER_TYPE:
            msg = "cipher type not supported, check that you have entered the right name"
            raise PyExpSystemException(msg)

        if conf.file:
            checkFile(conf.file)
            file = open(conf.file)
            conf.text = file.read()

        if not conf.text:
            msg = "no input text, cipher work aborted"
            raise PyExpSystemException(msg)

        if not conf.encrypt and not conf.decrypt and not conf.brute:
            msg = "none of encrypt, decrypt or brute mode is set"
            raise PyExpSystemException(msg)

        if conf.brute:
            conf.englishWords = loadDictionary()

        cfile = conf.cipher
        cfile = cfile.strip()

        if os.path.exists(os.path.join(paths.PYEXP_CIPHER_PATH, "%s.py" % cfile)):
            cfile = os.path.join(paths.PYEXP_CIPHER_PATH, "%s.py" % cfile)

            infoMsg = "loading module '%s.py'" % conf.cipher
            logger.info(infoMsg)

            try:
                file, pathname, description = imp.find_module(conf.cipher, [paths.PYEXP_CIPHER_PATH])
                mod = imp.load_module(conf.cipher, file, pathname, description)

                if not conf.brute:
                    try:
                        ru.cipherfunc = mod.cipher
                        infoMsg = "'%s.py' cipher module loaded successfully" % conf.cipher
                        logger.info(infoMsg)
                    except AttributeError:
                        msg = "no cipher function found"
                        raise PyExpSystemException(msg)
                else:
                    try:
                        ru.cipherfunc = mod.bruter
                        infoMsg = "'%s.py' bruter module loaded successfully" % conf.cipher
                        logger.info(infoMsg)
                    except AttributeError:
                        msg = "no bruter function found"
                        raise PyExpSystemException(msg)
            except ImportError:
                raise SystemExit
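# A hedged sketch of the dynamic-loading pattern used above: locate a module
# by name inside a plugin directory and pull a known entry point from it
# (imp is deprecated in modern Python, but matches the code above; the
# helper name is hypothetical).
import imp

def _loadCipherSketch(name, pluginDir):
    file_, pathname, description = imp.find_module(name, [pluginDir])

    try:
        mod = imp.load_module(name, file_, pathname, description)
    finally:
        if file_:
            file_.close()

    return mod.cipher   # raises AttributeError if the entry point is missing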
def connect(self):
    self.initConnection()

    try:
        msg = "what's the location of 'hsqldb.jar'? "
        jar = readInput(msg)
        checkFile(jar)
        args = "-Djava.class.path=%s" % jar
        jvm_path = jpype.getDefaultJVMPath()
        jpype.startJVM(jvm_path, args)
    except Exception, msg:
        raise SqlmapConnectionException(msg[0])
def stackedWriteFile(self, localFile, remoteFile, fileType, forceCheck=False):
    # NOTE: this is needed here because we use xp_cmdshell extended
    # procedure to write a file on the back-end Microsoft SQL Server
    # file system
    self.initEnv()

    self.getRemoteTempPath()

    tmpPath = posixToNtSlashes(conf.tmpPath)
    remoteFile = posixToNtSlashes(remoteFile)

    checkFile(localFile)
    localFileContent = open(localFile, "rb").read()

    self._stackedWriteFilePS(tmpPath, localFileContent, remoteFile, fileType)
    written = self.askCheckWrittenFile(localFile, remoteFile, forceCheck)

    if written is False:
        message = "do you want to try to upload the file with "
        message += "the custom Visual Basic script technique? [Y/n] "

        if readInput(message, default='Y', boolean=True):
            self._stackedWriteFileVbs(tmpPath, localFileContent, remoteFile, fileType)
            written = self.askCheckWrittenFile(localFile, remoteFile, forceCheck)

    if written is False:
        message = "do you want to try to upload the file with "
        message += "the built-in debug.exe technique? [Y/n] "

        if readInput(message, default='Y', boolean=True):
            self._stackedWriteFileDebugExe(tmpPath, localFile, localFileContent, remoteFile, fileType)
            written = self.askCheckWrittenFile(localFile, remoteFile, forceCheck)

    if written is False:
        message = "do you want to try to upload the file with "
        message += "the built-in certutil.exe technique? [Y/n] "

        if readInput(message, default='Y', boolean=True):
            self._stackedWriteFileCertutilExe(tmpPath, localFile, localFileContent, remoteFile, fileType)
            written = self.askCheckWrittenFile(localFile, remoteFile, forceCheck)

    return written
def fileEncode(self, fileName, encoding, single, chunkSize=256):
    """
    Called by MySQL and PostgreSQL plugins to write a file on the
    back-end DBMS underlying file system
    """

    checkFile(fileName)

    with open(fileName, "rb") as f:
        content = f.read()

    return self.fileContentEncode(content, encoding, single, chunkSize)
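# The heavy lifting happens in fileContentEncode() (not shown here); a
# standalone sketch of the idea, using hexadecimal encoding and a
# hypothetical helper name, might look like this:
import binascii

def _fileContentEncodeSketch(content, single=False, chunkSize=256):
    """
    Hex-encode raw bytes either as one literal or as fixed-size chunks
    suitable for concatenation on the DBMS side (simplified stand-in).
    """
    encoded = binascii.hexlify(content).decode("ascii")

    if single:
        return ["0x%s" % encoded]

    return ["0x%s" % encoded[i:i + chunkSize] for i in range(0, len(encoded), chunkSize)]

# _fileContentEncodeSketch(b"hello", single=True) -> ['0x68656c6c6f']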
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except MissingSectionHeaderError:
        errMsg = "you have provided an invalid configuration file"
        raise sqlmapSyntaxException, errMsg

    if not config.has_section("Target"):
        errMsg = "missing a mandatory section 'Target' in the configuration file"
        raise sqlmapMissingMandatoryOptionException, errMsg

    condition = not config.has_option("Target", "url")
    condition &= not config.has_option("Target", "logFile")
    condition &= not config.has_option("Target", "bulkFile")
    condition &= not config.has_option("Target", "googleDork")
    condition &= not config.has_option("Target", "requestFile")
    condition &= not config.has_option("Target", "wizard")

    if condition:
        errMsg = "missing a mandatory option in the configuration file "
        errMsg += "(url, logFile, bulkFile, googleDork, requestFile or wizard)"
        raise sqlmapMissingMandatoryOptionException, errMsg

    for family, optionData in optDict.items():
        for option, datatype in optionData.items():
            boolean = False
            integer = False

            if isinstance(datatype, (list, tuple, set)):
                datatype = datatype[0]

            if datatype == "boolean":
                boolean = True
            elif datatype == "integer":
                integer = True

            configFileProxy(family, option, boolean, integer)
def queriesParser():
    """
    This function calls a class to parse the default DBMS queries
    from an XML file
    """

    debugMsg = "parsing XML queries file"
    logger.debug(debugMsg)

    xmlfile = paths.QUERIES_XML

    checkFile(xmlfile)

    handler = queriesHandler()
    parse(xmlfile, handler)
def htmlParser(page):
    """
    This function calls a class that parses the input HTML page to
    fingerprint the back-end database management system
    """

    xmlfile = paths.ERRORS_XML

    checkFile(xmlfile)

    page = sanitizeStr(page)

    handler = htmlHandler(page)
    parse(xmlfile, handler)

    if handler.dbms and handler.dbms not in kb.htmlFp:
        kb.htmlFp.append(handler.dbms)

    return handler.dbms
def udfInjectCore(self, udfDict):
    written = False

    for udf in udfDict.keys():
        if udf in self.createdUdf:
            continue

        self.udfCheckAndOverwrite(udf)

    if len(self.udfToCreate) > 0:
        self.udfSetRemotePath()
        checkFile(self.udfLocalFile)
        written = self.writeFile(self.udfLocalFile, self.udfRemoteFile, "binary", forceCheck=True)

        if written is not True:
            errMsg = "there has been a problem uploading the shared library, "
            errMsg += "it looks like the binary file has not been written "
            errMsg += "on the database underlying file system"
            logger.error(errMsg)

            message = "do you want to proceed anyway? Beware that the "
            message += "operating system takeover will fail [y/N] "
            choice = readInput(message, default="N")

            if choice and choice.lower() == "y":
                written = True
            else:
                return False
    else:
        return True

    for udf, inpRet in udfDict.items():
        if udf in self.udfToCreate and udf not in self.createdUdf:
            self.udfCreateFromSharedLib(udf, inpRet)

    if Backend.isDbms(DBMS.MYSQL):
        supportTblType = "longtext"
    elif Backend.isDbms(DBMS.PGSQL):
        supportTblType = "text"

    self.udfCreateSupportTbl(supportTblType)

    return written
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except MissingSectionHeaderError:
        errMsg = "you have provided an invalid configuration file"
        raise SqlmapSyntaxException(errMsg)

    if not config.has_section("Target"):
        errMsg = "missing a mandatory section 'Target' in the configuration file"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    condition = not config.has_option("Target", "url")
    condition &= not config.has_option("Target", "logFile")
    condition &= not config.has_option("Target", "bulkFile")
    condition &= not config.has_option("Target", "googleDork")
    condition &= not config.has_option("Target", "requestFile")
    condition &= not config.has_option("Target", "wizard")

    if condition:
        errMsg = "missing a mandatory option in the configuration file "
        errMsg += "(url, logFile, bulkFile, googleDork, requestFile or wizard)"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    for family, optionData in optDict.items():
        for option, datatype in optionData.items():
            datatype = unArrayizeValue(datatype)

            boolean = datatype == "boolean"
            integer = datatype == "integer"

            configFileProxy(family, option, boolean, integer)
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = openFile(configFile, "rb")

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except Exception as ex:
        errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex)
        raise SqlmapSyntaxException(errMsg)

    if not config.has_section("Target"):
        errMsg = "missing a mandatory section 'Target' in the configuration file"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    mandatory = False

    for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"):
        if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option):
            mandatory = True
            break

    if not mandatory:
        errMsg = "missing a mandatory option in the configuration file "
        errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    for family, optionData in list(optDict.items()):
        for option, datatype in list(optionData.items()):
            datatype = unArrayizeValue(datatype)

            configFileProxy(family, option, datatype)
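# As a rough illustration of what the "Target" validation above enforces,
# a minimal configuration needs a [Target] section with at least one of
# the mandatory options set; the values here are hypothetical.
import configparser
import io

def _configFileSketch():
    sample = u"[Target]\nurl = http://example.com/vuln.php?id=1\n"

    parser = configparser.RawConfigParser()
    parser.read_file(io.StringIO(sample))

    assert parser.has_section("Target")    # mandatory section
    assert parser.get("Target", "url")     # at least one mandatory option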
def udfSetLocalPaths(self):
    self.udfLocalFile = paths.SQLMAP_UDF_PATH
    self.udfSharedLibName = "libs%s" % randomStr(lowercase=True)

    self.getVersionFromBanner()

    banVer = kb.bannerFp["dbmsVersion"]

    if banVer >= "9.4":
        majorVer = "9.4"
    elif banVer >= "9.3":
        majorVer = "9.3"
    elif banVer >= "9.2":
        majorVer = "9.2"
    elif banVer >= "9.1":
        majorVer = "9.1"
    elif banVer >= "9.0":
        majorVer = "9.0"
    elif banVer >= "8.4":
        majorVer = "8.4"
    elif banVer >= "8.3":
        majorVer = "8.3"
    elif banVer >= "8.2":
        majorVer = "8.2"
    else:
        errMsg = "unsupported feature on versions of PostgreSQL before 8.2"
        raise SqlmapUnsupportedFeatureException(errMsg)

    try:
        if Backend.isOs(OS.WINDOWS):
            _ = os.path.join(self.udfLocalFile, "postgresql", "windows", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.dll_")
            checkFile(_)
            self.udfLocalFile = decloakToTemp(_)
            self.udfSharedLibExt = "dll"
        else:
            _ = os.path.join(self.udfLocalFile, "postgresql", "linux", "%d" % Backend.getArch(), majorVer, "lib_postgresqludf_sys.so_")
            checkFile(_)
            self.udfLocalFile = decloakToTemp(_)
            self.udfSharedLibExt = "so"
    except SqlmapSystemException:
        errMsg = "unsupported feature on PostgreSQL %s (%s-bit)" % (majorVer, Backend.getArch())
        raise SqlmapUnsupportedFeatureException(errMsg)
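# The if/elif ladder above maps a full banner version to the major series
# used in the shared-library path; the generalized variant shown earlier
# does the same with string comparisons. A minimal numeric sketch of that
# mapping (hypothetical helper, assumes a dotted version string):
def _postgresMajorVersionSketch(banVer):
    """
    Reduce a PostgreSQL banner version to its major series: "10"+ keeps the
    first component, 8.2-9.x keeps the first two. Parses numerically, since
    lexicographic comparison is a pitfall here ("9.4" >= "10" is True for
    strings).
    """
    parts = [int(_) for _ in banVer.split('.')[:2]]

    if parts[0] >= 10:
        return str(parts[0])                   # e.g. "10.12" -> "10"
    if len(parts) == 2 and parts >= [8, 2]:
        return "%d.%d" % (parts[0], parts[1])  # e.g. "9.4.5" -> "9.4"

    raise ValueError("unsupported on versions of PostgreSQL before 8.2")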
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)

    configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)

    config = UnicodeRawConfigParser()
    config.readfp(configFP)

    if not config.has_section("Target"):
        raise NoSectionError, "Target in the configuration file is mandatory"

    condition = not config.has_option("Target", "url")
    condition &= not config.has_option("Target", "list")
    condition &= not config.has_option("Target", "googleDork")

    if condition:
        errMsg = "missing a mandatory option in the configuration "
        errMsg += "file (url, list or googleDork)"
        raise sqlmapMissingMandatoryOptionException, errMsg

    for family, optionData in optDict.items():
        for option, datatype in optionData.items():
            boolean = False
            integer = False

            if isinstance(datatype, (list, tuple, set)):
                datatype = datatype[0]

            if datatype == "boolean":
                boolean = True
            elif datatype == "integer":
                integer = True

            configFileProxy(family, option, boolean, integer)
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = openFile(configFile, "rb")

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except Exception, ex:
        errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex)
        raise SqlmapSyntaxException(errMsg)
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = codecs.open(configFile, "rb", UNICODE_ENCODING)

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except (MissingSectionHeaderError, ParsingError), ex:
        errMsg = "you have provided an invalid configuration file ('%s')" % str(ex)
        raise SqlmapSyntaxException(errMsg)
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = openFile(configFile, "rb")

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except Exception, ex:
        errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % ex.message
        raise SqlmapSyntaxException(errMsg)
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)
    configFP = openFile(configFile, "rb")

    try:
        config = UnicodeRawConfigParser()
        config.readfp(configFP)
    except Exception as ex:
        errMsg = "you have provided an invalid and/or unreadable configuration file ('%s')" % getSafeExString(ex)
        raise SqlmapSyntaxException(errMsg)

    if not config.has_section("Target"):
        errMsg = "missing a mandatory section 'Target' in the configuration file"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    mandatory = False

    for option in ("direct", "url", "logFile", "bulkFile", "googleDork", "requestFile", "sitemapUrl", "wizard"):
        if config.has_option("Target", option) and config.get("Target", option) or cmdLineOptions.get(option):
            mandatory = True
            break

    if not mandatory:
        errMsg = "missing a mandatory option in the configuration file "
        errMsg += "(direct, url, logFile, bulkFile, googleDork, requestFile, sitemapUrl or wizard)"
        raise SqlmapMissingMandatoryOptionException(errMsg)

    for family, optionData in optDict.items():
        for option, datatype in optionData.items():
            datatype = unArrayizeValue(datatype)

            configFileProxy(family, option, datatype)
def connect(self):
    self.initConnection()

    try:
        msg = "what's the location of 'hsqldb.jar'? "
        jar = readInput(msg)
        checkFile(jar)
        args = "-Djava.class.path=%s" % jar
        jvm_path = jpype.getDefaultJVMPath()
        jpype.startJVM(jvm_path, args)
    except Exception as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    try:
        driver = 'org.hsqldb.jdbc.JDBCDriver'
        connection_string = 'jdbc:hsqldb:mem:.'  # 'jdbc:hsqldb:hsql://%s/%s' % (self.hostname, self.db)
        self.connector = jaydebeapi.connect(driver, connection_string, str(self.user), str(self.password))
    except Exception as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    self.initCursor()
    self.printConnected()
def configFileParser(configFile):
    """
    Parse configuration file and save settings into the configuration
    advanced dictionary.
    """

    global config

    debugMsg = "parsing configuration file"
    logger.debug(debugMsg)

    checkFile(configFile)

    config = ConfigParser()
    config.read(configFile)

    if not config.has_section("Target"):
        raise NoSectionError, "Target in the configuration file is mandatory"

    condition = not config.has_option("Target", "url")
    condition &= not config.has_option("Target", "list")
    condition &= not config.has_option("Target", "googleDork")

    if condition:
        errMsg = "missing a mandatory option in the configuration "
        errMsg += "file (url, list or googleDork)"
        raise sqlmapMissingMandatoryOptionException, errMsg

    for family, optionData in optDict.items():
        for option, data in optionData.items():
            boolean = False
            integer = False

            if data == "boolean":
                boolean = True
            elif data == "integer":
                integer = True

            configFileProxy(family, option, boolean, integer)
def htmlParser(page):
    """
    This function calls a class that parses the input HTML page to
    fingerprint the back-end database management system
    """

    xmlfile = paths.ERRORS_XML

    checkFile(xmlfile)

    handler = HTMLHandler(page)
    parseXmlFile(xmlfile, handler)

    if handler.dbms and handler.dbms not in kb.htmlFp:
        kb.lastParserStatus = handler.dbms
        kb.htmlFp.append(handler.dbms)
    else:
        kb.lastParserStatus = None

    # generic SQL warning/error messages
    if re.search(r"SQL (warning|error|syntax)", page, re.I):
        handler._markAsErrorPage()

    return handler.dbms
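# A hedged usage sketch: the error page below is a fabricated MySQL error
# message, and kb/paths are assumed to be initialized by sqlmap's runtime.
def _htmlParserUsageSketch():
    page = ("You have an error in your SQL syntax; check the manual that "
            "corresponds to your MySQL server version for the right syntax")

    dbms = htmlParser(page)
    print(dbms)                   # e.g. "MySQL" if xml/errors.xml matches
    print(kb.lastParserStatus)    # mirrors the last successful match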
def htmlParser(page):
    # htmlParser identifies the current back-end database by matching the
    # page against the fingerprints of the different supported databases
    """
    This function calls a class that parses the input HTML page to
    fingerprint the back-end database management system
    """

    # paths.ERRORS_XML holds the path of the fingerprint configuration file
    # sqlmap uses for identification, located at ./xml/errors.xml
    xmlfile = paths.ERRORS_XML

    checkFile(xmlfile)

    handler = HTMLHandler(page)  # the actual work is done by the HTMLHandler class
    parseXmlFile(xmlfile, handler)

    if handler.dbms and handler.dbms not in kb.htmlFp:
        kb.lastParserStatus = handler.dbms
        kb.htmlFp.append(handler.dbms)
    else:
        kb.lastParserStatus = None

    # generic SQL warning/error messages
    if re.search(r"SQL (warning|error|syntax)", page, re.I):
        handler._markAsErrorPage()

    return handler.dbms
def htmlParser(page):
    """
    This function calls a class that parses the input HTML page to
    fingerprint the back-end database management system
    """

    xmlfile = paths.ERRORS_XML

    checkFile(xmlfile)

    handler = htmlHandler(page)
    parseXmlFile(xmlfile, handler)

    if handler.dbms and handler.dbms not in kb.htmlFp:
        kb.lastParserStatus = handler.dbms
        kb.htmlFp.append(handler.dbms)
    else:
        kb.lastParserStatus = None

    # generic SQL warning/error messages
    if re.search(r"SQL (warning|error|syntax)", page, re.I):
        handler._markAsErrorPage()

    return handler.dbms
def connect(self):
    self.initConnection()

    try:
        msg = "what's the location of 'cachejdbc.jar'? "
        jar = readInput(msg)
        checkFile(jar)
        args = "-Djava.class.path=%s" % jar
        jvm_path = jpype.getDefaultJVMPath()
        jpype.startJVM(jvm_path, args)
    except Exception as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    try:
        driver = 'com.intersys.jdbc.CacheDriver'
        connection_string = 'jdbc:Cache://%s:%d/%s' % (self.hostname, self.port, self.db)
        self.connector = jaydebeapi.connect(driver, connection_string, str(self.user), str(self.password))
    except Exception as ex:
        raise SqlmapConnectionException(getSafeExString(ex))

    self.initCursor()
    self.printConnected()
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    custom_wordlist = [""]
    hash_regexes = []
    results = []
    resumes = []
    user_hash = []
    processException = False
    foundHash = False

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)
                infoMsg = "using hash method '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        keys = set()
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                foundHash = True
                hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_

                if re.match(hash_regex, hash_):
                    try:
                        item = None

                        if hash_regex not in (HASH.CRYPT_GENERIC, HASH.JOOMLA, HASH.WORDPRESS, HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD, HASH.SSHA, HASH.SSHA256, HASH.SSHA512, HASH.DJANGO_MD5, HASH.DJANGO_SHA1, HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64):
                            hash_ = hash_.lower()

                        if hash_regex in (HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64):
                            item = [(user, hash_.decode("base64").encode("hex")), {}]
                        elif hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.APACHE_SHA1):
                            item = [(user, hash_), {}]
                        elif hash_regex in (HASH.SSHA,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[20:]}]
                        elif hash_regex in (HASH.SSHA256,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[32:]}]
                        elif hash_regex in (HASH.SSHA512,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[64:]}]
                        elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                            item = [(user, hash_), {'username': user}]
                        elif hash_regex in (HASH.ORACLE,):
                            item = [(user, hash_), {"salt": hash_[-20:]}]
                        elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW):
                            item = [(user, hash_), {"salt": hash_[6:14]}]
                        elif hash_regex in (HASH.CRYPT_GENERIC,):
                            item = [(user, hash_), {"salt": hash_[0:2]}]
                        elif hash_regex in (HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT):
                            item = [(user, hash_), {"salt": hash_.split('$')[2], "magic": "$%s$" % hash_.split('$')[1]}]
                        elif hash_regex in (HASH.JOOMLA, HASH.VBULLETIN, HASH.VBULLETIN_OLD):
                            item = [(user, hash_), {"salt": hash_.split(':')[-1]}]
                        elif hash_regex in (HASH.DJANGO_MD5, HASH.DJANGO_SHA1):
                            item = [(user, hash_), {"salt": hash_.split('$')[1]}]
                        elif hash_regex in (HASH.WORDPRESS,):
                            if ITOA64.index(hash_[3]) < 32:
                                item = [(user, hash_), {"salt": hash_[4:12], "count": 1 << ITOA64.index(hash_[3]), "prefix": hash_[:12]}]
                            else:
                                warnMsg = "invalid hash '%s'" % hash_
                                logger.warn(warnMsg)

                        if item and hash_ not in keys:
                            resumed = hashDBRetrieve(hash_)

                            if not resumed:
                                attack_info.append(item)
                                user_hash.append(item[0])
                            else:
                                infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user '%s'" % user

                                logger.info(infoMsg)
                                resumes.append((user, hash_, resumed))

                            keys.add(hash_)
                    except (binascii.Error, IndexError):
                        pass

        if not attack_info:
            continue

        if not kb.wordlists:
            while not kb.wordlists:
                # the slowest of all methods hence smaller default dict
                if hash_regex in (HASH.ORACLE_OLD,):
                    dictPaths = [paths.SMALL_DICT]
                else:
                    dictPaths = [paths.WORDLIST]

                message = "what dictionary do you want to use?\n"
                message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0]
                message += "[2] custom dictionary file\n"
                message += "[3] file with list of dictionary files"
                choice = readInput(message, default='1')

                try:
                    if choice == '2':
                        message = "what's the custom dictionary's location?\n"
                        dictPath = readInput(message)

                        if dictPath:
                            dictPaths = [dictPath]
                            logger.info("using custom dictionary")
                    elif choice == '3':
                        message = "what's the list file location?\n"
                        listPath = readInput(message)
                        checkFile(listPath)
                        dictPaths = getFileItems(listPath)
                        logger.info("using custom list of dictionaries")
                    else:
                        logger.info("using default dictionary")

                    dictPaths = filter(None, dictPaths)

                    for dictPath in dictPaths:
                        checkFile(dictPath)

                        if os.path.splitext(dictPath)[1].lower() == ".zip":
                            _ = zipfile.ZipFile(dictPath, 'r')

                            if len(_.namelist()) == 0:
                                errMsg = "no file(s) inside '%s'" % dictPath
                                raise SqlmapDataException(errMsg)
                            else:
                                _.open(_.namelist()[0])

                    kb.wordlists = dictPaths
                except Exception, ex:
                    warnMsg = "there was a problem while loading dictionaries"
                    warnMsg += " ('%s')" % getSafeExString(ex)
                    logger.critical(warnMsg)

        message = "do you want to use common password suffixes? (slow!) [y/N] "

        if readInput(message, default='N', boolean=True):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item

            if user and not user.startswith(DUMMY_USER_PREFIX):
                custom_wordlist.append(normalizeUnicode(user))

        # Algorithms without extra arguments (e.g. salt and/or username)
        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.SHA224_GENERIC, HASH.SHA256_GENERIC, HASH.SHA384_GENERIC, HASH.SHA512_GENERIC, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD):
            for suffix in suffix_list:
                if not attack_info or processException:
                    break

                if suffix:
                    clearConsoleLine()
                    infoMsg = "using suffix '%s'" % suffix
                    logger.info(infoMsg)

                retVal = None
                processes = []

                try:
                    if _multiprocessing:
                        if _multiprocessing.cpu_count() > 1:
                            infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                            singleTimeLogMessage(infoMsg)

                        gc.disable()

                        retVal = _multiprocessing.Queue()
                        count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                        for i in xrange(_multiprocessing.cpu_count()):
                            process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api))
                            processes.append(process)

                        for process in processes:
                            process.daemon = True
                            process.start()

                        while count.value > 0:
                            time.sleep(0.5)
                    else:
                        warnMsg = "multiprocessing hash cracking is currently "
                        warnMsg += "not supported on this platform"
                        singleTimeWarnMessage(warnMsg)

                        retVal = Queue()
                        _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api)
                except KeyboardInterrupt:
                    print
                    processException = True
                    warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                    logger.warn(warnMsg)

                    for process in processes:
                        try:
                            process.terminate()
                            process.join()
                        except (OSError, AttributeError):
                            pass
                finally:
                    if _multiprocessing:
                        gc.enable()

                    if retVal:
                        conf.hashDB.beginTransaction()

                        while not retVal.empty():
                            user, hash_, word = item = retVal.get(block=False)
                            attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info)
                            hashDBWrite(hash_, word)
                            results.append(item)

                        conf.hashDB.endTransaction()

            clearConsoleLine()
        else:
            for ((user, hash_), kwargs) in attack_info:
                if processException:
                    break

                if any(_[0] == user and _[1] == hash_ for _ in results):
                    continue

                count = 0
                found = False

                for suffix in suffix_list:
                    if found or processException:
                        break

                    if suffix:
                        clearConsoleLine()
                        infoMsg = "using suffix '%s'" % suffix
                        logger.info(infoMsg)

                    retVal = None
                    processes = []

                    try:
                        if _multiprocessing:
                            if _multiprocessing.cpu_count() > 1:
                                infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                                singleTimeLogMessage(infoMsg)

                            gc.disable()

                            retVal = _multiprocessing.Queue()
                            found_ = _multiprocessing.Value('i', False)
                            count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                            for i in xrange(_multiprocessing.cpu_count()):
                                process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api))
                                processes.append(process)

                            for process in processes:
                                process.daemon = True
                                process.start()

                            while count.value > 0:
                                time.sleep(0.5)

                            found = found_.value != 0
                        else:
                            warnMsg = "multiprocessing hash cracking is currently "
                            warnMsg += "not supported on this platform"
                            singleTimeWarnMessage(warnMsg)

                            class Value():
                                pass

                            retVal = Queue()
                            found_ = Value()
                            found_.value = False

                            _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api)

                            found = found_.value
                    except KeyboardInterrupt:
                        print
                        processException = True
                        warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                        logger.warn(warnMsg)

                        for process in processes:
                            try:
                                process.terminate()
                                process.join()
                            except (OSError, AttributeError):
                                pass
                    finally:
                        if _multiprocessing:
                            gc.enable()

                        if retVal:
                            conf.hashDB.beginTransaction()

                            while not retVal.empty():
                                user, hash_, word = item = retVal.get(block=False)
                                hashDBWrite(hash_, word)
                                results.append(item)

                            conf.hashDB.endTransaction()

                clearConsoleLine()
def dbTableValues(self, tableValues):
    replication = None
    rtable = None
    dumpFP = None
    appendToFile = False
    warnFile = False

    if tableValues is None:
        return

    db = tableValues["__infos__"]["db"]
    if not db:
        db = "All"
    table = tableValues["__infos__"]["table"]

    if conf.api:
        self._write(tableValues, content_type=CONTENT_TYPE.DUMP_TABLE)
        return

    dumpDbPath = os.path.join(conf.dumpPath, unsafeSQLIdentificatorNaming(db))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        replication = Replication(os.path.join(conf.dumpPath, "%s.sqlite3" % unsafeSQLIdentificatorNaming(db)))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if not os.path.isdir(dumpDbPath):
            try:
                os.makedirs(dumpDbPath)
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(db))
                dumpDbPath = os.path.join(conf.dumpPath, "%s-%s" % (_, hashlib.md5(getBytes(db)).hexdigest()[:8]))

                if not os.path.isdir(dumpDbPath):
                    try:
                        os.makedirs(dumpDbPath)
                    except Exception as ex:
                        tempDir = tempfile.mkdtemp(prefix="sqlmapdb")
                        warnMsg = "unable to create dump directory "
                        warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
                        warnMsg += "Using temporary directory '%s' instead" % tempDir
                        logger.warn(warnMsg)

                        dumpDbPath = tempDir

        dumpFileName = os.path.join(dumpDbPath, re.sub(r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower())))

        if not checkFile(dumpFileName, False):
            try:
                openFile(dumpFileName, "w+b").close()
            except SqlmapSystemException:
                raise
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table)))

                if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
                    _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table))
                    dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(getBytes(table)).hexdigest()[:8], conf.dumpFormat.lower()))
                else:
                    dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
        else:
            appendToFile = any((conf.limitStart, conf.limitStop))

            if not appendToFile:
                count = 1

                while True:
                    candidate = "%s.%d" % (dumpFileName, count)

                    if not checkFile(candidate, False):
                        try:
                            shutil.copyfile(dumpFileName, candidate)
                        except IOError:
                            pass
                        break
                    else:
                        count += 1

        dumpFP = openFile(dumpFileName, "wb" if not appendToFile else "ab", buffering=DUMP_FILE_BUFFER_SIZE)

    count = int(tableValues["__infos__"]["count"])
    separator = str()
    field = 1
    fields = len(tableValues) - 1

    columns = prioritySortColumns(list(tableValues.keys()))

    if conf.col:
        cols = conf.col.split(',')
        columns = sorted(columns, key=lambda _: cols.index(_) if _ in cols else 0)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]
            lines = "-" * (int(info["length"]) + 2)
            separator += "+%s" % lines

    separator += "+"
    self._write("Database: %s\nTable: %s" % (unsafeSQLIdentificatorNaming(db) if db else "Current database", unsafeSQLIdentificatorNaming(table)))

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        cols = []

        for column in columns:
            if column != "__infos__":
                colType = Replication.INTEGER

                for value in tableValues[column]['values']:
                    try:
                        if not value or value == " ":  # NULL
                            continue

                        int(value)
                    except ValueError:
                        colType = None
                        break

                if colType is None:
                    colType = Replication.REAL

                    for value in tableValues[column]['values']:
                        try:
                            if not value or value == " ":  # NULL
                                continue

                            float(value)
                        except ValueError:
                            colType = None
                            break

                cols.append((unsafeSQLIdentificatorNaming(column), colType if colType else Replication.TEXT))

        rtable = replication.createTable(table, cols)
    elif conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "<!DOCTYPE html>\n<html>\n<head>\n")
        dataToDumpFile(dumpFP, "<meta http-equiv=\"Content-type\" content=\"text/html;charset=%s\">\n" % UNICODE_ENCODING)
        dataToDumpFile(dumpFP, "<meta name=\"generator\" content=\"%s\" />\n" % VERSION_STRING)
        dataToDumpFile(dumpFP, "<title>%s</title>\n" % ("%s%s" % ("%s." % db if METADB_SUFFIX not in db else "", table)))
        dataToDumpFile(dumpFP, HTML_DUMP_CSS_STYLE)
        dataToDumpFile(dumpFP, "\n</head>\n<body>\n<table>\n<thead>\n<tr>\n")

    if count == 1:
        self._write("[1 entry]")
    else:
        self._write("[%d entries]" % count)

    self._write(separator)

    for column in columns:
        if column != "__infos__":
            info = tableValues[column]

            column = unsafeSQLIdentificatorNaming(column)
            maxlength = int(info["length"])
            blank = " " * (maxlength - len(column))

            self._write("| %s%s" % (column, blank), newline=False)

            if not appendToFile:
                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(column))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(column), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<th>%s</th>" % getUnicode(cgi.escape(column).encode("ascii", "xmlcharrefreplace")))

            field += 1

    if conf.dumpFormat == DUMP_FORMAT.HTML:
        dataToDumpFile(dumpFP, "\n</tr>\n</thead>\n<tbody>\n")

    self._write("|\n%s" % separator)

    if conf.dumpFormat == DUMP_FORMAT.CSV:
        dataToDumpFile(dumpFP, "\n" if not appendToFile else "")
    elif conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.beginTransaction()

    if count > TRIM_STDOUT_DUMP_SIZE:
        warnMsg = "console output will be trimmed to "
        warnMsg += "last %d rows due to " % TRIM_STDOUT_DUMP_SIZE
        warnMsg += "large table size"
        logger.warning(warnMsg)

    for i in xrange(count):
        console = (i >= count - TRIM_STDOUT_DUMP_SIZE)
        field = 1
        values = []

        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "<tr>")

        for column in columns:
            if column != "__infos__":
                info = tableValues[column]

                if len(info["values"]) <= i:
                    continue

                if info["values"][i] is None:
                    value = u''
                else:
                    value = getUnicode(info["values"][i])
                    value = DUMP_REPLACEMENTS.get(value, value)

                values.append(value)
                maxlength = int(info["length"])
                blank = " " * (maxlength - len(value))
                self._write("| %s%s" % (value, blank), newline=False, console=console)

                if len(value) > MIN_BINARY_DISK_DUMP_SIZE and r'\x' in value:
                    try:
                        mimetype = getText(magic.from_buffer(value, mime=True))

                        if any(mimetype.startswith(_) for _ in ("application", "image")):
                            if not os.path.isdir(dumpDbPath):
                                os.makedirs(dumpDbPath)

                            _ = re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(column)))
                            filepath = os.path.join(dumpDbPath, "%s-%d.bin" % (_, randomInt(8)))
                            warnMsg = "writing binary ('%s') content to file '%s' " % (mimetype, filepath)
                            logger.warn(warnMsg)

                            with openFile(filepath, "w+b", None) as f:
                                _ = safechardecode(value, True)
                                f.write(_)
                    except magic.MagicException as ex:
                        logger.debug(getSafeExString(ex))

                if conf.dumpFormat == DUMP_FORMAT.CSV:
                    if field == fields:
                        dataToDumpFile(dumpFP, "%s" % safeCSValue(value))
                    else:
                        dataToDumpFile(dumpFP, "%s%s" % (safeCSValue(value), conf.csvDel))
                elif conf.dumpFormat == DUMP_FORMAT.HTML:
                    dataToDumpFile(dumpFP, "<td>%s</td>" % getUnicode(cgi.escape(value).encode("ascii", "xmlcharrefreplace")))

                field += 1

        if conf.dumpFormat == DUMP_FORMAT.SQLITE:
            try:
                rtable.insert(values)
            except SqlmapValueException:
                pass
        elif conf.dumpFormat == DUMP_FORMAT.CSV:
            dataToDumpFile(dumpFP, "\n")
        elif conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tr>\n")

        self._write("|", console=console)

    self._write("%s\n" % separator)

    if conf.dumpFormat == DUMP_FORMAT.SQLITE:
        rtable.endTransaction()
        logger.info("table '%s.%s' dumped to sqlite3 database '%s'" % (db, table, replication.dbpath))
    elif conf.dumpFormat in (DUMP_FORMAT.CSV, DUMP_FORMAT.HTML):
        if conf.dumpFormat == DUMP_FORMAT.HTML:
            dataToDumpFile(dumpFP, "</tbody>\n</table>\n</body>\n</html>")
        else:
            dataToDumpFile(dumpFP, "\n")
        dumpFP.close()

        msg = "table '%s.%s' dumped to %s file '%s'" % (db, table, conf.dumpFormat, dumpFileName)

        if not warnFile:
            logger.info(msg)
        else:
            logger.warn(msg)
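# Stripped of console drawing and format branching, the CSV path above
# reduces to a header row plus one delimited row per entry. A minimal
# self-contained sketch with hypothetical column data (the csv module
# standing in for safeCSValue()):
import csv

def _csvDumpSketch(path="users.csv"):
    tableValues = {                       # column -> {"values": [...]}
        "id": {"values": ["1", "2"]},
        "name": {"values": ["alice", "bob"]},
    }

    with open(path, "w", newline="") as fp:
        writer = csv.writer(fp)           # handles quoting/escaping
        columns = list(tableValues.keys())
        writer.writerow(columns)          # header row

        for i in range(len(tableValues[columns[0]]["values"])):
            writer.writerow(tableValues[column]["values"][i] for column in columns)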
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    custom_wordlist = []
    hash_regexes = []
    results = []
    resumes = []
    processException = False
    user_hash = []

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0]
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)
                infoMsg = "using hash method '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        keys = set()
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                hash_ = hash_.split()[0]

                if re.match(hash_regex, hash_):
                    item = None

                    if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS):
                        hash_ = hash_.lower()

                    if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
                        item = [(user, hash_), {}]
                    elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                        item = [(user, hash_), {'username': user}]
                    elif hash_regex in (HASH.ORACLE):
                        item = [(user, hash_), {'salt': hash_[-20:]}]
                    elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD):
                        item = [(user, hash_), {'salt': hash_[6:14]}]
                    elif hash_regex in (HASH.CRYPT_GENERIC):
                        item = [(user, hash_), {'salt': hash_[0:2]}]
                    elif hash_regex in (HASH.WORDPRESS):
                        item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}]

                    if item and hash_ not in keys:
                        resumed = hashDBRetrieve(hash_)

                        if not resumed:
                            attack_info.append(item)
                            user_hash.append(item[0])
                        else:
                            infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_)

                            if user and not user.startswith(DUMMY_USER_PREFIX):
                                infoMsg += " for user '%s'" % user

                            logger.info(infoMsg)
                            resumes.append((user, hash_, resumed))

                        keys.add(hash_)

        if not attack_info:
            continue

        if not kb.wordlists:
            while not kb.wordlists:
                # the slowest of all methods hence smaller default dict
                if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS):
                    dictPaths = [paths.SMALL_DICT]
                else:
                    dictPaths = [paths.WORDLIST]

                message = "what dictionary do you want to use?\n"
                message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0]
                message += "[2] custom dictionary file\n"
                message += "[3] file with list of dictionary files"
                choice = readInput(message, default="1")

                try:
                    if choice == "2":
                        message = "what's the custom dictionary's location?\n"
                        dictPaths = [readInput(message)]
                        logger.info("using custom dictionary")
                    elif choice == "3":
                        message = "what's the list file location?\n"
                        listPath = readInput(message)
                        checkFile(listPath)
                        dictPaths = getFileItems(listPath)
                        logger.info("using custom list of dictionaries")
                    else:
                        logger.info("using default dictionary")

                    for dictPath in dictPaths:
                        checkFile(dictPath)

                    kb.wordlists = dictPaths
                except SqlmapFilePathException, msg:
                    warnMsg = "there was a problem while loading dictionaries"
                    warnMsg += " ('%s')" % msg
                    logger.critical(warnMsg)

        message = "do you want to use common password suffixes? (slow!) [y/N] "
        test = readInput(message, default="N")

        if test[0] in ("y", "Y"):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item

            if user and not user.startswith(DUMMY_USER_PREFIX):
                custom_wordlist.append(normalizeUnicode(user))

        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
            for suffix in suffix_list:
                if not attack_info or processException:
                    break

                if suffix:
                    clearConsoleLine()
                    infoMsg = "using suffix '%s'" % suffix
                    logger.info(infoMsg)

                retVal = None
                processes = []

                try:
                    if _multiprocessing:
                        if _multiprocessing.cpu_count() > 1:
                            infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                            singleTimeLogMessage(infoMsg)

                        retVal = _multiprocessing.Queue()
                        count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                        for i in xrange(_multiprocessing.cpu_count()):
                            p = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist))
                            processes.append(p)

                        for p in processes:
                            p.start()

                        for p in processes:
                            p.join()
                    else:
                        warnMsg = "multiprocessing hash cracking is currently "
                        warnMsg += "not supported on this platform"
                        singleTimeWarnMessage(warnMsg)

                        retVal = Queue()
                        _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist)
                except KeyboardInterrupt:
                    print
                    processException = True
                    warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                    logger.warn(warnMsg)

                    for process in processes:
                        try:
                            process.terminate()
                            process.join()
                        except OSError:
                            pass
                finally:
                    if retVal:
                        conf.hashDB.beginTransaction()

                        while not retVal.empty():
                            user, hash_, word = item = retVal.get(block=False)
                            attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info)
                            hashDBWrite(hash_, word)
                            results.append(item)

                        conf.hashDB.endTransaction()

            clearConsoleLine()
        else:
            for ((user, hash_), kwargs) in attack_info:
                if processException:
                    break

                if any(_[0] == user and _[1] == hash_ for _ in results):
                    continue

                count = 0
                found = False

                for suffix in suffix_list:
                    if found or processException:
                        break

                    if suffix:
                        clearConsoleLine()
                        infoMsg = "using suffix '%s'" % suffix
                        logger.info(infoMsg)

                    retVal = None
                    processes = []

                    try:
                        if _multiprocessing:
                            if _multiprocessing.cpu_count() > 1:
                                infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                                singleTimeLogMessage(infoMsg)

                            retVal = _multiprocessing.Queue()
                            found_ = _multiprocessing.Value('i', False)
                            count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                            for i in xrange(_multiprocessing.cpu_count()):
                                p = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist))
                                processes.append(p)

                            for p in processes:
                                p.start()

                            for p in processes:
                                p.join()

                            found = found_.value != 0
                        else:
                            warnMsg = "multiprocessing hash cracking is currently "
                            warnMsg += "not supported on this platform"
                            singleTimeWarnMessage(warnMsg)

                            class Value():
                                pass

                            retVal = Queue()
                            found_ = Value()
                            found_.value = False

                            _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist)

                            found = found_.value
                    except KeyboardInterrupt:
                        print
                        processException = True
                        warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                        logger.warn(warnMsg)

                        for process in processes:
                            try:
                                process.terminate()
                                process.join()
                            except OSError:
                                pass
                    finally:
                        if retVal:
                            conf.hashDB.beginTransaction()

                            while not retVal.empty():
                                user, hash_, word = item = retVal.get(block=False)
                                hashDBWrite(hash_, word)
                                results.append(item)

                            conf.hashDB.endTransaction()

                clearConsoleLine()
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    hash_regexes = []
    results = []

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0]
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)
                infoMsg = "using hash method: '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                hash_ = hash_.split()[0]

                if getCompiledRegex(hash_regex).match(hash_):
                    hash_ = hash_.lower()

                    if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
                        attack_info.append([(user, hash_), {}])
                    elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                        attack_info.append([(user, hash_), {'username': user}])
                    elif hash_regex in (HASH.ORACLE):
                        attack_info.append([(user, hash_), {'salt': hash_[-20:]}])
                    elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD):
                        attack_info.append([(user, hash_), {'salt': hash_[6:14]}])
                    elif hash_regex in (HASH.CRYPT_GENERIC):
                        attack_info.append([(user, hash_), {'salt': hash_[0:2]}])

        if not attack_info:
            continue

        if not kb.wordlist:
            if hash_regex == HASH.ORACLE_OLD:
                # it's the slowest of all methods hence smaller default dict
                message = "what's the dictionary's location? [%s]" % paths.ORACLE_DEFAULT_PASSWD
                dictpath = readInput(message, default=paths.ORACLE_DEFAULT_PASSWD)
            else:
                message = "what's the dictionary's location? [%s]" % paths.WORDLIST
                dictpath = readInput(message, default=paths.WORDLIST)

            checkFile(dictpath)

            infoMsg = "loading dictionary from: '%s'" % dictpath
            logger.info(infoMsg)

            kb.wordlist = getFileItems(dictpath, None, False)

        message = "do you want to use common password suffixes? (slow!) [y/N] "
        test = readInput(message, default="N")

        if test[0] in ("y", "Y"):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary attack (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item
            kb.wordlist.append(normalizeUnicode(user))

        length = len(kb.wordlist) * len(suffix_list)

        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
            count = 0

            for suffix in suffix_list:
                if not attack_info:
                    break

                for word in kb.wordlist:
                    if not attack_info:
                        break

                    count += 1

                    if suffix:
                        word = word + suffix

                    try:
                        current = __functions__[hash_regex](password=word, uppercase=False)

                        for item in attack_info:
                            ((user, hash_), _) = item

                            if hash_ == current:
                                results.append((user, hash_, word))
                                clearConsoleLine()

                                infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user: '%s'\n" % user
                                else:
                                    infoMsg += " for hash: '%s'\n" % hash_

                                dataToStdout(infoMsg, True)

                                attack_info.remove(item)
                            elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD) or hash_regex == HASH.CRYPT_GENERIC and IS_WIN:
                                status = '%d/%d words (%d%s)' % (count, length, round(100.0 * count / length), '%')
                                dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status))
                    except KeyboardInterrupt:
                        print
                        warnMsg = "user aborted during dictionary attack phase"
                        logger.warn(warnMsg)
                        return results
                    except:
                        warnMsg = "there was a problem while hashing entry: %s. " % repr(word)
                        warnMsg += "Please report by e-mail to %s." % ML
                        logger.critical(warnMsg)

            clearConsoleLine()
        else:
            for ((user, hash_), kwargs) in attack_info:
                count = 0
                found = False

                for suffix in suffix_list:
                    if found:
                        break

                    for word in kb.wordlist:
                        count += 1

                        if suffix:
                            word = word + suffix

                        try:
                            current = __functions__[hash_regex](password=word, uppercase=False, **kwargs)

                            if hash_ == current:
                                if hash_regex == HASH.ORACLE_OLD:  # only for cosmetic purposes
                                    word = word.upper()

                                results.append((user, hash_, word))
                                clearConsoleLine()

                                infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user: '%s'\n" % user
                                else:
                                    infoMsg += " for hash: '%s'\n" % hash_

                                dataToStdout(infoMsg, True)

                                found = True
                                break
                            elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD) or hash_regex == HASH.CRYPT_GENERIC and IS_WIN:
                                status = '%d/%d words (%d%s)' % (count, length, round(100.0 * count / length), '%')

                                if not user.startswith(DUMMY_USER_PREFIX):
                                    status += ' (user: %s)' % user

                                dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status))
                        except KeyboardInterrupt:
                            print
                            warnMsg = "user aborted during dictionary attack phase"
                            logger.warn(warnMsg)
                            return results
                        except:
                            warnMsg = "there was a problem while hashing entry: %s. " % repr(word)
                            warnMsg += "Please report by e-mail to %s." % ML
                            logger.critical(warnMsg)

                clearConsoleLine()

    if len(hash_regexes) == 0:
        warnMsg = "unknown hash format. "
        warnMsg += "Please report by e-mail to %s." % ML
        logger.warn(warnMsg)

    if len(results) == 0:
        warnMsg = "no clear password(s) found"
        logger.warn(warnMsg)

    return results
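# The core loop of the variants above, hash every candidate word (plus an
# optional suffix) and compare against the targets, reduces to a few lines.
# A self-contained sketch with a made-up credential:
import hashlib

def _dictionaryAttackSketch():
    targets = {"admin": hashlib.md5(b"secret123").hexdigest()}   # hypothetical
    wordlist = ["password", "secret123", "letmein"]
    suffixes = ["", "1", "123", "!"]

    for user, hash_ in targets.items():
        for word in wordlist:
            for suffix in suffixes:
                candidate = word + suffix

                if hashlib.md5(candidate.encode()).hexdigest() == hash_:
                    print("found: '%s' for user '%s'" % (candidate, user))
                    return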
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    custom_wordlist = []
    hash_regexes = []
    results = []
    resumes = []
    processException = False
    user_hash = []

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0]
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)

                infoMsg = "using hash method '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        keys = set()
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                hash_ = hash_.split()[0]

                if re.match(hash_regex, hash_):
                    item = None

                    if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS):
                        hash_ = hash_.lower()

                    if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
                        item = [(user, hash_), {}]
                    elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                        item = [(user, hash_), {'username': user}]
                    elif hash_regex in (HASH.ORACLE,):
                        item = [(user, hash_), {'salt': hash_[-20:]}]
                    elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD):
                        item = [(user, hash_), {'salt': hash_[6:14]}]
                    elif hash_regex in (HASH.CRYPT_GENERIC,):
                        item = [(user, hash_), {'salt': hash_[0:2]}]
                    elif hash_regex in (HASH.WORDPRESS,):
                        item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}]

                    if item and hash_ not in keys:
                        resumed = hashDBRetrieve(hash_)

                        if not resumed:
                            attack_info.append(item)
                            user_hash.append(item[0])
                        else:
                            infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_)

                            if user and not user.startswith(DUMMY_USER_PREFIX):
                                infoMsg += " for user '%s'" % user

                            logger.info(infoMsg)
                            resumes.append((user, hash_, resumed))

                        keys.add(hash_)

        if not attack_info:
            continue

        if not kb.wordlists:
            while not kb.wordlists:
                # the slowest of all methods hence smaller default dict
                if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS):
                    dictPaths = [paths.SMALL_DICT]
                else:
                    dictPaths = [paths.WORDLIST]

                message = "what dictionary do you want to use?\n"
                message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0]
                message += "[2] custom dictionary file\n"
                message += "[3] file with list of dictionary files"
                choice = readInput(message, default="1")

                try:
                    if choice == "2":
                        message = "what's the custom dictionary's location?\n"
                        dictPaths = [readInput(message)]
                        logger.info("using custom dictionary")
                    elif choice == "3":
                        message = "what's the list file location?\n"
                        listPath = readInput(message)
                        checkFile(listPath)
                        dictPaths = getFileItems(listPath)
                        logger.info("using custom list of dictionaries")
                    else:
                        logger.info("using default dictionary")

                    for dictPath in dictPaths:
                        checkFile(dictPath)

                    kb.wordlists = dictPaths
                except sqlmapFilePathException, msg:
                    warnMsg = "there was a problem while loading dictionaries"
                    warnMsg += " ('%s')" % msg
                    logger.critical(warnMsg)

        message = "do you want to use common password suffixes? (slow!) [y/N] "
        test = readInput(message, default="N")

        if test[0] in ("y", "Y"):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item

            if user and not user.startswith(DUMMY_USER_PREFIX):
                custom_wordlist.append(normalizeUnicode(user))

        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
            for suffix in suffix_list:
                if not attack_info or processException:
                    break

                if suffix:
                    clearConsoleLine()
                    infoMsg = "using suffix '%s'" % suffix
                    logger.info(infoMsg)

                retVal = None
                processes = []

                try:
                    if _multiprocessing:
                        if _multiprocessing.cpu_count() > 1:
                            infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                            singleTimeLogMessage(infoMsg)

                            retVal = _multiprocessing.Queue()
                            count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                            for i in xrange(_multiprocessing.cpu_count()):
                                p = _multiprocessing.Process(target=__bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist))
                                processes.append(p)

                            for p in processes:
                                p.start()

                            for p in processes:
                                p.join()
                    else:
                        warnMsg = "multiprocessing hash cracking is currently "
                        warnMsg += "not supported on this platform"
                        singleTimeWarnMessage(warnMsg)

                        retVal = Queue()
                        __bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist)

                except KeyboardInterrupt:
                    print
                    processException = True
                    warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                    logger.warn(warnMsg)

                    for process in processes:
                        try:
                            process.terminate()
                            process.join()
                        except OSError:
                            pass

                finally:
                    if retVal:
                        conf.hashDB.beginTransaction()

                        while not retVal.empty():
                            user, hash_, word = item = retVal.get(block=False)
                            attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info)
                            hashDBWrite(hash_, word)
                            results.append(item)

                        conf.hashDB.endTransaction()

            clearConsoleLine()

        else:
            for ((user, hash_), kwargs) in attack_info:
                if processException:
                    break

                if any(_[0] == user and _[1] == hash_ for _ in results):
                    continue

                count = 0
                found = False

                for suffix in suffix_list:
                    if found or processException:
                        break

                    if suffix:
                        clearConsoleLine()
                        infoMsg = "using suffix '%s'" % suffix
                        logger.info(infoMsg)

                    retVal = None
                    processes = []

                    try:
                        if _multiprocessing:
                            if _multiprocessing.cpu_count() > 1:
                                infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                                singleTimeLogMessage(infoMsg)

                                retVal = _multiprocessing.Queue()
                                found_ = _multiprocessing.Value('i', False)
                                count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                                for i in xrange(_multiprocessing.cpu_count()):
                                    p = _multiprocessing.Process(target=__bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist))
                                    processes.append(p)

                                for p in processes:
                                    p.start()

                                for p in processes:
                                    p.join()

                                found = found_.value != 0
                        else:
                            warnMsg = "multiprocessing hash cracking is currently "
                            warnMsg += "not supported on this platform"
                            singleTimeWarnMessage(warnMsg)

                            class Value():
                                pass

                            retVal = Queue()
                            found_ = Value()
                            found_.value = False

                            __bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist)

                            found = found_.value

                    except KeyboardInterrupt:
                        print
                        processException = True
                        warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                        logger.warn(warnMsg)

                        for process in processes:
                            try:
                                process.terminate()
                                process.join()
                            except OSError:
                                pass

                    finally:
                        if retVal:
                            conf.hashDB.beginTransaction()

                            while not retVal.empty():
                                user, hash_, word = item = retVal.get(block=False)
                                hashDBWrite(hash_, word)
                                results.append(item)

                            conf.hashDB.endTransaction()

                clearConsoleLine()
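# --- Illustrative sketch (not part of sqlmap): the coordinator/worker
# pattern used by the multiprocessing branches above. Workers push results
# onto a shared Queue and decrement a shared counter when done; earlier
# revisions join() the processes, later ones daemonize them and poll the
# counter. The worker payload (word.upper()) is a stand-in for the real
# hash check.
import multiprocessing
import time

def _worker(words, retVal, count):
    try:
        for word in words:
            retVal.put(word.upper())  # stand-in for hashing and comparing
    finally:
        with count.get_lock():
            count.value -= 1          # signal completion to the coordinator

if __name__ == "__main__":
    chunks = (["foo", "bar"], ["baz"])
    retVal = multiprocessing.Queue()
    count = multiprocessing.Value('i', len(chunks))

    for chunk in chunks:
        process = multiprocessing.Process(target=_worker, args=(chunk, retVal, count))
        process.daemon = True
        process.start()

    while count.value > 0:
        time.sleep(0.5)               # same polling loop as in the code above

    while not retVal.empty():
        print(retVal.get(block=False))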
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    custom_wordlist = [""]
    hash_regexes = []
    results = []
    resumes = []
    user_hash = []
    processException = False
    foundHash = False

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)

                infoMsg = "using hash method '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        keys = set()
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                foundHash = True
                hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_

                if re.match(hash_regex, hash_):
                    try:
                        item = None

                        if hash_regex not in (HASH.CRYPT_GENERIC, HASH.JOOMLA, HASH.WORDPRESS, HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD, HASH.SSHA, HASH.SSHA256, HASH.SSHA512, HASH.DJANGO_MD5, HASH.DJANGO_SHA1, HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64):
                            hash_ = hash_.lower()

                        if hash_regex in (HASH.MD5_BASE64, HASH.SHA1_BASE64, HASH.SHA256_BASE64, HASH.SHA512_BASE64):
                            item = [(user, hash_.decode("base64").encode("hex")), {}]
                        elif hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.APACHE_SHA1):
                            item = [(user, hash_), {}]
                        elif hash_regex in (HASH.SSHA,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[20:]}]
                        elif hash_regex in (HASH.SSHA256,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[32:]}]
                        elif hash_regex in (HASH.SSHA512,):
                            item = [(user, hash_), {"salt": hash_.decode("base64")[64:]}]
                        elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                            item = [(user, hash_), {'username': user}]
                        elif hash_regex in (HASH.ORACLE,):
                            item = [(user, hash_), {"salt": hash_[-20:]}]
                        elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW):
                            item = [(user, hash_), {"salt": hash_[6:14]}]
                        elif hash_regex in (HASH.CRYPT_GENERIC,):
                            item = [(user, hash_), {"salt": hash_[0:2]}]
                        elif hash_regex in (HASH.UNIX_MD5_CRYPT, HASH.APACHE_MD5_CRYPT):
                            item = [(user, hash_), {"salt": hash_.split('$')[2], "magic": "$%s$" % hash_.split('$')[1]}]
                        elif hash_regex in (HASH.JOOMLA, HASH.VBULLETIN, HASH.VBULLETIN_OLD):
                            item = [(user, hash_), {"salt": hash_.split(':')[-1]}]
                        elif hash_regex in (HASH.DJANGO_MD5, HASH.DJANGO_SHA1):
                            item = [(user, hash_), {"salt": hash_.split('$')[1]}]
                        elif hash_regex in (HASH.WORDPRESS,):
                            if ITOA64.index(hash_[3]) < 32:
                                item = [(user, hash_), {"salt": hash_[4:12], "count": 1 << ITOA64.index(hash_[3]), "prefix": hash_[:12]}]
                            else:
                                warnMsg = "invalid hash '%s'" % hash_
                                logger.warn(warnMsg)

                        if item and hash_ not in keys:
                            resumed = hashDBRetrieve(hash_)

                            if not resumed:
                                attack_info.append(item)
                                user_hash.append(item[0])
                            else:
                                infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user '%s'" % user

                                logger.info(infoMsg)
                                resumes.append((user, hash_, resumed))

                            keys.add(hash_)
                    except (binascii.Error, IndexError):
                        pass

        if not attack_info:
            continue

        if not kb.wordlists:
            while not kb.wordlists:
                # the slowest of all methods hence smaller default dict
                if hash_regex in (HASH.ORACLE_OLD,):
                    dictPaths = [paths.SMALL_DICT]
                else:
                    dictPaths = [paths.WORDLIST]

                message = "what dictionary do you want to use?\n"
                message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0]
                message += "[2] custom dictionary file\n"
                message += "[3] file with list of dictionary files"
                choice = readInput(message, default='1')

                try:
                    if choice == '2':
                        message = "what's the custom dictionary's location?\n"
                        dictPath = readInput(message)

                        if dictPath:
                            dictPaths = [dictPath]
                            logger.info("using custom dictionary")
                    elif choice == '3':
                        message = "what's the list file location?\n"
                        listPath = readInput(message)
                        checkFile(listPath)
                        dictPaths = getFileItems(listPath)
                        logger.info("using custom list of dictionaries")
                    else:
                        logger.info("using default dictionary")

                    dictPaths = filter(None, dictPaths)

                    for dictPath in dictPaths:
                        checkFile(dictPath)

                        if os.path.splitext(dictPath)[1].lower() == ".zip":
                            _ = zipfile.ZipFile(dictPath, 'r')

                            if len(_.namelist()) == 0:
                                errMsg = "no file(s) inside '%s'" % dictPath
                                raise SqlmapDataException(errMsg)
                            else:
                                _.open(_.namelist()[0])

                    kb.wordlists = dictPaths
                except Exception, ex:
                    warnMsg = "there was a problem while loading dictionaries"
                    warnMsg += " ('%s')" % getSafeExString(ex)
                    logger.critical(warnMsg)

        message = "do you want to use common password suffixes? (slow!) [y/N] "

        if readInput(message, default='N', boolean=True):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item

            if user and not user.startswith(DUMMY_USER_PREFIX):
                custom_wordlist.append(normalizeUnicode(user))

        # Algorithms without extra arguments (e.g. salt and/or username)
        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC, HASH.SHA224_GENERIC, HASH.SHA256_GENERIC, HASH.SHA384_GENERIC, HASH.SHA512_GENERIC, HASH.APACHE_SHA1, HASH.VBULLETIN, HASH.VBULLETIN_OLD):
            for suffix in suffix_list:
                if not attack_info or processException:
                    break

                if suffix:
                    clearConsoleLine()
                    infoMsg = "using suffix '%s'" % suffix
                    logger.info(infoMsg)

                retVal = None
                processes = []

                try:
                    if _multiprocessing:
                        if _multiprocessing.cpu_count() > 1:
                            infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                            singleTimeLogMessage(infoMsg)

                            gc.disable()

                            retVal = _multiprocessing.Queue()
                            count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                            for i in xrange(_multiprocessing.cpu_count()):
                                process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api))
                                processes.append(process)

                            for process in processes:
                                process.daemon = True
                                process.start()

                            while count.value > 0:
                                time.sleep(0.5)
                    else:
                        warnMsg = "multiprocessing hash cracking is currently "
                        warnMsg += "not supported on this platform"
                        singleTimeWarnMessage(warnMsg)

                        retVal = Queue()
                        _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api)

                except KeyboardInterrupt:
                    print
                    processException = True
                    warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                    logger.warn(warnMsg)

                    for process in processes:
                        try:
                            process.terminate()
                            process.join()
                        except (OSError, AttributeError):
                            pass

                finally:
                    if _multiprocessing:
                        gc.enable()

                    if retVal:
                        conf.hashDB.beginTransaction()

                        while not retVal.empty():
                            user, hash_, word = item = retVal.get(block=False)
                            attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info)
                            hashDBWrite(hash_, word)
                            results.append(item)

                        conf.hashDB.endTransaction()

            clearConsoleLine()

        else:
            for ((user, hash_), kwargs) in attack_info:
                if processException:
                    break

                if any(_[0] == user and _[1] == hash_ for _ in results):
                    continue

                count = 0
                found = False

                for suffix in suffix_list:
                    if found or processException:
                        break

                    if suffix:
                        clearConsoleLine()
                        infoMsg = "using suffix '%s'" % suffix
                        logger.info(infoMsg)

                    retVal = None
                    processes = []

                    try:
                        if _multiprocessing:
                            if _multiprocessing.cpu_count() > 1:
                                infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                                singleTimeLogMessage(infoMsg)

                                gc.disable()

                                retVal = _multiprocessing.Queue()
                                found_ = _multiprocessing.Value('i', False)
                                count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                                for i in xrange(_multiprocessing.cpu_count()):
                                    process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api))
                                    processes.append(process)

                                for process in processes:
                                    process.daemon = True
                                    process.start()

                                while count.value > 0:
                                    time.sleep(0.5)

                                found = found_.value != 0
                        else:
                            warnMsg = "multiprocessing hash cracking is currently "
                            warnMsg += "not supported on this platform"
                            singleTimeWarnMessage(warnMsg)

                            class Value():
                                pass

                            retVal = Queue()
                            found_ = Value()
                            found_.value = False

                            _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api)

                            found = found_.value

                    except KeyboardInterrupt:
                        print
                        processException = True
                        warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                        logger.warn(warnMsg)

                        for process in processes:
                            try:
                                process.terminate()
                                process.join()
                            except (OSError, AttributeError):
                                pass

                    finally:
                        if _multiprocessing:
                            gc.enable()

                        if retVal:
                            conf.hashDB.beginTransaction()

                            while not retVal.empty():
                                user, hash_, word = item = retVal.get(block=False)
                                hashDBWrite(hash_, word)
                                results.append(item)

                            conf.hashDB.endTransaction()

                clearConsoleLine()
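# --- Illustrative sketch (not part of sqlmap): how the WordPress (phpass)
# salt extraction above decodes the hash header. hash_[3] indexes into the
# standard phpass alphabet (the same alphabet as sqlmap's ITOA64 constant),
# giving log2 of the iteration count; indexes >= 32 are rejected as invalid,
# exactly like the check above. phpass_params is a hypothetical helper.
ITOA64 = "./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"

def phpass_params(hash_):
    if ITOA64.index(hash_[3]) >= 32:
        raise ValueError("invalid hash '%s'" % hash_)

    return {
        "salt": hash_[4:12],          # 8-character salt after the "$P$x" header
        "count": 1 << ITOA64.index(hash_[3]),
        "prefix": hash_[:12],         # header + salt, used for verification
    }

# For a "$P$B..." hash, ITOA64.index('B') == 13, so count == 2 ** 13 == 8192.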
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    hash_regexes = []
    results = []

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0]
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)

                infoMsg = "using hash method: '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                hash_ = hash_.split()[0]

                if getCompiledRegex(hash_regex).match(hash_):
                    hash_ = hash_.lower()

                    if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
                        attack_info.append([(user, hash_), {}])
                    elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                        attack_info.append([(user, hash_), {'username': user}])
                    elif hash_regex in (HASH.ORACLE,):
                        attack_info.append([(user, hash_), {'salt': hash_[-20:]}])
                    elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD):
                        attack_info.append([(user, hash_), {'salt': hash_[6:14]}])
                    elif hash_regex in (HASH.CRYPT_GENERIC,):
                        attack_info.append([(user, hash_), {'salt': hash_[0:2]}])

        if not attack_info:
            continue

        if not kb.wordlist:
            if hash_regex == HASH.ORACLE_OLD:  # it's the slowest of all methods hence smaller default dict
                message = "what's the dictionary's location? [%s]" % paths.ORACLE_DEFAULT_PASSWD
                dictpath = readInput(message, default=paths.ORACLE_DEFAULT_PASSWD)
            else:
                message = "what's the dictionary's location? [%s]" % paths.WORDLIST
                dictpath = readInput(message, default=paths.WORDLIST)

            checkFile(dictpath)

            infoMsg = "loading dictionary from: '%s'" % dictpath
            logger.info(infoMsg)

            kb.wordlist = getFileItems(dictpath, None, False)

        message = "do you want to use common password suffixes? (slow!) [y/N] "
        test = readInput(message, default="N")

        if test[0] in ("y", "Y"):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary attack (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item
            kb.wordlist.append(normalizeUnicode(user))

        length = len(kb.wordlist) * len(suffix_list)

        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
            count = 0

            for suffix in suffix_list:
                if not attack_info:
                    break

                for word in kb.wordlist:
                    if not attack_info:
                        break

                    count += 1

                    if suffix:
                        word = word + suffix

                    try:
                        current = __functions__[hash_regex](password=word, uppercase=False)

                        for item in attack_info:
                            ((user, hash_), _) = item

                            if hash_ == current:
                                results.append((user, hash_, word))
                                clearConsoleLine()

                                infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user: '%s'\n" % user
                                else:
                                    infoMsg += " for hash: '%s'\n" % hash_

                                dataToStdout(infoMsg, True)

                                attack_info.remove(item)

                            elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD,) or (hash_regex == HASH.CRYPT_GENERIC and IS_WIN):
                                status = "%d/%d words (%d%%)" % (count, length, round(100.0 * count / length))
                                dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status))

                    except KeyboardInterrupt:
                        print
                        warnMsg = "user aborted during dictionary attack phase"
                        logger.warn(warnMsg)
                        return results

                    except:
                        warnMsg = "there was a problem while hashing entry: %s. " % repr(word)
                        warnMsg += "Please report by e-mail to %s." % ML
                        logger.critical(warnMsg)

            clearConsoleLine()

        else:
            for ((user, hash_), kwargs) in attack_info:
                count = 0
                found = False

                for suffix in suffix_list:
                    if found:
                        break

                    for word in kb.wordlist:
                        count += 1

                        if suffix:
                            word = word + suffix

                        try:
                            current = __functions__[hash_regex](password=word, uppercase=False, **kwargs)

                            if hash_ == current:
                                if hash_regex == HASH.ORACLE_OLD:  # only for cosmetic purposes
                                    word = word.upper()

                                results.append((user, hash_, word))
                                clearConsoleLine()

                                infoMsg = "[%s] [INFO] found: '%s'" % (time.strftime("%X"), word)

                                if user and not user.startswith(DUMMY_USER_PREFIX):
                                    infoMsg += " for user: '%s'\n" % user
                                else:
                                    infoMsg += " for hash: '%s'\n" % hash_

                                dataToStdout(infoMsg, True)

                                found = True
                                break

                            elif count % HASH_MOD_ITEM_DISPLAY == 0 or count == length or hash_regex in (HASH.ORACLE_OLD,) or (hash_regex == HASH.CRYPT_GENERIC and IS_WIN):
                                status = "%d/%d words (%d%%)" % (count, length, round(100.0 * count / length))

                                if not user.startswith(DUMMY_USER_PREFIX):
                                    status += " (user: %s)" % user

                                dataToStdout("\r[%s] [INFO] %s" % (time.strftime("%X"), status))

                        except KeyboardInterrupt:
                            print
                            warnMsg = "user aborted during dictionary attack phase"
                            logger.warn(warnMsg)
                            return results

                        except:
                            warnMsg = "there was a problem while hashing entry: %s. " % repr(word)
                            warnMsg += "Please report by e-mail to %s." % ML
                            logger.critical(warnMsg)

        clearConsoleLine()

    if len(hash_regexes) == 0:
        warnMsg = "unknown hash format. "
        warnMsg += "Please report by e-mail to %s." % ML
        logger.warn(warnMsg)

    if len(results) == 0:
        warnMsg = "no clear password(s) found"
        logger.warn(warnMsg)

    return results
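# --- Illustrative sketch (not part of sqlmap): the kind of dispatch that
# hashRecognition performs above - try each known hash pattern against the
# value and return the first regex that matches. The regex table here is
# abbreviated and hypothetical; sqlmap's real table is far larger.
import re

HASH_REGEXES = (
    r"\A\*[0-9A-F]{40}\Z",    # MySQL >= 4.1
    r"\A[0-9a-f]{32}\Z",      # generic MD5
    r"\A[0-9a-f]{40}\Z",      # generic SHA1
)

def hash_recognition(value):
    for regex in HASH_REGEXES:
        if re.match(regex, value, re.I):
            return regex

    return None

# hash_recognition("5f4dcc3b5aa765d61d8327deb882cf99") returns the
# 32-hex-character MD5 pattern; unknown formats return None.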
            except IOError, _:
                errMsg = "unable to write to the temporary directory ('%s'). " % _
                errMsg += "Please make sure that your disk is not full and "
                errMsg += "that you have sufficient write permissions to "
                errMsg += "create temporary files and/or directories"
                raise SqlmapSystemException(errMsg)

            warnMsg = "unable to create dump directory "
            warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            dumpDbPath = tempDir

        dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))

        if not checkFile(dumpFileName, False):
            try:
                openFile(dumpFileName, "w+b").close()
            except SqlmapSystemException:
                raise
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))

                if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
                    _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
                    dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
                else:
                    dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))

        appendToFile = any((conf.limitStart, conf.limitStop)) and checkFile(dumpFileName, False)
def _feedTargetsDict(reqFile, targets):
    def _parseBurpLog(content):
        """
        Parses burp logs
        """

        if not re.search(BURP_REQUEST_REGEX, content, re.I | re.S):
            if re.search(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
                reqResList = []

                for match in re.finditer(BURP_XML_HISTORY_REGEX, content, re.I | re.S):
                    port, request = match.groups()

                    try:
                        request = request.decode("base64")
                    except binascii.Error:
                        continue

                    _ = re.search(r"%s:.+" % re.escape(HTTP_HEADER.HOST), request)

                    if _:
                        host = _.group(0).strip()

                        if not re.search(r":\d+\Z", host):
                            request = request.replace(host, "%s:%d" % (host, int(port)))

                    reqResList.append(request)
            else:
                reqResList = [content]
        else:
            reqResList = re.finditer(BURP_REQUEST_REGEX, content, re.I | re.S)

        for match in reqResList:
            request = match if isinstance(match, basestring) else match.group(0)
            request = re.sub(r"\A[^\w]+", "", request)

            schemePort = re.search(r"(http[\w]*)\:\/\/.*?\:([\d]+).+?={10,}", request, re.I | re.S)

            if schemePort:
                scheme = schemePort.group(1)
                port = schemePort.group(2)
                request = re.sub(r"\n=+\Z", "", request.split(schemePort.group(0))[-1].lstrip())
            else:
                scheme, port = None, None

            if not re.search(r"^[\n]*(%s).*?\sHTTP\/" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), request, re.I | re.M):
                continue

            if re.search(r"^[\n]*%s.*?\.(%s)\sHTTP\/" % (HTTPMETHOD.GET, "|".join(CRAWL_EXCLUDE_EXTENSIONS)), request, re.I | re.M):
                continue

            getPostReq = False
            url = None
            host = None
            method = None
            data = None
            cookie = None
            params = False
            newline = None
            lines = request.split('\n')
            headers = []

            for index in xrange(len(lines)):
                line = lines[index]

                if not line.strip() and index == len(lines) - 1:
                    break

                newline = "\r\n" if line.endswith('\r') else '\n'
                line = line.strip('\r')
                match = re.search(r"\A(%s) (.+) HTTP/[\d.]+\Z" % "|".join(getPublicTypeMembers(HTTPMETHOD, True)), line) if not method else None

                if len(line.strip()) == 0 and method and method != HTTPMETHOD.GET and data is None:
                    data = ""
                    params = True

                elif match:
                    method = match.group(1)
                    url = match.group(2)
                    params = True
                    getPostReq = True

                # POST parameters
                elif data is not None and params:
                    data += "%s%s" % (line, newline)

                # GET parameters
                elif "?" in line and "=" in line and ": " not in line:
                    params = True

                # Headers
                elif re.search(r"\A\S+:", line):
                    key, value = line.split(":", 1)
                    value = value.strip().replace("\r", "").replace("\n", "")

                    # Cookie and Host headers
                    if key.upper() == HTTP_HEADER.COOKIE.upper():
                        cookie = value
                    elif key.upper() == HTTP_HEADER.HOST.upper():
                        if '://' in value:
                            scheme, value = value.split('://')[:2]

                        splitValue = value.split(":")
                        host = splitValue[0]

                        if len(splitValue) > 1:
                            port = filterStringValue(splitValue[1], "[0-9]")

                    # Avoid to add a static content length header to
                    # headers and consider the following lines as
                    # POSTed data
                    if key.upper() == HTTP_HEADER.CONTENT_LENGTH.upper():
                        params = True

                    # Avoid proxy and connection type related headers
                    elif key not in (HTTP_HEADER.PROXY_CONNECTION, HTTP_HEADER.CONNECTION):
                        headers.append((getUnicode(key), getUnicode(value)))

                    # if kb.customInjectionMark in re.sub(PROBLEMATIC_CUSTOM_INJECTION_PATTERNS, "", value or ""):
                    #     params = True

            data = data.rstrip("\r\n") if data else data

            if getPostReq and (params or cookie):
                if not port and isinstance(scheme, basestring) and scheme.lower() == "https":
                    port = "443"
                elif not scheme and port == "443":
                    scheme = "https"

                if not host:
                    errMsg = "invalid format of a request file"
                    raise Exception(errMsg)

                if not url.startswith("http"):
                    url = "%s://%s:%s%s" % (scheme or "http", host, port or "80", url)
                    scheme = None
                    port = None

                # return (url, method, data, cookie, tuple(headers))
                targets.add((url, method, data, cookie, tuple(headers)))

    checkFile(reqFile)

    try:
        with openFile(reqFile, "rb") as f:
            content = f.read()
    except (IOError, OSError, MemoryError), ex:
        errMsg = "something went wrong while trying "
        errMsg += "to read the content of file '%s' ('%s')" % (reqFile, getSafeExString(ex))
        raise Exception(errMsg)
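# --- Illustrative sketch (not part of sqlmap): the essence of the request
# parsing performed by _parseBurpLog above - request line first, headers up
# to the first blank line, and the remainder treated as POST data.
# parse_raw_request is a hypothetical helper, not sqlmap's API.
import re

def parse_raw_request(raw):
    head, _, body = raw.replace("\r\n", "\n").partition("\n\n")
    lines = head.split("\n")

    # request line, e.g. "POST /login HTTP/1.1"
    method, url = re.match(r"(\w+) (\S+) HTTP/[\d.]+", lines[0]).groups()

    headers = {}
    for line in lines[1:]:
        key, _, value = line.partition(":")
        headers[key.strip()] = value.strip()

    return method, url, headers, body or None

# parse_raw_request("POST /login HTTP/1.1\nHost: example.com\n\nuser=a&pass=b")
# returns ("POST", "/login", {"Host": "example.com"}, "user=a&pass=b")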
def dictionaryAttack(attack_dict):
    suffix_list = [""]
    custom_wordlist = [""]
    hash_regexes = []
    results = []
    resumes = []
    user_hash = []
    processException = False
    foundHash = False

    for (_, hashes) in attack_dict.items():
        for hash_ in hashes:
            if not hash_:
                continue

            hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_
            regex = hashRecognition(hash_)

            if regex and regex not in hash_regexes:
                hash_regexes.append(regex)

                infoMsg = "using hash method '%s'" % __functions__[regex].func_name
                logger.info(infoMsg)

    for hash_regex in hash_regexes:
        keys = set()
        attack_info = []

        for (user, hashes) in attack_dict.items():
            for hash_ in hashes:
                if not hash_:
                    continue

                foundHash = True
                hash_ = hash_.split()[0] if hash_ and hash_.strip() else hash_

                if re.match(hash_regex, hash_):
                    item = None

                    if hash_regex not in (HASH.CRYPT_GENERIC, HASH.WORDPRESS):
                        hash_ = hash_.lower()

                    if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
                        item = [(user, hash_), {}]
                    elif hash_regex in (HASH.ORACLE_OLD, HASH.POSTGRES):
                        item = [(user, hash_), {'username': user}]
                    elif hash_regex in (HASH.ORACLE,):
                        item = [(user, hash_), {'salt': hash_[-20:]}]
                    elif hash_regex in (HASH.MSSQL, HASH.MSSQL_OLD, HASH.MSSQL_NEW):
                        item = [(user, hash_), {'salt': hash_[6:14]}]
                    elif hash_regex in (HASH.CRYPT_GENERIC,):
                        item = [(user, hash_), {'salt': hash_[0:2]}]
                    elif hash_regex in (HASH.WORDPRESS,):
                        if ITOA64.index(hash_[3]) < 32:
                            item = [(user, hash_), {'salt': hash_[4:12], 'count': 1 << ITOA64.index(hash_[3]), 'prefix': hash_[:12]}]
                        else:
                            warnMsg = "invalid hash '%s'" % hash_
                            logger.warn(warnMsg)

                    if item and hash_ not in keys:
                        resumed = hashDBRetrieve(hash_)

                        if not resumed:
                            attack_info.append(item)
                            user_hash.append(item[0])
                        else:
                            infoMsg = "resuming password '%s' for hash '%s'" % (resumed, hash_)

                            if user and not user.startswith(DUMMY_USER_PREFIX):
                                infoMsg += " for user '%s'" % user

                            logger.info(infoMsg)
                            resumes.append((user, hash_, resumed))

                        keys.add(hash_)

        if not attack_info:
            continue

        if not kb.wordlists:
            while not kb.wordlists:
                # the slowest of all methods hence smaller default dict
                if hash_regex in (HASH.ORACLE_OLD, HASH.WORDPRESS):
                    dictPaths = [paths.SMALL_DICT]
                else:
                    dictPaths = [paths.WORDLIST]

                message = "what dictionary do you want to use?\n"
                message += "[1] default dictionary file '%s' (press Enter)\n" % dictPaths[0]
                message += "[2] custom dictionary file\n"
                message += "[3] file with list of dictionary files"
                choice = readInput(message, default='1')

                try:
                    if choice == '2':
                        message = "what's the custom dictionary's location?\n"
                        _ = readInput(message)

                        if _:
                            dictPaths = [_]
                            logger.info("using custom dictionary")
                    elif choice == '3':
                        message = "what's the list file location?\n"
                        listPath = readInput(message)
                        checkFile(listPath)
                        dictPaths = getFileItems(listPath)
                        logger.info("using custom list of dictionaries")
                    else:
                        logger.info("using default dictionary")

                    dictPaths = filter(None, dictPaths)

                    for dictPath in dictPaths:
                        checkFile(dictPath)

                        if os.path.splitext(dictPath)[1].lower() == ".zip":
                            _ = zipfile.ZipFile(dictPath, 'r')

                            if len(_.namelist()) == 0:
                                errMsg = "no file(s) inside '%s'" % dictPath
                                raise SqlmapDataException(errMsg)
                            else:
                                _.open(_.namelist()[0])

                    kb.wordlists = dictPaths
                except Exception, ex:
                    warnMsg = "there was a problem while loading dictionaries ('%s')" % getSafeExString(ex)
                    logger.critical(warnMsg)

        message = "do you want to use common password suffixes? (slow!) [y/N] "

        if readInput(message, default='N', boolean=True):
            suffix_list += COMMON_PASSWORD_SUFFIXES

        infoMsg = "starting dictionary-based cracking (%s)" % __functions__[hash_regex].func_name
        logger.info(infoMsg)

        for item in attack_info:
            ((user, _), _) = item

            if user and not user.startswith(DUMMY_USER_PREFIX):
                custom_wordlist.append(normalizeUnicode(user))

        if hash_regex in (HASH.MYSQL, HASH.MYSQL_OLD, HASH.MD5_GENERIC, HASH.SHA1_GENERIC):
            for suffix in suffix_list:
                if not attack_info or processException:
                    break

                if suffix:
                    clearConsoleLine()
                    infoMsg = "using suffix '%s'" % suffix
                    logger.info(infoMsg)

                retVal = None
                processes = []

                try:
                    if _multiprocessing:
                        if _multiprocessing.cpu_count() > 1:
                            infoMsg = "starting %d processes" % _multiprocessing.cpu_count()
                            singleTimeLogMessage(infoMsg)

                            gc.disable()

                            retVal = _multiprocessing.Queue()
                            count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                            for i in xrange(_multiprocessing.cpu_count()):
                                process = _multiprocessing.Process(target=_bruteProcessVariantA, args=(attack_info, hash_regex, suffix, retVal, i, count, kb.wordlists, custom_wordlist, conf.api))
                                processes.append(process)

                            for process in processes:
                                process.daemon = True
                                process.start()

                            while count.value > 0:
                                time.sleep(0.5)
                    else:
                        warnMsg = "multiprocessing hash cracking is currently "
                        warnMsg += "not supported on this platform"
                        singleTimeWarnMessage(warnMsg)

                        retVal = Queue()
                        _bruteProcessVariantA(attack_info, hash_regex, suffix, retVal, 0, 1, kb.wordlists, custom_wordlist, conf.api)

                except KeyboardInterrupt:
                    print
                    processException = True
                    warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                    logger.warn(warnMsg)

                    for process in processes:
                        try:
                            process.terminate()
                            process.join()
                        except (OSError, AttributeError):
                            pass

                finally:
                    if _multiprocessing:
                        gc.enable()

                    if retVal:
                        conf.hashDB.beginTransaction()

                        while not retVal.empty():
                            user, hash_, word = item = retVal.get(block=False)
                            attack_info = filter(lambda _: _[0][0] != user or _[0][1] != hash_, attack_info)
                            hashDBWrite(hash_, word)
                            results.append(item)

                        conf.hashDB.endTransaction()

            clearConsoleLine()

        else:
            for ((user, hash_), kwargs) in attack_info:
                if processException:
                    break

                if any(_[0] == user and _[1] == hash_ for _ in results):
                    continue

                count = 0
                found = False

                for suffix in suffix_list:
                    if found or processException:
                        break

                    if suffix:
                        clearConsoleLine()
                        infoMsg = "using suffix '%s'" % suffix
                        logger.info(infoMsg)

                    retVal = None
                    processes = []

                    try:
                        if _multiprocessing:
                            if _multiprocessing.cpu_count() > 1:
                                infoMsg = "starting %d processes " % _multiprocessing.cpu_count()
                                singleTimeLogMessage(infoMsg)

                                gc.disable()

                                retVal = _multiprocessing.Queue()
                                found_ = _multiprocessing.Value('i', False)
                                count = _multiprocessing.Value('i', _multiprocessing.cpu_count())

                                for i in xrange(_multiprocessing.cpu_count()):
                                    process = _multiprocessing.Process(target=_bruteProcessVariantB, args=(user, hash_, kwargs, hash_regex, suffix, retVal, found_, i, count, kb.wordlists, custom_wordlist, conf.api))
                                    processes.append(process)

                                for process in processes:
                                    process.daemon = True
                                    process.start()

                                while count.value > 0:
                                    time.sleep(0.5)

                                found = found_.value != 0
                        else:
                            warnMsg = "multiprocessing hash cracking is currently "
                            warnMsg += "not supported on this platform"
                            singleTimeWarnMessage(warnMsg)

                            class Value():
                                pass

                            retVal = Queue()
                            found_ = Value()
                            found_.value = False

                            _bruteProcessVariantB(user, hash_, kwargs, hash_regex, suffix, retVal, found_, 0, 1, kb.wordlists, custom_wordlist, conf.api)

                            found = found_.value

                    except KeyboardInterrupt:
                        print
                        processException = True
                        warnMsg = "user aborted during dictionary-based attack phase (Ctrl+C was pressed)"
                        logger.warn(warnMsg)

                        for process in processes:
                            try:
                                process.terminate()
                                process.join()
                            except (OSError, AttributeError):
                                pass

                    finally:
                        if _multiprocessing:
                            gc.enable()

                        if retVal:
                            conf.hashDB.beginTransaction()

                            while not retVal.empty():
                                user, hash_, word = item = retVal.get(block=False)
                                hashDBWrite(hash_, word)
                                results.append(item)

                            conf.hashDB.endTransaction()

                clearConsoleLine()
            except IOError, _:
                errMsg = "unable to write to the temporary directory ('%s'). " % _
                errMsg += "Please make sure that your disk is not full and "
                errMsg += "that you have sufficient write permissions to "
                errMsg += "create temporary files and/or directories"
                raise SqlmapSystemException(errMsg)

            warnMsg = "unable to create dump directory "
            warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex))
            warnMsg += "Using temporary directory '%s' instead" % tempDir
            logger.warn(warnMsg)

            dumpDbPath = tempDir

        dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))

        if not checkFile(dumpFileName, False):
            try:
                openFile(dumpFileName, "w+b").close()
            except SqlmapSystemException:
                raise
            except:
                warnFile = True

                _ = re.sub(r"[^\w]", "_", normalizeUnicode(unsafeSQLIdentificatorNaming(table)))

                if len(_) < len(table) or IS_WIN and table.upper() in WINDOWS_RESERVED_NAMES:
                    _ = unicodeencode(re.sub(r"[^\w]", "_", unsafeSQLIdentificatorNaming(table)))
                    dumpFileName = os.path.join(dumpDbPath, "%s-%s.%s" % (_, hashlib.md5(unicodeencode(table)).hexdigest()[:8], conf.dumpFormat.lower()))
                else:
                    dumpFileName = os.path.join(dumpDbPath, "%s.%s" % (_, conf.dumpFormat.lower()))
        else:
            appendToFile = any((conf.limitStart, conf.limitStop))
warnMsg = "unable to create dump directory " warnMsg += "'%s' (%s). " % (dumpDbPath, getSafeExString(ex)) warnMsg += "Using temporary directory '%s' instead" % tempDir logger.warn(warnMsg) dumpDbPath = tempDir dumpFileName = os.path.join( dumpDbPath, re.sub( r'[\\/]', UNSAFE_DUMP_FILEPATH_REPLACEMENT, "%s.%s" % (unsafeSQLIdentificatorNaming(table), conf.dumpFormat.lower()))) if not checkFile(dumpFileName, False): try: openFile(dumpFileName, "w+b").close() except SqlmapSystemException: raise except: warnFile = True _ = re.sub( r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, normalizeUnicode(unsafeSQLIdentificatorNaming(table))) if len(_) < len(table) or IS_WIN and table.upper( ) in WINDOWS_RESERVED_NAMES: _ = unicodeencode( re.sub(r"[^\w]", UNSAFE_DUMP_FILEPATH_REPLACEMENT, unsafeSQLIdentificatorNaming(table)))